forked from goodboy/tractor
Compare commits
900 Commits
prompt_on_
...
main
| Author | SHA1 | Date |
|---|---|---|
|
|
83ce2275b9 | |
|
|
9f757ffa63 | |
|
|
0c6d512ba4 | |
|
|
fc130d06b8 | |
|
|
73423ef2b7 | |
|
|
b1f2a6b394 | |
|
|
9489a2f84d | |
|
|
92eaed6fec | |
|
|
217d54b9d1 | |
|
|
34ca02ed11 | |
|
|
62a364a1d3 | |
|
|
07781e38cd | |
|
|
9c6b90ef04 | |
|
|
542d4c7840 | |
|
|
9aebe7d8f9 | |
|
|
04c3d5e239 | |
|
|
759174729c | |
|
|
e9f3689191 | |
|
|
93aa39db07 | |
|
|
5ab642bdf0 | |
|
|
ed18ecd064 | |
|
|
cec0282953 | |
|
|
25c5847f2e | |
|
|
ba793fadd9 | |
|
|
d17864a432 | |
|
|
6c361a9564 | |
|
|
34ca7429c7 | |
|
|
c9a55c2d46 | |
|
|
548855b4f5 | |
|
|
5322861d6d | |
|
|
46a2fa7074 | |
|
|
bfe5b2dde6 | |
|
|
a9f06df3fb | |
|
|
ee32bc433c | |
|
|
561954594e | |
|
|
28a6354e81 | |
|
|
d1599449e7 | |
|
|
2d27c94dec | |
|
|
6e4c76245b | |
|
|
a6f599901c | |
|
|
0fafd25f0d | |
|
|
b74e93ee55 | |
|
|
961504b657 | |
|
|
bd148300c5 | |
|
|
4a7491bda4 | |
|
|
62415518fc | |
|
|
5c7d930a9a | |
|
|
c46986504d | |
|
|
e05a4d3cac | |
|
|
a9aa5ec04e | |
|
|
5021514a6a | |
|
|
79f502034f | |
|
|
331921f612 | |
|
|
df0d00abf4 | |
|
|
a72d1e6c48 | |
|
|
5931c59aef | |
|
|
ba08052ddf | |
|
|
00112edd58 | |
|
|
1d706bddda | |
|
|
3c30c559d5 | |
|
|
599020c2c5 | |
|
|
50f6543ee7 | |
|
|
c0854fd221 | |
|
|
e875b62869 | |
|
|
3ab7498893 | |
|
|
dd041b0a01 | |
|
|
4e252526b5 | |
|
|
4ba3590450 | |
|
|
f1ff79a4e6 | |
|
|
70664b98de | |
|
|
1c425cbd22 | |
|
|
edc2211444 | |
|
|
b05abea51e | |
|
|
88c1c083bd | |
|
|
b096867d40 | |
|
|
a3c9822602 | |
|
|
e3a542f2b5 | |
|
|
0ffcea1033 | |
|
|
a7bdf0486c | |
|
|
d2ac9ecf95 | |
|
|
dcb1062bb8 | |
|
|
05d865c0f1 | |
|
|
8218f0f51f | |
|
|
8f19f5d3a8 | |
|
|
64c27a914b | |
|
|
d9c8d543b3 | |
|
|
048b154f00 | |
|
|
88828e9f99 | |
|
|
25ff195c17 | |
|
|
f60cc646ff | |
|
|
a2b754b5f5 | |
|
|
5e13588aed | |
|
|
0a56f40bab | |
|
|
f776c47cb4 | |
|
|
7f584d4f54 | |
|
|
d650dda0fa | |
|
|
f6598e8400 | |
|
|
59822ff093 | |
|
|
ca427aec7e | |
|
|
f53aa992af | |
|
|
69e0afccf0 | |
|
|
e275c49b23 | |
|
|
48fbf38c1d | |
|
|
defd6e28d2 | |
|
|
414b0e2bae | |
|
|
d34fb54f7c | |
|
|
5d87f63377 | |
|
|
0ca3d50602 | |
|
|
8880a80e3e | |
|
|
7be713ee1e | |
|
|
4bd8211abb | |
|
|
a23a98886c | |
|
|
31544c862c | |
|
|
7d320c4e1e | |
|
|
38944ad1d2 | |
|
|
9260909fe1 | |
|
|
c00b3c86ea | |
|
|
808a336508 | |
|
|
679d999185 | |
|
|
a8428d7de3 | |
|
|
e9f2fecd66 | |
|
|
547cf5a210 | |
|
|
b5e3fa7370 | |
|
|
cd16748598 | |
|
|
1af35f8170 | |
|
|
4569d11052 | |
|
|
6ba76ab700 | |
|
|
734dda35e9 | |
|
|
b7e04525cc | |
|
|
35977dcebb | |
|
|
e1f26f9611 | |
|
|
63c5b7696a | |
|
|
5f94f52226 | |
|
|
6bf571a124 | |
|
|
f5056cdd02 | |
|
|
9ff448faa3 | |
|
|
760b9890c4 | |
|
|
d000642462 | |
|
|
dd69948744 | |
|
|
5b69975f81 | |
|
|
6b474743f9 | |
|
|
5ac229244a | |
|
|
bbd2ea3e4f | |
|
|
6b903f7746 | |
|
|
2280bad135 | |
|
|
8d506796ec | |
|
|
02d03ce700 | |
|
|
9786e2c404 | |
|
|
116137d066 | |
|
|
7f87b4e717 | |
|
|
bb17d39c4e | |
|
|
fba6edfe9a | |
|
|
e4758550f7 | |
|
|
a7efbfdbc2 | |
|
|
1c6660c497 | |
|
|
202befa360 | |
|
|
c24708b273 | |
|
|
3aee702733 | |
|
|
a573c3c9a8 | |
|
|
6a352fee87 | |
|
|
6cb361352c | |
|
|
7807ffaabe | |
|
|
65b795612c | |
|
|
a42c1761a8 | |
|
|
359d732633 | |
|
|
b09e35f3dc | |
|
|
6618b004f4 | |
|
|
fc57a4d639 | |
|
|
2248ffb74f | |
|
|
1eb0d785a8 | |
|
|
98d0ca88e5 | |
|
|
37f843a128 | |
|
|
29cd2ddbac | |
|
|
295b06511b | |
|
|
1e6b5b3f0a | |
|
|
36ddb85197 | |
|
|
d6b0ddecd7 | |
|
|
9e5475391c | |
|
|
ef7ed7ac6f | |
|
|
d8094f4420 | |
|
|
d7b12735a8 | |
|
|
47107e44ed | |
|
|
ba384ca83d | |
|
|
ad9833a73a | |
|
|
161884fbf1 | |
|
|
c2e7dc7407 | |
|
|
309360daa2 | |
|
|
cbfb0d0144 | |
|
|
c0eef3bac3 | |
|
|
27e6ad18ee | |
|
|
28e32b8f85 | |
|
|
05df634d62 | |
|
|
6d2f4d108d | |
|
|
ae2687b381 | |
|
|
a331f6dab3 | |
|
|
9c0de24899 | |
|
|
1f3cef5ed6 | |
|
|
8538a9c591 | |
|
|
7533e93b0f | |
|
|
f67b0639b8 | |
|
|
26fedec6a1 | |
|
|
0711576678 | |
|
|
0477a62ac3 | |
|
|
01d6f111f6 | |
|
|
56ef4cba23 | |
|
|
52b5efd78d | |
|
|
a7d4bcdfb9 | |
|
|
79d0c17f6b | |
|
|
98c4614a36 | |
|
|
61df10b333 | |
|
|
094447787e | |
|
|
ba45c03e14 | |
|
|
00d8a2a099 | |
|
|
bedde076d9 | |
|
|
be1d8bf6fa | |
|
|
d9aee98db2 | |
|
|
708ce4a051 | |
|
|
d6d0112d95 | |
|
|
0fcbedd2be | |
|
|
412c66d000 | |
|
|
3cc835c215 | |
|
|
f15bbb30cc | |
|
|
ad211f8c2c | |
|
|
acac605c37 | |
|
|
078e507774 | |
|
|
81bf810fbb | |
|
|
7d1512e03a | |
|
|
1c85338ff8 | |
|
|
7a3c9d0458 | |
|
|
31196b9cb4 | |
|
|
44c9da1c91 | |
|
|
b4ce618e33 | |
|
|
a504d92536 | |
|
|
8c0d9614bc | |
|
|
a6fefcc2a8 | |
|
|
abdaf7bf1f | |
|
|
7b3324b240 | |
|
|
bbae2c91fd | |
|
|
2540d1f9e0 | |
|
|
63fac5a809 | |
|
|
568fb18d01 | |
|
|
f67e19a852 | |
|
|
0be9f5f907 | |
|
|
5e2d456029 | |
|
|
c7d5b021db | |
|
|
6f1f198fb1 | |
|
|
26fef82d33 | |
|
|
84d25b5727 | |
|
|
1ed0c861b5 | |
|
|
2dd3a682c8 | |
|
|
881813e61e | |
|
|
566a11c00d | |
|
|
af69272d16 | |
|
|
8e3f581d3f | |
|
|
eceb292415 | |
|
|
9921ea3cae | |
|
|
414a8c5b75 | |
|
|
eeb0516017 | |
|
|
d6eeddef4e | |
|
|
d478dbfcfe | |
|
|
ef6094a650 | |
|
|
4e8404bb09 | |
|
|
bbb3484ae9 | |
|
|
5cee222353 | |
|
|
8ebb1f09de | |
|
|
2683a7f33a | |
|
|
255209f881 | |
|
|
9a0d529b18 | |
|
|
1c441b0986 | |
|
|
afbdb50a30 | |
|
|
e46033cbe7 | |
|
|
c932bb5911 | |
|
|
33482d8f41 | |
|
|
7ae194baed | |
|
|
ef7ca49e9b | |
|
|
fde681fa19 | |
|
|
efcf81bcad | |
|
|
3988ea69f5 | |
|
|
8bd4490cad | |
|
|
622f840dfd | |
|
|
8ba315e60c | |
|
|
80f20b35b1 | |
|
|
9ec37dd13f | |
|
|
9be76b1dda | |
|
|
31f88b59f4 | |
|
|
155d581fa2 | |
|
|
a810f6c8f6 | |
|
|
83b9dc3c62 | |
|
|
f152a20025 | |
|
|
1ea8254ae3 | |
|
|
8ed890f892 | |
|
|
d4e6f2b8dc | |
|
|
64fe767647 | |
|
|
aca015f1c2 | |
|
|
818cd8535f | |
|
|
1e86722357 | |
|
|
eda48c8021 | |
|
|
ceda1e466d | |
|
|
d14d29ae8c | |
|
|
f068782e74 | |
|
|
84b04639f8 | |
|
|
4aa7e8c022 | |
|
|
b46a886449 | |
|
|
a26f817ed1 | |
|
|
2d18e6a4be | |
|
|
e815dcd3c8 | |
|
|
0d7b3f1ac5 | |
|
|
3ad558230a | |
|
|
22f405a707 | |
|
|
e5bcefb575 | |
|
|
8f7c022afe | |
|
|
c453623b9b | |
|
|
6e68f51617 | |
|
|
fdf934d02d | |
|
|
13572151aa | |
|
|
87342696a1 | |
|
|
8f774f52b1 | |
|
|
8b4ed31d3b | |
|
|
eb18168a4e | |
|
|
6b2809b82e | |
|
|
aa80b55567 | |
|
|
4186541724 | |
|
|
f0deda1fda | |
|
|
8f369b5132 | |
|
|
aa3432f2a4 | |
|
|
222b90940c | |
|
|
c91373148a | |
|
|
f1af87007e | |
|
|
13adaa110a | |
|
|
9e10064bda | |
|
|
bde355dcd5 | |
|
|
b021772a1e | |
|
|
03406e020c | |
|
|
b0acc9ffe8 | |
|
|
fc325a621b | |
|
|
d5ba9be3a9 | |
|
|
639186aa37 | |
|
|
182218a776 | |
|
|
6de17a3949 | |
|
|
41a3297b9f | |
|
|
255db4b127 | |
|
|
66a7d660f6 | |
|
|
f199cac5e8 | |
|
|
9b393338ca | |
|
|
4edf36a895 | |
|
|
bfd1864180 | |
|
|
3345962253 | |
|
|
3c8b1aa888 | |
|
|
d4f1a02f43 | |
|
|
c5291b7f33 | |
|
|
8f0ca44b79 | |
|
|
2fd9c0044b | |
|
|
79f4197d26 | |
|
|
b71d96fdee | |
|
|
4a8e1f56ae | |
|
|
a283d8c05a | |
|
|
c2bbb7e259 | |
|
|
2764d82c1a | |
|
|
824801d2ba | |
|
|
0fe6f63012 | |
|
|
8d190bb505 | |
|
|
514fb1a4ac | |
|
|
684253ab11 | |
|
|
9af2a4e739 | |
|
|
141a842d3d | |
|
|
61c5613943 | |
|
|
5b29dd5d2b | |
|
|
a58c1cad91 | |
|
|
e1d96099fc | |
|
|
ccd60b0c6e | |
|
|
c1c93e08a2 | |
|
|
bb60a6d623 | |
|
|
6ef06be6d0 | |
|
|
f8222356ce | |
|
|
4b9d638be9 | |
|
|
35ebc087dd | |
|
|
6b18fcd437 | |
|
|
00d1c8ea29 | |
|
|
8da7a1ca36 | |
|
|
5cdfee3bcf | |
|
|
64d506970a | |
|
|
de7b114303 | |
|
|
f195c5ec47 | |
|
|
92713af63e | |
|
|
4a08d586cd | |
|
|
607e1dcf45 | |
|
|
b057a1681c | |
|
|
82bee3c55b | |
|
|
4afab9ca47 | |
|
|
53409f2942 | |
|
|
7f00921be1 | |
|
|
a9b3336318 | |
|
|
978691c668 | |
|
|
4b92e14c92 | |
|
|
dbff7e6cd0 | |
|
|
125876185d | |
|
|
5ea324da5e | |
|
|
d1b4d4be52 | |
|
|
32f7742e53 | |
|
|
46066c02e4 | |
|
|
bac84a5e23 | |
|
|
950a2ec30f | |
|
|
50e02295a9 | |
|
|
cb998a2b2f | |
|
|
71e8d466ae | |
|
|
6cd19c408e | |
|
|
a796fb7103 | |
|
|
0332604044 | |
|
|
90bd757b48 | |
|
|
0263599cef | |
|
|
96960982ff | |
|
|
c7f153c266 | |
|
|
8ff682440d | |
|
|
7db5bbffc5 | |
|
|
59fa9dc452 | |
|
|
6b1558b675 | |
|
|
548fbe725b | |
|
|
f64447148e | |
|
|
b0f0971ad4 | |
|
|
3b056fd761 | |
|
|
3246b3a3bc | |
|
|
3613c37a6f | |
|
|
00dbf55fd3 | |
|
|
89c2137fc9 | |
|
|
535fd06f73 | |
|
|
097101f8d3 | |
|
|
b8d37060ec | |
|
|
0ffb4f0db1 | |
|
|
c10c34d717 | |
|
|
ad28f0c9b3 | |
|
|
f83e06d371 | |
|
|
6a5d33b7ed | |
|
|
31cc33c66c | |
|
|
ad44d59f3d | |
|
|
2f1a97e73e | |
|
|
18b4618b5f | |
|
|
54386900e0 | |
|
|
028bc3256f | |
|
|
4bc7569981 | |
|
|
15a47dc4f7 | |
|
|
d98f06314d | |
|
|
5bab7648e2 | |
|
|
d099466d21 | |
|
|
1c00668d20 | |
|
|
d51c19fe3d | |
|
|
b9ae41a161 | |
|
|
2e522d003f | |
|
|
56a46b1bf0 | |
|
|
830df00152 | |
|
|
4b3c6b7e39 | |
|
|
4b843d6219 | |
|
|
fa2893cc87 | |
|
|
4d935dcfb0 | |
|
|
b3387aca61 | |
|
|
a0091b77d8 | |
|
|
7bde00d711 | |
|
|
b992ff73da | |
|
|
9dc7602f21 | |
|
|
eaddde94c5 | |
|
|
a520951928 | |
|
|
cbd47d800e | |
|
|
aefdc9c094 | |
|
|
07ba69f697 | |
|
|
cbfabac813 | |
|
|
24c9c5397f | |
|
|
e92972a5f4 | |
|
|
da03deddf1 | |
|
|
50ed461996 | |
|
|
92ac95ce24 | |
|
|
defe34dec2 | |
|
|
9c11b2b04d | |
|
|
e1857413a3 | |
|
|
8b85b023f5 | |
|
|
117d6177e8 | |
|
|
da770f70d6 | |
|
|
cc6b2d4057 | |
|
|
0d4d461c41 | |
|
|
7dc9808480 | |
|
|
c67a04f978 | |
|
|
7656326484 | |
|
|
8bab8e8bde | |
|
|
e3b1c13eba | |
|
|
b22ee84d26 | |
|
|
683288c8db | |
|
|
fded92115a | |
|
|
953976d588 | |
|
|
e07e7da0b5 | |
|
|
429f8f4e13 | |
|
|
7010a39bd3 | |
|
|
c03f6f917e | |
|
|
888af6025b | |
|
|
ee03b8a214 | |
|
|
f17fd35ccb | |
|
|
51de6bd1bc | |
|
|
70d974fc99 | |
|
|
f992b9f2e8 | |
|
|
15cf54fc45 | |
|
|
77764aceef | |
|
|
8347a78276 | |
|
|
9f3a00c65e | |
|
|
5d1a0da5e6 | |
|
|
45f499cf3a | |
|
|
74b6871bfd | |
|
|
a67975f8f5 | |
|
|
753724252d | |
|
|
1d1cd9c51a | |
|
|
f32a9657c0 | |
|
|
799416661e | |
|
|
d83e0eb665 | |
|
|
32eb2df5aa | |
|
|
e17603402f | |
|
|
efb69f9bf9 | |
|
|
506575e4ca | |
|
|
eb5db36013 | |
|
|
f0155b4525 | |
|
|
74d6ffabf2 | |
|
|
e4e04c516f | |
|
|
fee20103c6 | |
|
|
dfc92352b3 | |
|
|
65e918298b | |
|
|
cc9af5758d | |
|
|
ca1d7c28ea | |
|
|
cc69d86baf | |
|
|
3c498c2eac | |
|
|
958e91962b | |
|
|
34b26862ad | |
|
|
2801ccf229 | |
|
|
94b735ed96 | |
|
|
dc31f0dac9 | |
|
|
846aff2724 | |
|
|
1d1c7cb3e8 | |
|
|
8baaeb414f | |
|
|
1c01608c72 | |
|
|
88686e2271 | |
|
|
203d0aceb4 | |
|
|
71693ac3dd | |
|
|
97b9d417d2 | |
|
|
26a3ff6b37 | |
|
|
8690a88e50 | |
|
|
aa4a4be668 | |
|
|
9e2133e3be | |
|
|
1567dfc3e2 | |
|
|
d716d8b6b4 | |
|
|
0653a70f2b | |
|
|
0b28b54e11 | |
|
|
648695a325 | |
|
|
62bb11975f | |
|
|
ae42b91384 | |
|
|
dbebcc54cc | |
|
|
fb94ecd729 | |
|
|
b3e3e0ff85 | |
|
|
8ac9ccf65d | |
|
|
3bccdf6de4 | |
|
|
7686dd7a15 | |
|
|
7b92d2b1cb | |
|
|
939f198dd9 | |
|
|
e87f688c8d | |
|
|
ffbe471790 | |
|
|
0df557d2dd | |
|
|
99a2e13c91 | |
|
|
d33eb15884 | |
|
|
c2fc6293aa | |
|
|
9de2fff273 | |
|
|
8f18c9febf | |
|
|
ed72974ec4 | |
|
|
e1f612996c | |
|
|
fc83f4ecf0 | |
|
|
09eed9d7e1 | |
|
|
b56b3aa890 | |
|
|
bc87c51ff1 | |
|
|
8468bcca36 | |
|
|
a38ac07af5 | |
|
|
48606b6c77 | |
|
|
4251ee4c51 | |
|
|
89bc5ab8c4 | |
|
|
e1e87c95c5 | |
|
|
c5985169cc | |
|
|
e77333eb73 | |
|
|
ae434ae8a4 | |
|
|
8c23f83889 | |
|
|
b06754db3a | |
|
|
213e083dc6 | |
|
|
154ef67c8e | |
|
|
470d349ef1 | |
|
|
627c514614 | |
|
|
33fcc036bd | |
|
|
799306ec4c | |
|
|
aace10ccfb | |
|
|
0272936fdc | |
|
|
db31bbfee2 | |
|
|
96738a094f | |
|
|
ba81e5106c | |
|
|
d927ed82d8 | |
|
|
9324d82ff1 | |
|
|
7f70e09c33 | |
|
|
a80829a702 | |
|
|
3a7e3505b4 | |
|
|
e27d63b75f | |
|
|
e8bd834b5b | |
|
|
863751b47b | |
|
|
46c8dbef1f | |
|
|
e7dbb52b34 | |
|
|
d044629cce | |
|
|
8832cdfe0d | |
|
|
f6fc43d58d | |
|
|
cdc513f25d | |
|
|
9eaee7a060 | |
|
|
63c087f08d | |
|
|
d5f80365b5 | |
|
|
d20f711fb0 | |
|
|
21509791e3 | |
|
|
ce6974690b | |
|
|
972325a28d | |
|
|
b4f890bd58 | |
|
|
e2fa5a4d05 | |
|
|
2f4c019f39 | |
|
|
2b1dbcb541 | |
|
|
49ebdc2e6a | |
|
|
daf37ed24c | |
|
|
0701874033 | |
|
|
4621c8c1b9 | |
|
|
a69f1a61a5 | |
|
|
0c9e1be883 | |
|
|
8731ab3134 | |
|
|
b38ff36e04 | |
|
|
819889702f | |
|
|
a36ee01592 | |
|
|
dd9fe0b043 | |
|
|
e10ab9741d | |
|
|
91a970091f | |
|
|
5bf550b64a | |
|
|
a3a3d0b8cb | |
|
|
c1e0328669 | |
|
|
cfb74e588d | |
|
|
3d2b6613e8 | |
|
|
2b124447c8 | |
|
|
5ffdda762a | |
|
|
9082efbe68 | |
|
|
14f34c111a | |
|
|
f947bdf80c | |
|
|
dbd79d8beb | |
|
|
15a4a2a51e | |
|
|
ebf9909cc4 | |
|
|
2d541fdd9b | |
|
|
5f0bfeae57 | |
|
|
8b0b4abb3c | |
|
|
51bd38976f | |
|
|
4868bf225c | |
|
|
f834b35aa9 | |
|
|
6d671f69b8 | |
|
|
94c89fd425 | |
|
|
0246c824b9 | |
|
|
2e17b084b2 | |
|
|
61d82d47c2 | |
|
|
7246749137 | |
|
|
4db377c01d | |
|
|
ef4c4be0bb | |
|
|
7ce4bc489e | |
|
|
dec2b1f0f5 | |
|
|
3ccbfd7e54 | |
|
|
8d318a8ac5 | |
|
|
d5eec6eb6c | |
|
|
a88564549a | |
|
|
f028181e19 | |
|
|
3a317c1581 | |
|
|
65e49696e7 | |
|
|
e834297503 | |
|
|
e3bb9c914c | |
|
|
526add2cae | |
|
|
1fb4d7318b | |
|
|
199ca48cc4 | |
|
|
5b3bcbaa7d | |
|
|
8647421ef9 | |
|
|
ba9448d52f | |
|
|
f5c35dca55 | |
|
|
cebc2cb515 | |
|
|
5042f1fdb8 | |
|
|
5912fecdc9 | |
|
|
cca4f952ed | |
|
|
ab0c0fb71d | |
|
|
b00ba158f1 | |
|
|
93f489e263 | |
|
|
fa5f458de0 | |
|
|
6de4a5a9f3 | |
|
|
ab8bd9b787 | |
|
|
1deed8dbee | |
|
|
36d2aa1852 | |
|
|
f0417d802b | |
|
|
62a0fff2fd | |
|
|
d65e4bbad7 | |
|
|
ee372933a7 | |
|
|
96cdcd8f39 | |
|
|
bc13599e1f | |
|
|
54576851e9 | |
|
|
2a5ff82061 | |
|
|
f2d3f0cc21 | |
|
|
6b282bfa06 | |
|
|
11bab13a06 | |
|
|
9a8cd13894 | |
|
|
3706abca71 | |
|
|
771fc33801 | |
|
|
a87df3009f | |
|
|
05f28c8728 | |
|
|
85825cdd76 | |
|
|
a5bc113fde | |
|
|
4f7823cf55 | |
|
|
544cb40533 | |
|
|
389b305d3b | |
|
|
1975b92dba | |
|
|
31ccdd79d7 | |
|
|
cbaf4fc05b | |
|
|
68a3969585 | |
|
|
cf68e075c9 | |
|
|
f730749dc9 | |
|
|
c8775dee41 | |
|
|
fd2391539e | |
|
|
8e3a2a9297 | |
|
|
f90ca0668b | |
|
|
36a81a60cd | |
|
|
c2480c2b97 | |
|
|
7b1528abed | |
|
|
c5228e7be5 | |
|
|
9966dbdfc1 | |
|
|
7fb1c45ac7 | |
|
|
59d6d0cd7f | |
|
|
ffed35e263 | |
|
|
885ba04908 | |
|
|
1879243257 | |
|
|
4fb34772e7 | |
|
|
1c9589cfc4 | |
|
|
910c07db06 | |
|
|
d8d206b93f | |
|
|
fb55784798 | |
|
|
1bc858cd00 | |
|
|
04aea5c4db | |
|
|
7bb44e6930 | |
|
|
2cc712cd81 | |
|
|
c421f7e722 | |
|
|
1c217ef36f | |
|
|
d7f2f51f7f | |
|
|
cfcbc4da01 | |
|
|
664ae87588 | |
|
|
e1d7004aec | |
|
|
a97b45d90b | |
|
|
a388d3185b | |
|
|
4d0df1bb4a | |
|
|
5eb62b3e9b | |
|
|
1be296c725 | |
|
|
9420ea0c14 | |
|
|
9194e5774b | |
|
|
51a3f1bef4 | |
|
|
ca1b8e0224 | |
|
|
e403d63eb7 | |
|
|
3c385c6949 | |
|
|
b28df738fe | |
|
|
5fa040c7db | |
|
|
27b750e907 | |
|
|
96150600fb | |
|
|
338ea5529c | |
|
|
6bc67338cf | |
|
|
fd20004757 | |
|
|
ddc2e5f0f8 | |
|
|
4b0aa5e379 | |
|
|
6a303358df | |
|
|
c85757aee1 | |
|
|
9fc9b10b53 | |
|
|
a86275996c | |
|
|
b5431c0343 | |
|
|
cdee6f9354 | |
|
|
a2f1bcc23f | |
|
|
4aa89bf391 | |
|
|
45e9cb4d09 | |
|
|
27c5ffe5a7 | |
|
|
914efd80eb | |
|
|
2d2d1ca1c4 | |
|
|
74aa5aa9cd | |
|
|
44e386dd99 | |
|
|
13fbcc723f | |
|
|
315f0fc7eb | |
|
|
fea111e882 | |
|
|
a1bf4db1e3 | |
|
|
bac9523ecf | |
|
|
abe31e9e2c | |
|
|
0222180c11 | |
|
|
7d5fda4485 | |
|
|
f5fcd8ca2e | |
|
|
04217f319a | |
|
|
8cb8390201 | |
|
|
5035617adf | |
|
|
715348c5c2 | |
|
|
fdf0c43bfa | |
|
|
f895c96600 | |
|
|
ca1a1476bb | |
|
|
a7c36a9cbe | |
|
|
22e4b324b1 | |
|
|
89ed8b67ff | |
|
|
11bbf15817 | |
|
|
a18663213a | |
|
|
d4d09b6071 | |
|
|
6d10f0c516 | |
|
|
fa9b57bae0 | |
|
|
81776a6238 | |
|
|
144d1f4d94 | |
|
|
51fdf3524c | |
|
|
cff69d07fe | |
|
|
ee94d6d62c | |
|
|
89b84ed6c0 | |
|
|
f33f689f34 | |
|
|
7507e269ec | |
|
|
17ae449160 | |
|
|
6495688730 | |
|
|
a0276f41c2 | |
|
|
ead9e418de | |
|
|
60791ed546 | |
|
|
7293b82bcc | |
|
|
20d75ff934 | |
|
|
041d7da721 | |
|
|
04e4397a8f | |
|
|
968f13f9ef | |
|
|
f9911c22a4 | |
|
|
63adf73b4b | |
|
|
f1e9c0be93 | |
|
|
6db656fecf | |
|
|
6994d2026d | |
|
|
c72026091e | |
|
|
90e41016b9 | |
|
|
f54c415060 | |
|
|
03644f59cc | |
|
|
67f82c6ebd | |
|
|
71cd445319 | |
|
|
220b244508 | |
|
|
831790377b | |
|
|
e80e0a551f | |
|
|
b3f9251eda | |
|
|
903537ce04 | |
|
|
d75343106b | |
|
|
cfb2bc0fee | |
|
|
e5ee2e3de8 | |
|
|
41aa91c8eb | |
|
|
6758e4487c | |
|
|
1c3893a383 | |
|
|
73befac9bc | |
|
|
79622bbeea | |
|
|
95535b2226 | |
|
|
87c6e09d6b | |
|
|
9ccd3a74b6 | |
|
|
ae4ff5dc8d | |
|
|
705538398f | |
|
|
86aef5238d | |
|
|
cc82447db6 | |
|
|
23cffbd940 | |
|
|
3d202272c4 | |
|
|
63cdb0891f | |
|
|
0f7db27b68 | |
|
|
c53d62d2f7 | |
|
|
f667d16d66 | |
|
|
24a062341e | |
|
|
e714bec8db | |
|
|
009cd6552e | |
|
|
649c5e7504 | |
|
|
203f95615c | |
|
|
efb8bec828 | |
|
|
8637778739 | |
|
|
47166e45f0 | |
|
|
4ce2dcd12b | |
|
|
80f983818f | |
|
|
6ba29f8d56 | |
|
|
2707a0e971 | |
|
|
c8efcdd0d3 | |
|
|
9f9907271b | |
|
|
c2367c1c5e | |
|
|
a777217674 | |
|
|
13c9eadc8f | |
|
|
af6c325072 | |
|
|
195d2f0ed4 | |
|
|
aa4871b13d | |
|
|
556f4626db | |
|
|
3967c0ed9e | |
|
|
e34823aab4 | |
|
|
6c35ba2cb6 | |
|
|
3a0817ff55 | |
|
|
7fddb4416b | |
|
|
1d92f2552a | |
|
|
4f8586a928 | |
|
|
fb9ff45745 | |
|
|
36a83cb306 | |
|
|
7394a187e0 | |
|
|
df01294bb2 | |
|
|
ddf3d0d1b3 | |
|
|
158569adae | |
|
|
97d5f7233b | |
|
|
d27c081a15 | |
|
|
a4874a3227 | |
|
|
de04bbb2bb | |
|
|
4f977189c0 | |
|
|
9fd62cf71f | |
|
|
606efa5bb7 | |
|
|
121a8cc891 | |
|
|
c54b8ca4ba | |
|
|
de93c8257c |
|
|
@ -8,46 +8,70 @@ on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
# ------ sdist ------
|
||||||
mypy:
|
|
||||||
name: 'MyPy'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Setup python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: '3.10'
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: pip install -U . --upgrade-strategy eager -r requirements-test.txt
|
|
||||||
|
|
||||||
- name: Run MyPy check
|
|
||||||
run: mypy tractor/ --ignore-missing-imports --show-traceback
|
|
||||||
|
|
||||||
# test that we can generate a software distribution and install it
|
# test that we can generate a software distribution and install it
|
||||||
# thus avoid missing file issues after packaging.
|
# thus avoid missing file issues after packaging.
|
||||||
|
#
|
||||||
|
# -[x] produce sdist with uv
|
||||||
|
# ------ - ------
|
||||||
sdist-linux:
|
sdist-linux:
|
||||||
name: 'sdist'
|
name: 'sdist'
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Setup python
|
- name: Install latest uv
|
||||||
uses: actions/setup-python@v2
|
uses: astral-sh/setup-uv@v6
|
||||||
with:
|
|
||||||
python-version: '3.10'
|
|
||||||
|
|
||||||
- name: Build sdist
|
- name: Build sdist as tar.gz
|
||||||
run: python setup.py sdist --formats=zip
|
run: uv build --sdist --python=3.13
|
||||||
|
|
||||||
- name: Install sdist from .zips
|
- name: Install sdist from .tar.gz
|
||||||
run: python -m pip install dist/*.zip
|
run: python -m pip install dist/*.tar.gz
|
||||||
|
|
||||||
|
# ------ type-check ------
|
||||||
|
# mypy:
|
||||||
|
# name: 'MyPy'
|
||||||
|
# runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
# steps:
|
||||||
|
# - name: Checkout
|
||||||
|
# uses: actions/checkout@v4
|
||||||
|
|
||||||
|
# - name: Install latest uv
|
||||||
|
# uses: astral-sh/setup-uv@v6
|
||||||
|
|
||||||
|
# # faster due to server caching?
|
||||||
|
# # https://docs.astral.sh/uv/guides/integration/github/#setting-up-python
|
||||||
|
# - name: "Set up Python"
|
||||||
|
# uses: actions/setup-python@v6
|
||||||
|
# with:
|
||||||
|
# python-version-file: "pyproject.toml"
|
||||||
|
|
||||||
|
# # w uv
|
||||||
|
# # - name: Set up Python
|
||||||
|
# # run: uv python install
|
||||||
|
|
||||||
|
# - name: Setup uv venv
|
||||||
|
# run: uv venv .venv --python=3.13
|
||||||
|
|
||||||
|
# - name: Install
|
||||||
|
# run: uv sync --dev
|
||||||
|
|
||||||
|
# # TODO, ty cmd over repo
|
||||||
|
# # - name: type check with ty
|
||||||
|
# # run: ty ./tractor/
|
||||||
|
|
||||||
|
# # - uses: actions/cache@v3
|
||||||
|
# # name: Cache uv virtenv as default .venv
|
||||||
|
# # with:
|
||||||
|
# # path: ./.venv
|
||||||
|
# # key: venv-${{ hashFiles('uv.lock') }}
|
||||||
|
|
||||||
|
# - name: Run MyPy check
|
||||||
|
# run: mypy tractor/ --ignore-missing-imports --show-traceback
|
||||||
|
|
||||||
|
|
||||||
testing-linux:
|
testing-linux:
|
||||||
|
|
@ -59,32 +83,45 @@ jobs:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
os: [ubuntu-latest]
|
os: [ubuntu-latest]
|
||||||
python: ['3.10']
|
python-version: ['3.13']
|
||||||
spawn_backend: [
|
spawn_backend: [
|
||||||
'trio',
|
'trio',
|
||||||
'mp_spawn',
|
# 'mp_spawn',
|
||||||
'mp_forkserver',
|
# 'mp_forkserver',
|
||||||
]
|
]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
- name: Checkout
|
- uses: actions/checkout@v4
|
||||||
uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Setup python
|
- name: 'Install uv + py-${{ matrix.python-version }}'
|
||||||
uses: actions/setup-python@v2
|
uses: astral-sh/setup-uv@v6
|
||||||
with:
|
with:
|
||||||
python-version: '${{ matrix.python }}'
|
python-version: ${{ matrix.python-version }}
|
||||||
|
|
||||||
- name: Install dependencies
|
# GH way.. faster?
|
||||||
run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager
|
# - name: setup-python@v6
|
||||||
|
# uses: actions/setup-python@v6
|
||||||
|
# with:
|
||||||
|
# python-version: '${{ matrix.python-version }}'
|
||||||
|
|
||||||
- name: List dependencies
|
# consider caching for speedups?
|
||||||
run: pip list
|
# https://docs.astral.sh/uv/guides/integration/github/#caching
|
||||||
|
|
||||||
|
- name: Install the project w uv
|
||||||
|
run: uv sync --all-extras --dev
|
||||||
|
|
||||||
|
# - name: Install dependencies
|
||||||
|
# run: pip install -U . -r requirements-test.txt -r requirements-docs.txt --upgrade-strategy eager
|
||||||
|
|
||||||
|
- name: List deps tree
|
||||||
|
run: uv tree
|
||||||
|
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx
|
run: uv run pytest tests/ --spawn-backend=${{ matrix.spawn_backend }} -rsx
|
||||||
|
|
||||||
|
# XXX legacy NOTE XXX
|
||||||
|
#
|
||||||
# We skip 3.10 on windows for now due to not having any collabs to
|
# We skip 3.10 on windows for now due to not having any collabs to
|
||||||
# debug the CI failures. Anyone wanting to hack and solve them is very
|
# debug the CI failures. Anyone wanting to hack and solve them is very
|
||||||
# welcome, but our primary user base is not using that OS.
|
# welcome, but our primary user base is not using that OS.
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,19 @@
|
||||||
|
{ pkgs ? import <nixpkgs> {} }:
|
||||||
|
let
|
||||||
|
nativeBuildInputs = with pkgs; [
|
||||||
|
stdenv.cc.cc.lib
|
||||||
|
uv
|
||||||
|
];
|
||||||
|
|
||||||
|
in
|
||||||
|
pkgs.mkShell {
|
||||||
|
inherit nativeBuildInputs;
|
||||||
|
|
||||||
|
LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath nativeBuildInputs;
|
||||||
|
TMPDIR = "/tmp";
|
||||||
|
|
||||||
|
shellHook = ''
|
||||||
|
set -e
|
||||||
|
uv venv .venv --python=3.12
|
||||||
|
'';
|
||||||
|
}
|
||||||
188
docs/README.rst
188
docs/README.rst
|
|
@ -1,40 +1,126 @@
|
||||||
|logo| ``tractor``: next-gen Python parallelism
|
|logo| ``tractor``: distributed structurred concurrency
|
||||||
|
|
||||||
|gh_actions|
|
``tractor`` is a `structured concurrency`_ (SC), multi-processing_ runtime built on trio_.
|
||||||
|docs|
|
|
||||||
|
|
||||||
``tractor`` is a `structured concurrent`_, multi-processing_ runtime
|
Fundamentally, ``tractor`` provides parallelism via
|
||||||
built on trio_.
|
``trio``-"*actors*": independent Python **processes** (i.e.
|
||||||
|
*non-shared-memory threads*) which can schedule ``trio`` tasks whilst
|
||||||
|
maintaining *end-to-end SC* inside a *distributed supervision tree*.
|
||||||
|
|
||||||
Fundamentally ``tractor`` gives you parallelism via ``trio``-"*actors*":
|
Cross-process (and thus cross-host) SC is accomplished through the
|
||||||
our nurseries_ let you spawn new Python processes which each run a ``trio``
|
combined use of our,
|
||||||
scheduled runtime - a call to ``trio.run()``.
|
|
||||||
|
- "actor nurseries_" which provide for spawning multiple, and
|
||||||
|
possibly nested, Python processes each running a ``trio`` scheduled
|
||||||
|
runtime - a call to ``trio.run()``,
|
||||||
|
- an "SC-transitive supervision protocol" enforced as an
|
||||||
|
IPC-message-spec encapsulating all RPC-dialogs.
|
||||||
|
|
||||||
We believe the system adheres to the `3 axioms`_ of an "`actor model`_"
|
We believe the system adheres to the `3 axioms`_ of an "`actor model`_"
|
||||||
but likely *does not* look like what *you* probably think an "actor
|
but likely **does not** look like what **you** probably *think* an "actor
|
||||||
model" looks like, and that's *intentional*.
|
model" looks like, and that's **intentional**.
|
||||||
|
|
||||||
The first step to grok ``tractor`` is to get the basics of ``trio`` down.
|
|
||||||
A great place to start is the `trio docs`_ and this `blog post`_.
|
Where do i start!?
|
||||||
|
------------------
|
||||||
|
The first step to grok ``tractor`` is to get an intermediate
|
||||||
|
knowledge of ``trio`` and **structured concurrency** B)
|
||||||
|
|
||||||
|
Some great places to start are,
|
||||||
|
|
||||||
|
- the seminal `blog post`_
|
||||||
|
- obviously the `trio docs`_
|
||||||
|
- wikipedia's nascent SC_ page
|
||||||
|
- the fancy diagrams @ libdill-docs_
|
||||||
|
|
||||||
|
|
||||||
Features
|
Features
|
||||||
--------
|
--------
|
||||||
- **It's just** a ``trio`` API
|
- **It's just** a ``trio`` API!
|
||||||
- *Infinitely nesteable* process trees
|
- *Infinitely nesteable* process trees running embedded ``trio`` tasks.
|
||||||
- Builtin IPC streaming APIs with task fan-out broadcasting
|
- Swappable, OS-specific, process spawning via multiple backends.
|
||||||
- A (first ever?) "native" multi-core debugger UX for Python using `pdb++`_
|
- Modular IPC stack, allowing for custom interchange formats (eg.
|
||||||
- Support for a swappable, OS specific, process spawning layer
|
as offered from `msgspec`_), varied transport protocols (TCP, RUDP,
|
||||||
- A modular transport stack, allowing for custom serialization (eg. with
|
QUIC, wireguard), and OS-env specific higher-perf primitives (UDS,
|
||||||
`msgspec`_), communications protocols, and environment specific IPC
|
shm-ring-buffers).
|
||||||
primitives
|
- Optionally distributed_: all IPC and RPC APIs work over multi-host
|
||||||
- Support for spawning process-level-SC, inter-loop one-to-one-task oriented
|
transports the same as local.
|
||||||
``asyncio`` actors via "infected ``asyncio``" mode
|
- Builtin high-level streaming API that enables your app to easily
|
||||||
- `structured chadcurrency`_ from the ground up
|
leverage the benefits of a "`cheap or nasty`_" `(un)protocol`_.
|
||||||
|
- A "native UX" around a multi-process safe debugger REPL using
|
||||||
|
`pdbp`_ (a fork & fix of `pdb++`_)
|
||||||
|
- "Infected ``asyncio``" mode: support for starting an actor's
|
||||||
|
runtime as a `guest`_ on the ``asyncio`` loop allowing us to
|
||||||
|
provide stringent SC-style ``trio.Task``-supervision around any
|
||||||
|
``asyncio.Task`` spawned via our ``tractor.to_asyncio`` APIs.
|
||||||
|
- A **very naive** and still very much work-in-progress inter-actor
|
||||||
|
`discovery`_ sys with plans to support multiple `modern protocol`_
|
||||||
|
approaches.
|
||||||
|
- Various ``trio`` extension APIs via ``tractor.trionics`` such as,
|
||||||
|
- task fan-out `broadcasting`_,
|
||||||
|
- multi-task-single-resource-caching and fan-out-to-multi
|
||||||
|
``__aenter__()`` APIs for ``@acm`` functions,
|
||||||
|
- (WIP) a ``TaskMngr``: one-cancels-one style nursery supervisor.
|
||||||
|
|
||||||
|
|
||||||
|
Status of `main` / infra
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
- |gh_actions|
|
||||||
|
- |docs|
|
||||||
|
|
||||||
|
|
||||||
|
Install
|
||||||
|
-------
|
||||||
|
``tractor`` is still in a *alpha-near-beta-stage* for many
|
||||||
|
of its subsystems, however we are very close to having a stable
|
||||||
|
lowlevel runtime and API.
|
||||||
|
|
||||||
|
As such, it's currently recommended that you clone and install the
|
||||||
|
repo from source::
|
||||||
|
|
||||||
|
pip install git+git://github.com/goodboy/tractor.git
|
||||||
|
|
||||||
|
|
||||||
|
We use the very hip `uv`_ for project mgmt::
|
||||||
|
|
||||||
|
git clone https://github.com/goodboy/tractor.git
|
||||||
|
cd tractor
|
||||||
|
uv sync --dev
|
||||||
|
uv run python examples/rpc_bidir_streaming.py
|
||||||
|
|
||||||
|
Consider activating a virtual/project-env before starting to hack on
|
||||||
|
the code base::
|
||||||
|
|
||||||
|
# you could use plain ol' venvs
|
||||||
|
# https://docs.astral.sh/uv/pip/environments/
|
||||||
|
uv venv tractor_py313 --python 3.13
|
||||||
|
|
||||||
|
# but @goodboy prefers the more explicit (and shell agnostic)
|
||||||
|
# https://docs.astral.sh/uv/configuration/environment/#uv_project_environment
|
||||||
|
UV_PROJECT_ENVIRONMENT="tractor_py313
|
||||||
|
|
||||||
|
# hint hint, enter @goodboy's fave shell B)
|
||||||
|
uv run --dev xonsh
|
||||||
|
|
||||||
|
Alongside all this we ofc offer "releases" on PyPi::
|
||||||
|
|
||||||
|
pip install tractor
|
||||||
|
|
||||||
|
Just note that YMMV since the main git branch is often much further
|
||||||
|
ahead then any latest release.
|
||||||
|
|
||||||
|
|
||||||
|
Example codez
|
||||||
|
-------------
|
||||||
|
In ``tractor``'s (very lacking) documention we prefer to point to
|
||||||
|
example scripts in the repo over duplicating them in docs, but with
|
||||||
|
that in mind here are some definitive snippets to try and hook you
|
||||||
|
into digging deeper.
|
||||||
|
|
||||||
|
|
||||||
Run a func in a process
|
Run a func in a process
|
||||||
-----------------------
|
***********************
|
||||||
Use ``trio``'s style of focussing on *tasks as functions*:
|
Use ``trio``'s style of focussing on *tasks as functions*:
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
@ -92,7 +178,7 @@ might want to check out `trio-parallel`_.
|
||||||
|
|
||||||
|
|
||||||
Zombie safe: self-destruct a process tree
|
Zombie safe: self-destruct a process tree
|
||||||
-----------------------------------------
|
*****************************************
|
||||||
``tractor`` tries to protect you from zombies, no matter what.
|
``tractor`` tries to protect you from zombies, no matter what.
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
@ -118,7 +204,7 @@ Zombie safe: self-destruct a process tree
|
||||||
f"running in pid {os.getpid()}"
|
f"running in pid {os.getpid()}"
|
||||||
)
|
)
|
||||||
|
|
||||||
await trio.sleep_forever()
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
|
|
@ -148,8 +234,8 @@ it **is a bug**.
|
||||||
|
|
||||||
|
|
||||||
"Native" multi-process debugging
|
"Native" multi-process debugging
|
||||||
--------------------------------
|
********************************
|
||||||
Using the magic of `pdb++`_ and our internal IPC, we've
|
Using the magic of `pdbp`_ and our internal IPC, we've
|
||||||
been able to create a native feeling debugging experience for
|
been able to create a native feeling debugging experience for
|
||||||
any (sub-)process in your ``tractor`` tree.
|
any (sub-)process in your ``tractor`` tree.
|
||||||
|
|
||||||
|
|
@ -203,7 +289,7 @@ We're hoping to add a respawn-from-repl system soon!
|
||||||
|
|
||||||
|
|
||||||
SC compatible bi-directional streaming
|
SC compatible bi-directional streaming
|
||||||
--------------------------------------
|
**************************************
|
||||||
Yes, you saw it here first; we provide 2-way streams
|
Yes, you saw it here first; we provide 2-way streams
|
||||||
with reliable, transitive setup/teardown semantics.
|
with reliable, transitive setup/teardown semantics.
|
||||||
|
|
||||||
|
|
@ -295,7 +381,7 @@ hear your thoughts on!
|
||||||
|
|
||||||
|
|
||||||
Worker poolz are easy peasy
|
Worker poolz are easy peasy
|
||||||
---------------------------
|
***************************
|
||||||
The initial ask from most new users is *"how do I make a worker
|
The initial ask from most new users is *"how do I make a worker
|
||||||
pool thing?"*.
|
pool thing?"*.
|
||||||
|
|
||||||
|
|
@ -317,10 +403,10 @@ This uses no extra threads, fancy semaphores or futures; all we need
|
||||||
is ``tractor``'s IPC!
|
is ``tractor``'s IPC!
|
||||||
|
|
||||||
"Infected ``asyncio``" mode
|
"Infected ``asyncio``" mode
|
||||||
---------------------------
|
***************************
|
||||||
Have a bunch of ``asyncio`` code you want to force to be SC at the process level?
|
Have a bunch of ``asyncio`` code you want to force to be SC at the process level?
|
||||||
|
|
||||||
Check out our experimental system for `guest-mode`_ controlled
|
Check out our experimental system for `guest`_-mode controlled
|
||||||
``asyncio`` actors:
|
``asyncio`` actors:
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
@ -426,7 +512,7 @@ We need help refining the `asyncio`-side channel API to be more
|
||||||
|
|
||||||
|
|
||||||
Higher level "cluster" APIs
|
Higher level "cluster" APIs
|
||||||
---------------------------
|
***************************
|
||||||
To be extra terse the ``tractor`` devs have started hacking some "higher
|
To be extra terse the ``tractor`` devs have started hacking some "higher
|
||||||
level" APIs for managing actor trees/clusters. These interfaces should
|
level" APIs for managing actor trees/clusters. These interfaces should
|
||||||
generally be condsidered provisional for now but we encourage you to try
|
generally be condsidered provisional for now but we encourage you to try
|
||||||
|
|
@ -483,18 +569,6 @@ spawn a flat cluster:
|
||||||
.. _full worker pool re-implementation: https://github.com/goodboy/tractor/blob/master/examples/parallelism/concurrent_actors_primes.py
|
.. _full worker pool re-implementation: https://github.com/goodboy/tractor/blob/master/examples/parallelism/concurrent_actors_primes.py
|
||||||
|
|
||||||
|
|
||||||
Install
|
|
||||||
-------
|
|
||||||
From PyPi::
|
|
||||||
|
|
||||||
pip install tractor
|
|
||||||
|
|
||||||
|
|
||||||
From git::
|
|
||||||
|
|
||||||
pip install git+git://github.com/goodboy/tractor.git
|
|
||||||
|
|
||||||
|
|
||||||
Under the hood
|
Under the hood
|
||||||
--------------
|
--------------
|
||||||
``tractor`` is an attempt to pair trionic_ `structured concurrency`_ with
|
``tractor`` is an attempt to pair trionic_ `structured concurrency`_ with
|
||||||
|
|
@ -586,6 +660,7 @@ matrix seems too hip, we're also mostly all in the the `trio gitter
|
||||||
channel`_!
|
channel`_!
|
||||||
|
|
||||||
.. _structured concurrent: https://trio.discourse.group/t/concise-definition-of-structured-concurrency/228
|
.. _structured concurrent: https://trio.discourse.group/t/concise-definition-of-structured-concurrency/228
|
||||||
|
.. _distributed: https://en.wikipedia.org/wiki/Distributed_computing
|
||||||
.. _multi-processing: https://en.wikipedia.org/wiki/Multiprocessing
|
.. _multi-processing: https://en.wikipedia.org/wiki/Multiprocessing
|
||||||
.. _trio: https://github.com/python-trio/trio
|
.. _trio: https://github.com/python-trio/trio
|
||||||
.. _nurseries: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/#nurseries-a-structured-replacement-for-go-statements
|
.. _nurseries: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/#nurseries-a-structured-replacement-for-go-statements
|
||||||
|
|
@ -597,23 +672,32 @@ channel`_!
|
||||||
.. _adherance to: https://www.youtube.com/watch?v=7erJ1DV_Tlo&t=1821s
|
.. _adherance to: https://www.youtube.com/watch?v=7erJ1DV_Tlo&t=1821s
|
||||||
.. _trio gitter channel: https://gitter.im/python-trio/general
|
.. _trio gitter channel: https://gitter.im/python-trio/general
|
||||||
.. _matrix channel: https://matrix.to/#/!tractor:matrix.org
|
.. _matrix channel: https://matrix.to/#/!tractor:matrix.org
|
||||||
|
.. _broadcasting: https://github.com/goodboy/tractor/pull/229
|
||||||
|
.. _modern procotol: https://en.wikipedia.org/wiki/Rendezvous_protocol
|
||||||
|
.. _pdbp: https://github.com/mdmintz/pdbp
|
||||||
.. _pdb++: https://github.com/pdbpp/pdbpp
|
.. _pdb++: https://github.com/pdbpp/pdbpp
|
||||||
.. _guest mode: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops
|
.. _cheap or nasty: https://zguide.zeromq.org/docs/chapter7/#The-Cheap-or-Nasty-Pattern
|
||||||
|
.. _(un)protocol: https://zguide.zeromq.org/docs/chapter7/#Unprotocols
|
||||||
|
.. _discovery: https://zguide.zeromq.org/docs/chapter8/#Discovery
|
||||||
|
.. _modern protocol: https://en.wikipedia.org/wiki/Rendezvous_protocol
|
||||||
.. _messages: https://en.wikipedia.org/wiki/Message_passing
|
.. _messages: https://en.wikipedia.org/wiki/Message_passing
|
||||||
.. _trio docs: https://trio.readthedocs.io/en/latest/
|
.. _trio docs: https://trio.readthedocs.io/en/latest/
|
||||||
.. _blog post: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/
|
.. _blog post: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/
|
||||||
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
|
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
|
||||||
.. _structured chadcurrency: https://en.wikipedia.org/wiki/Structured_concurrency
|
.. _SC: https://en.wikipedia.org/wiki/Structured_concurrency
|
||||||
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
|
.. _libdill-docs: https://sustrik.github.io/libdill/structured-concurrency.html
|
||||||
.. _unrequirements: https://en.wikipedia.org/wiki/Actor_model#Direct_communication_and_asynchrony
|
.. _unrequirements: https://en.wikipedia.org/wiki/Actor_model#Direct_communication_and_asynchrony
|
||||||
.. _async generators: https://www.python.org/dev/peps/pep-0525/
|
.. _async generators: https://www.python.org/dev/peps/pep-0525/
|
||||||
.. _trio-parallel: https://github.com/richardsheridan/trio-parallel
|
.. _trio-parallel: https://github.com/richardsheridan/trio-parallel
|
||||||
|
.. _uv: https://docs.astral.sh/uv/
|
||||||
.. _msgspec: https://jcristharif.com/msgspec/
|
.. _msgspec: https://jcristharif.com/msgspec/
|
||||||
.. _guest-mode: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops
|
.. _guest: https://trio.readthedocs.io/en/stable/reference-lowlevel.html?highlight=guest%20mode#using-guest-mode-to-run-trio-on-top-of-other-event-loops
|
||||||
|
|
||||||
|
..
|
||||||
.. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fgoodboy%2Ftractor%2Fbadge&style=popout-square
|
NOTE, on generating badge links from the UI
|
||||||
:target: https://actions-badge.atrox.dev/goodboy/tractor/goto
|
https://docs.github.com/en/actions/how-tos/monitoring-and-troubleshooting-workflows/monitoring-workflows/adding-a-workflow-status-badge?ref=gitguardian-blog-automated-secrets-detection#using-the-ui
|
||||||
|
.. |gh_actions| image:: https://github.com/goodboy/tractor/actions/workflows/ci.yml/badge.svg?branch=main
|
||||||
|
:target: https://github.com/goodboy/tractor/actions/workflows/ci.yml
|
||||||
|
|
||||||
.. |docs| image:: https://readthedocs.org/projects/tractor/badge/?version=latest
|
.. |docs| image:: https://readthedocs.org/projects/tractor/badge/?version=latest
|
||||||
:target: https://tractor.readthedocs.io/en/latest/?badge=latest
|
:target: https://tractor.readthedocs.io/en/latest/?badge=latest
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,262 @@
|
||||||
|
'''
|
||||||
|
Complex edge case where during real-time streaming the IPC tranport
|
||||||
|
channels are wiped out (purposely in this example though it could have
|
||||||
|
been an outage) and we want to ensure that despite being in debug mode
|
||||||
|
(or not) the user can sent SIGINT once they notice the hang and the
|
||||||
|
actor tree will eventually be cancelled without leaving any zombies.
|
||||||
|
|
||||||
|
'''
|
||||||
|
from contextlib import asynccontextmanager as acm
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
|
from tractor import (
|
||||||
|
open_nursery,
|
||||||
|
context,
|
||||||
|
Context,
|
||||||
|
ContextCancelled,
|
||||||
|
MsgStream,
|
||||||
|
_testing,
|
||||||
|
trionics,
|
||||||
|
)
|
||||||
|
import trio
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
async def break_ipc_then_error(
|
||||||
|
stream: MsgStream,
|
||||||
|
break_ipc_with: str|None = None,
|
||||||
|
pre_close: bool = False,
|
||||||
|
):
|
||||||
|
await _testing.break_ipc(
|
||||||
|
stream=stream,
|
||||||
|
method=break_ipc_with,
|
||||||
|
pre_close=pre_close,
|
||||||
|
)
|
||||||
|
async for msg in stream:
|
||||||
|
await stream.send(msg)
|
||||||
|
|
||||||
|
assert 0
|
||||||
|
|
||||||
|
|
||||||
|
async def iter_ipc_stream(
|
||||||
|
stream: MsgStream,
|
||||||
|
break_ipc_with: str|None = None,
|
||||||
|
pre_close: bool = False,
|
||||||
|
):
|
||||||
|
async for msg in stream:
|
||||||
|
await stream.send(msg)
|
||||||
|
|
||||||
|
|
||||||
|
@context
|
||||||
|
async def recv_and_spawn_net_killers(
|
||||||
|
|
||||||
|
ctx: Context,
|
||||||
|
break_ipc_after: bool|int = False,
|
||||||
|
pre_close: bool = False,
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
'''
|
||||||
|
Receive stream msgs and spawn some IPC killers mid-stream.
|
||||||
|
|
||||||
|
'''
|
||||||
|
broke_ipc: bool = False
|
||||||
|
await ctx.started()
|
||||||
|
async with (
|
||||||
|
ctx.open_stream() as stream,
|
||||||
|
trionics.collapse_eg(),
|
||||||
|
trio.open_nursery() as tn,
|
||||||
|
):
|
||||||
|
async for i in stream:
|
||||||
|
print(f'child echoing {i}')
|
||||||
|
if not broke_ipc:
|
||||||
|
await stream.send(i)
|
||||||
|
else:
|
||||||
|
await trio.sleep(0.01)
|
||||||
|
|
||||||
|
if (
|
||||||
|
break_ipc_after
|
||||||
|
and
|
||||||
|
i >= break_ipc_after
|
||||||
|
):
|
||||||
|
broke_ipc = True
|
||||||
|
tn.start_soon(
|
||||||
|
iter_ipc_stream,
|
||||||
|
stream,
|
||||||
|
)
|
||||||
|
tn.start_soon(
|
||||||
|
partial(
|
||||||
|
break_ipc_then_error,
|
||||||
|
stream=stream,
|
||||||
|
pre_close=pre_close,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@acm
|
||||||
|
async def stuff_hangin_ctlc(timeout: float = 1) -> None:
|
||||||
|
|
||||||
|
with trio.move_on_after(timeout) as cs:
|
||||||
|
yield timeout
|
||||||
|
|
||||||
|
if cs.cancelled_caught:
|
||||||
|
# pretend to be a user seeing no streaming action
|
||||||
|
# thinking it's a hang, and then hitting ctl-c..
|
||||||
|
print(
|
||||||
|
f"i'm a user on the PARENT side and thingz hangin "
|
||||||
|
f'after timeout={timeout} ???\n\n'
|
||||||
|
'MASHING CTlR-C..!?\n'
|
||||||
|
)
|
||||||
|
raise KeyboardInterrupt
|
||||||
|
|
||||||
|
|
||||||
|
async def main(
|
||||||
|
debug_mode: bool = False,
|
||||||
|
start_method: str = 'trio',
|
||||||
|
loglevel: str = 'cancel',
|
||||||
|
|
||||||
|
# by default we break the parent IPC first (if configured to break
|
||||||
|
# at all), but this can be changed so the child does first (even if
|
||||||
|
# both are set to break).
|
||||||
|
break_parent_ipc_after: int|bool = False,
|
||||||
|
break_child_ipc_after: int|bool = False,
|
||||||
|
pre_close: bool = False,
|
||||||
|
tpt_proto: str = 'tcp',
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
|
||||||
|
async with (
|
||||||
|
open_nursery(
|
||||||
|
start_method=start_method,
|
||||||
|
|
||||||
|
# NOTE: even debugger is used we shouldn't get
|
||||||
|
# a hang since it never engages due to broken IPC
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
loglevel=loglevel,
|
||||||
|
enable_transports=[tpt_proto],
|
||||||
|
|
||||||
|
) as an,
|
||||||
|
):
|
||||||
|
sub_name: str = 'chitty_hijo'
|
||||||
|
portal = await an.start_actor(
|
||||||
|
sub_name,
|
||||||
|
enable_modules=[__name__],
|
||||||
|
)
|
||||||
|
|
||||||
|
async with (
|
||||||
|
stuff_hangin_ctlc(timeout=2) as timeout,
|
||||||
|
_testing.expect_ctxc(
|
||||||
|
yay=(
|
||||||
|
break_parent_ipc_after
|
||||||
|
or
|
||||||
|
break_child_ipc_after
|
||||||
|
),
|
||||||
|
# TODO: we CAN'T remove this right?
|
||||||
|
# since we need the ctxc to bubble up from either
|
||||||
|
# the stream API after the `None` msg is sent
|
||||||
|
# (which actually implicitly cancels all remote
|
||||||
|
# tasks in the hijo) or from simluated
|
||||||
|
# KBI-mash-from-user
|
||||||
|
# or should we expect that a KBI triggers the ctxc
|
||||||
|
# and KBI in an eg?
|
||||||
|
reraise=True,
|
||||||
|
),
|
||||||
|
|
||||||
|
portal.open_context(
|
||||||
|
recv_and_spawn_net_killers,
|
||||||
|
break_ipc_after=break_child_ipc_after,
|
||||||
|
pre_close=pre_close,
|
||||||
|
) as (ctx, sent),
|
||||||
|
):
|
||||||
|
rx_eoc: bool = False
|
||||||
|
ipc_break_sent: bool = False
|
||||||
|
async with ctx.open_stream() as stream:
|
||||||
|
for i in range(1000):
|
||||||
|
|
||||||
|
if (
|
||||||
|
break_parent_ipc_after
|
||||||
|
and
|
||||||
|
i > break_parent_ipc_after
|
||||||
|
and
|
||||||
|
not ipc_break_sent
|
||||||
|
):
|
||||||
|
print(
|
||||||
|
'#################################\n'
|
||||||
|
'Simulating PARENT-side IPC BREAK!\n'
|
||||||
|
'#################################\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
# TODO: other methods? see break func above.
|
||||||
|
# await stream._ctx.chan.send(None)
|
||||||
|
# await stream._ctx.chan.transport.stream.send_eof()
|
||||||
|
await stream._ctx.chan.transport.stream.aclose()
|
||||||
|
ipc_break_sent = True
|
||||||
|
|
||||||
|
# it actually breaks right here in the
|
||||||
|
# mp_spawn/forkserver backends and thus the
|
||||||
|
# zombie reaper never even kicks in?
|
||||||
|
try:
|
||||||
|
print(f'parent sending {i}')
|
||||||
|
await stream.send(i)
|
||||||
|
except ContextCancelled as ctxc:
|
||||||
|
print(
|
||||||
|
'parent received ctxc on `stream.send()`\n'
|
||||||
|
f'{ctxc}\n'
|
||||||
|
)
|
||||||
|
assert 'root' in ctxc.canceller
|
||||||
|
assert sub_name in ctx.canceller
|
||||||
|
|
||||||
|
# TODO: is this needed or no?
|
||||||
|
raise
|
||||||
|
|
||||||
|
except trio.ClosedResourceError:
|
||||||
|
# NOTE: don't send if we already broke the
|
||||||
|
# connection to avoid raising a closed-error
|
||||||
|
# such that we drop through to the ctl-c
|
||||||
|
# mashing by user.
|
||||||
|
await trio.sleep(0.01)
|
||||||
|
|
||||||
|
# timeout: int = 1
|
||||||
|
# with trio.move_on_after(timeout) as cs:
|
||||||
|
async with stuff_hangin_ctlc() as timeout:
|
||||||
|
print(
|
||||||
|
f'PARENT `stream.receive()` with timeout={timeout}\n'
|
||||||
|
)
|
||||||
|
# NOTE: in the parent side IPC failure case this
|
||||||
|
# will raise an ``EndOfChannel`` after the child
|
||||||
|
# is killed and sends a stop msg back to it's
|
||||||
|
# caller/this-parent.
|
||||||
|
try:
|
||||||
|
rx = await stream.receive()
|
||||||
|
print(
|
||||||
|
"I'm a happy PARENT user and echoed to me is\n"
|
||||||
|
f'{rx}\n'
|
||||||
|
)
|
||||||
|
except trio.EndOfChannel:
|
||||||
|
rx_eoc: bool = True
|
||||||
|
print('MsgStream got EoC for PARENT')
|
||||||
|
raise
|
||||||
|
|
||||||
|
print(
|
||||||
|
'Streaming finished and we got Eoc.\n'
|
||||||
|
'Canceling `.open_context()` in root with\n'
|
||||||
|
'CTlR-C..'
|
||||||
|
)
|
||||||
|
if rx_eoc:
|
||||||
|
assert stream.closed
|
||||||
|
try:
|
||||||
|
await stream.send(i)
|
||||||
|
pytest.fail('stream not closed?')
|
||||||
|
except (
|
||||||
|
trio.ClosedResourceError,
|
||||||
|
trio.EndOfChannel,
|
||||||
|
) as send_err:
|
||||||
|
if rx_eoc:
|
||||||
|
assert send_err is stream._eoc
|
||||||
|
else:
|
||||||
|
assert send_err is stream._closed
|
||||||
|
|
||||||
|
raise KeyboardInterrupt
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -0,0 +1,136 @@
|
||||||
|
'''
|
||||||
|
Examples of using the builtin `breakpoint()` from an `asyncio.Task`
|
||||||
|
running in a subactor spawned with `infect_asyncio=True`.
|
||||||
|
|
||||||
|
'''
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
from tractor import (
|
||||||
|
to_asyncio,
|
||||||
|
Portal,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def aio_sleep_forever():
|
||||||
|
await asyncio.sleep(float('inf'))
|
||||||
|
|
||||||
|
|
||||||
|
async def bp_then_error(
|
||||||
|
to_trio: trio.MemorySendChannel,
|
||||||
|
from_trio: asyncio.Queue,
|
||||||
|
|
||||||
|
raise_after_bp: bool = True,
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
|
||||||
|
# sync with `trio`-side (caller) task
|
||||||
|
to_trio.send_nowait('start')
|
||||||
|
|
||||||
|
# NOTE: what happens here inside the hook needs some refinement..
|
||||||
|
# => seems like it's still `.debug._set_trace()` but
|
||||||
|
# we set `Lock.local_task_in_debug = 'sync'`, we probably want
|
||||||
|
# some further, at least, meta-data about the task/actor in debug
|
||||||
|
# in terms of making it clear it's `asyncio` mucking about.
|
||||||
|
breakpoint() # asyncio-side
|
||||||
|
|
||||||
|
# short checkpoint / delay
|
||||||
|
await asyncio.sleep(0.5) # asyncio-side
|
||||||
|
|
||||||
|
if raise_after_bp:
|
||||||
|
raise ValueError('asyncio side error!')
|
||||||
|
|
||||||
|
# TODO: test case with this so that it gets cancelled?
|
||||||
|
else:
|
||||||
|
# XXX NOTE: this is required in order to get the SIGINT-ignored
|
||||||
|
# hang case documented in the module script section!
|
||||||
|
await aio_sleep_forever()
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def trio_ctx(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
bp_before_started: bool = False,
|
||||||
|
):
|
||||||
|
|
||||||
|
# this will block until the ``asyncio`` task sends a "first"
|
||||||
|
# message, see first line in above func.
|
||||||
|
async with (
|
||||||
|
to_asyncio.open_channel_from(
|
||||||
|
bp_then_error,
|
||||||
|
# raise_after_bp=not bp_before_started,
|
||||||
|
) as (first, chan),
|
||||||
|
|
||||||
|
trio.open_nursery() as tn,
|
||||||
|
):
|
||||||
|
assert first == 'start'
|
||||||
|
|
||||||
|
if bp_before_started:
|
||||||
|
await tractor.pause() # trio-side
|
||||||
|
|
||||||
|
await ctx.started(first) # trio-side
|
||||||
|
|
||||||
|
tn.start_soon(
|
||||||
|
to_asyncio.run_task,
|
||||||
|
aio_sleep_forever,
|
||||||
|
)
|
||||||
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
|
||||||
|
async def main(
|
||||||
|
bps_all_over: bool = True,
|
||||||
|
|
||||||
|
# TODO, WHICH OF THESE HAZ BUGZ?
|
||||||
|
cancel_from_root: bool = False,
|
||||||
|
err_from_root: bool = False,
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
|
||||||
|
async with tractor.open_nursery(
|
||||||
|
debug_mode=True,
|
||||||
|
maybe_enable_greenback=True,
|
||||||
|
# loglevel='devx',
|
||||||
|
) as an:
|
||||||
|
ptl: Portal = await an.start_actor(
|
||||||
|
'aio_daemon',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
infect_asyncio=True,
|
||||||
|
debug_mode=True,
|
||||||
|
# loglevel='cancel',
|
||||||
|
)
|
||||||
|
|
||||||
|
async with ptl.open_context(
|
||||||
|
trio_ctx,
|
||||||
|
bp_before_started=bps_all_over,
|
||||||
|
) as (ctx, first):
|
||||||
|
|
||||||
|
assert first == 'start'
|
||||||
|
|
||||||
|
# pause in parent to ensure no cross-actor
|
||||||
|
# locking problems exist!
|
||||||
|
await tractor.pause() # trio-root
|
||||||
|
|
||||||
|
if cancel_from_root:
|
||||||
|
await ctx.cancel()
|
||||||
|
|
||||||
|
if err_from_root:
|
||||||
|
assert 0
|
||||||
|
else:
|
||||||
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: case where we cancel from trio-side while asyncio task
|
||||||
|
# has debugger lock?
|
||||||
|
# await ptl.cancel_actor()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
|
||||||
|
# works fine B)
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
# will hang and ignores SIGINT !!
|
||||||
|
# NOTE: you'll need to send a SIGQUIT (via ctl-\) to kill it
|
||||||
|
# manually..
|
||||||
|
# trio.run(main, True)
|
||||||
|
|
@ -0,0 +1,9 @@
|
||||||
|
'''
|
||||||
|
Reproduce a bug where enabling debug mode for a sub-actor actually causes
|
||||||
|
a hang on teardown...
|
||||||
|
|
||||||
|
'''
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
'''
|
'''
|
||||||
Fast fail test with a context.
|
Fast fail test with a `Context`.
|
||||||
|
|
||||||
Ensure the partially initialized sub-actor process
|
Ensure the partially initialized sub-actor process
|
||||||
doesn't cause a hang on error/cancel of the parent
|
doesn't cause a hang on error/cancel of the parent
|
||||||
|
|
|
||||||
|
|
@ -4,9 +4,15 @@ import trio
|
||||||
|
|
||||||
async def breakpoint_forever():
|
async def breakpoint_forever():
|
||||||
"Indefinitely re-enter debugger in child actor."
|
"Indefinitely re-enter debugger in child actor."
|
||||||
while True:
|
try:
|
||||||
yield 'yo'
|
while True:
|
||||||
await tractor.breakpoint()
|
yield 'yo'
|
||||||
|
await tractor.pause()
|
||||||
|
except BaseException:
|
||||||
|
tractor.log.get_console_log().exception(
|
||||||
|
'Cancelled while trying to enter pause point!'
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
async def name_error():
|
async def name_error():
|
||||||
|
|
@ -15,11 +21,14 @@ async def name_error():
|
||||||
|
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
"""Test breakpoint in a streaming actor.
|
'''
|
||||||
"""
|
Test breakpoint in a streaming actor.
|
||||||
|
|
||||||
|
'''
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
debug_mode=True,
|
debug_mode=True,
|
||||||
loglevel='error',
|
loglevel='cancel',
|
||||||
|
# loglevel='devx',
|
||||||
) as n:
|
) as n:
|
||||||
|
|
||||||
p0 = await n.start_actor('bp_forever', enable_modules=[__name__])
|
p0 = await n.start_actor('bp_forever', enable_modules=[__name__])
|
||||||
|
|
@ -32,7 +41,7 @@ async def main():
|
||||||
try:
|
try:
|
||||||
await p1.run(name_error)
|
await p1.run(name_error)
|
||||||
except tractor.RemoteActorError as rae:
|
except tractor.RemoteActorError as rae:
|
||||||
assert rae.type is NameError
|
assert rae.boxed_type is NameError
|
||||||
|
|
||||||
async for i in stream:
|
async for i in stream:
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,7 @@ async def name_error():
|
||||||
async def breakpoint_forever():
|
async def breakpoint_forever():
|
||||||
"Indefinitely re-enter debugger in child actor."
|
"Indefinitely re-enter debugger in child actor."
|
||||||
while True:
|
while True:
|
||||||
await tractor.breakpoint()
|
await tractor.pause()
|
||||||
|
|
||||||
# NOTE: if the test never sent 'q'/'quit' commands
|
# NOTE: if the test never sent 'q'/'quit' commands
|
||||||
# on the pdb repl, without this checkpoint line the
|
# on the pdb repl, without this checkpoint line the
|
||||||
|
|
@ -45,6 +45,7 @@ async def spawn_until(depth=0):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: notes on the new boxed-relayed errors through proxy actors
|
||||||
async def main():
|
async def main():
|
||||||
"""The main ``tractor`` routine.
|
"""The main ``tractor`` routine.
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -40,7 +40,7 @@ async def main():
|
||||||
"""
|
"""
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
debug_mode=True,
|
debug_mode=True,
|
||||||
# loglevel='cancel',
|
loglevel='devx',
|
||||||
) as n:
|
) as n:
|
||||||
|
|
||||||
# spawn both actors
|
# spawn both actors
|
||||||
|
|
|
||||||
|
|
@ -6,7 +6,7 @@ async def breakpoint_forever():
|
||||||
"Indefinitely re-enter debugger in child actor."
|
"Indefinitely re-enter debugger in child actor."
|
||||||
while True:
|
while True:
|
||||||
await trio.sleep(0.1)
|
await trio.sleep(0.1)
|
||||||
await tractor.breakpoint()
|
await tractor.pause()
|
||||||
|
|
||||||
|
|
||||||
async def name_error():
|
async def name_error():
|
||||||
|
|
@ -38,6 +38,7 @@ async def main():
|
||||||
"""
|
"""
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
debug_mode=True,
|
debug_mode=True,
|
||||||
|
# loglevel='runtime',
|
||||||
) as n:
|
) as n:
|
||||||
|
|
||||||
# Spawn both actors, don't bother with collecting results
|
# Spawn both actors, don't bother with collecting results
|
||||||
|
|
|
||||||
|
|
@ -23,5 +23,6 @@ async def main():
|
||||||
n.start_soon(debug_actor.run, die)
|
n.start_soon(debug_actor.run, die)
|
||||||
n.start_soon(crash_boi.run, die)
|
n.start_soon(crash_boi.run, die)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,56 @@
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def name_error(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Raise a `NameError`, catch it and enter `.post_mortem()`, then
|
||||||
|
expect the `._rpc._invoke()` crash handler to also engage.
|
||||||
|
|
||||||
|
'''
|
||||||
|
try:
|
||||||
|
getattr(doggypants) # noqa (on purpose)
|
||||||
|
except NameError:
|
||||||
|
await tractor.post_mortem()
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
'''
|
||||||
|
Test 3 `PdbREPL` entries:
|
||||||
|
- one in the child due to manual `.post_mortem()`,
|
||||||
|
- another in the child due to runtime RPC crash handling.
|
||||||
|
- final one here in parent from the RAE.
|
||||||
|
|
||||||
|
'''
|
||||||
|
# XXX NOTE: ideally the REPL arrives at this frame in the parent
|
||||||
|
# ONE UP FROM the inner ctx block below!
|
||||||
|
async with tractor.open_nursery(
|
||||||
|
debug_mode=True,
|
||||||
|
# loglevel='cancel',
|
||||||
|
) as an:
|
||||||
|
p: tractor.Portal = await an.start_actor(
|
||||||
|
'child',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
)
|
||||||
|
|
||||||
|
# XXX should raise `RemoteActorError[NameError]`
|
||||||
|
# AND be the active frame when REPL enters!
|
||||||
|
try:
|
||||||
|
async with p.open_context(name_error) as (ctx, first):
|
||||||
|
assert first
|
||||||
|
except tractor.RemoteActorError as rae:
|
||||||
|
assert rae.boxed_type is NameError
|
||||||
|
|
||||||
|
# manually handle in root's parent task
|
||||||
|
await tractor.post_mortem()
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
raise RuntimeError('IPC ctx should have remote errored!?')
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -0,0 +1,58 @@
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
|
||||||
|
# ensure mod-path is correct!
|
||||||
|
from tractor.devx.debug import (
|
||||||
|
_sync_pause_from_builtin as _sync_pause_from_builtin,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def main() -> None:
|
||||||
|
|
||||||
|
# intially unset, no entry.
|
||||||
|
orig_pybp_var: int = os.environ.get('PYTHONBREAKPOINT')
|
||||||
|
assert orig_pybp_var in {None, "0"}
|
||||||
|
|
||||||
|
async with tractor.open_nursery(
|
||||||
|
debug_mode=True,
|
||||||
|
loglevel='devx',
|
||||||
|
maybe_enable_greenback=True,
|
||||||
|
# ^XXX REQUIRED to enable `breakpoint()` support (from sync
|
||||||
|
# fns) and thus required here to avoid an assertion err
|
||||||
|
# on the next line
|
||||||
|
):
|
||||||
|
assert (
|
||||||
|
(pybp_var := os.environ['PYTHONBREAKPOINT'])
|
||||||
|
==
|
||||||
|
'tractor.devx.debug._sync_pause_from_builtin'
|
||||||
|
)
|
||||||
|
|
||||||
|
# TODO: an assert that verifies the hook has indeed been, hooked
|
||||||
|
# XD
|
||||||
|
assert (
|
||||||
|
(pybp_hook := sys.breakpointhook)
|
||||||
|
is not tractor.devx.debug._set_trace
|
||||||
|
)
|
||||||
|
|
||||||
|
print(
|
||||||
|
f'$PYTHONOBREAKPOINT: {pybp_var!r}\n'
|
||||||
|
f'`sys.breakpointhook`: {pybp_hook!r}\n'
|
||||||
|
)
|
||||||
|
breakpoint() # first bp, tractor hook set.
|
||||||
|
|
||||||
|
# XXX AFTER EXIT (of actor-runtime) verify the hook is unset..
|
||||||
|
#
|
||||||
|
# YES, this is weird but it's how stdlib docs say to do it..
|
||||||
|
# https://docs.python.org/3/library/sys.html#sys.breakpointhook
|
||||||
|
assert os.environ.get('PYTHONBREAKPOINT') is orig_pybp_var
|
||||||
|
assert sys.breakpointhook
|
||||||
|
|
||||||
|
# now ensure a regular builtin pause still works
|
||||||
|
breakpoint() # last bp, stdlib hook restored
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -10,7 +10,7 @@ async def main():
|
||||||
|
|
||||||
await trio.sleep(0.1)
|
await trio.sleep(0.1)
|
||||||
|
|
||||||
await tractor.breakpoint()
|
await tractor.pause()
|
||||||
|
|
||||||
await trio.sleep(0.1)
|
await trio.sleep(0.1)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -2,13 +2,16 @@ import trio
|
||||||
import tractor
|
import tractor
|
||||||
|
|
||||||
|
|
||||||
async def main():
|
async def main(
|
||||||
|
registry_addrs: tuple[str, int]|None = None
|
||||||
|
):
|
||||||
|
|
||||||
async with tractor.open_root_actor(
|
async with tractor.open_root_actor(
|
||||||
debug_mode=True,
|
debug_mode=True,
|
||||||
|
# loglevel='runtime',
|
||||||
):
|
):
|
||||||
while True:
|
while True:
|
||||||
await tractor.breakpoint()
|
await tractor.pause()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
|
|
||||||
|
|
@ -24,10 +24,9 @@ async def spawn_until(depth=0):
|
||||||
|
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
"""The main ``tractor`` routine.
|
'''
|
||||||
|
The process tree should look as approximately as follows when the
|
||||||
The process tree should look as approximately as follows when the debugger
|
debugger first engages:
|
||||||
first engages:
|
|
||||||
|
|
||||||
python examples/debugging/multi_nested_subactors_bp_forever.py
|
python examples/debugging/multi_nested_subactors_bp_forever.py
|
||||||
├─ python -m tractor._child --uid ('spawner1', '7eab8462 ...)
|
├─ python -m tractor._child --uid ('spawner1', '7eab8462 ...)
|
||||||
|
|
@ -37,10 +36,11 @@ async def main():
|
||||||
└─ python -m tractor._child --uid ('spawner0', '1d42012b ...)
|
└─ python -m tractor._child --uid ('spawner0', '1d42012b ...)
|
||||||
└─ python -m tractor._child --uid ('name_error', '6c2733b8 ...)
|
└─ python -m tractor._child --uid ('name_error', '6c2733b8 ...)
|
||||||
|
|
||||||
"""
|
'''
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
debug_mode=True,
|
debug_mode=True,
|
||||||
loglevel='warning'
|
loglevel='devx',
|
||||||
|
enable_transports=['uds'],
|
||||||
) as n:
|
) as n:
|
||||||
|
|
||||||
# spawn both actors
|
# spawn both actors
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,35 @@
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
async with tractor.open_root_actor(
|
||||||
|
debug_mode=True,
|
||||||
|
loglevel='cancel',
|
||||||
|
) as _root:
|
||||||
|
|
||||||
|
# manually trigger self-cancellation and wait
|
||||||
|
# for it to fully trigger.
|
||||||
|
_root.cancel_soon()
|
||||||
|
await _root._cancel_complete.wait()
|
||||||
|
print('root cancelled')
|
||||||
|
|
||||||
|
# now ensure we can still use the REPL
|
||||||
|
try:
|
||||||
|
await tractor.pause()
|
||||||
|
except trio.Cancelled as _taskc:
|
||||||
|
assert (root_cs := _root._root_tn.cancel_scope).cancel_called
|
||||||
|
# NOTE^^ above logic but inside `open_root_actor()` and
|
||||||
|
# passed to the `shield=` expression is effectively what
|
||||||
|
# we're testing here!
|
||||||
|
await tractor.pause(shield=root_cs.cancel_called)
|
||||||
|
|
||||||
|
# XXX, if shield logic *is wrong* inside `open_root_actor()`'s
|
||||||
|
# crash-handler block this should never be interacted,
|
||||||
|
# instead `trio.Cancelled` would be bubbled up: the original
|
||||||
|
# BUG.
|
||||||
|
assert 0
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -0,0 +1,84 @@
|
||||||
|
'''
|
||||||
|
Verify we can dump a `stackscope` tree on a hang.
|
||||||
|
|
||||||
|
'''
|
||||||
|
import os
|
||||||
|
import signal
|
||||||
|
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def start_n_shield_hang(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
):
|
||||||
|
# actor: tractor.Actor = tractor.current_actor()
|
||||||
|
|
||||||
|
# sync to parent-side task
|
||||||
|
await ctx.started(os.getpid())
|
||||||
|
|
||||||
|
print('Entering shield sleep..')
|
||||||
|
with trio.CancelScope(shield=True):
|
||||||
|
await trio.sleep_forever() # in subactor
|
||||||
|
|
||||||
|
# XXX NOTE ^^^ since this shields, we expect
|
||||||
|
# the zombie reaper (aka T800) to engage on
|
||||||
|
# SIGINT from the user and eventually hard-kill
|
||||||
|
# this subprocess!
|
||||||
|
|
||||||
|
|
||||||
|
async def main(
|
||||||
|
from_test: bool = False,
|
||||||
|
) -> None:
|
||||||
|
|
||||||
|
async with (
|
||||||
|
tractor.open_nursery(
|
||||||
|
debug_mode=True,
|
||||||
|
enable_stack_on_sig=True,
|
||||||
|
# maybe_enable_greenback=False,
|
||||||
|
loglevel='devx',
|
||||||
|
enable_transports=['uds'],
|
||||||
|
) as an,
|
||||||
|
):
|
||||||
|
ptl: tractor.Portal = await an.start_actor(
|
||||||
|
'hanger',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
debug_mode=True,
|
||||||
|
)
|
||||||
|
async with ptl.open_context(
|
||||||
|
start_n_shield_hang,
|
||||||
|
) as (ctx, cpid):
|
||||||
|
|
||||||
|
_, proc, _ = an._children[ptl.chan.uid]
|
||||||
|
assert cpid == proc.pid
|
||||||
|
|
||||||
|
print(
|
||||||
|
'Yo my child hanging..?\n'
|
||||||
|
# "i'm a user who wants to see a `stackscope` tree!\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
# XXX simulate the wrapping test's "user actions"
|
||||||
|
# (i.e. if a human didn't run this manually but wants to
|
||||||
|
# know what they should do to reproduce test behaviour)
|
||||||
|
if from_test:
|
||||||
|
print(
|
||||||
|
f'Sending SIGUSR1 to {cpid!r}!\n'
|
||||||
|
)
|
||||||
|
os.kill(
|
||||||
|
cpid,
|
||||||
|
signal.SIGUSR1,
|
||||||
|
)
|
||||||
|
|
||||||
|
# simulate user cancelling program
|
||||||
|
await trio.sleep(0.5)
|
||||||
|
os.kill(
|
||||||
|
os.getpid(),
|
||||||
|
signal.SIGINT,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# actually let user send the ctl-c
|
||||||
|
await trio.sleep_forever() # in root
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -0,0 +1,88 @@
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
|
||||||
|
|
||||||
|
async def cancellable_pause_loop(
|
||||||
|
task_status: trio.TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED
|
||||||
|
):
|
||||||
|
with trio.CancelScope() as cs:
|
||||||
|
task_status.started(cs)
|
||||||
|
for _ in range(3):
|
||||||
|
try:
|
||||||
|
# ON first entry, there is no level triggered
|
||||||
|
# cancellation yet, so this cp does a parent task
|
||||||
|
# ctx-switch so that this scope raises for the NEXT
|
||||||
|
# checkpoint we hit.
|
||||||
|
await trio.lowlevel.checkpoint()
|
||||||
|
await tractor.pause()
|
||||||
|
|
||||||
|
cs.cancel()
|
||||||
|
|
||||||
|
# parent should have called `cs.cancel()` by now
|
||||||
|
await trio.lowlevel.checkpoint()
|
||||||
|
|
||||||
|
except trio.Cancelled:
|
||||||
|
print('INSIDE SHIELDED PAUSE')
|
||||||
|
await tractor.pause(shield=True)
|
||||||
|
else:
|
||||||
|
# should raise it again, bubbling up to parent
|
||||||
|
print('BUBBLING trio.Cancelled to parent task-nursery')
|
||||||
|
await trio.lowlevel.checkpoint()
|
||||||
|
|
||||||
|
|
||||||
|
async def pm_on_cancelled():
|
||||||
|
async with trio.open_nursery() as tn:
|
||||||
|
tn.cancel_scope.cancel()
|
||||||
|
try:
|
||||||
|
await trio.sleep_forever()
|
||||||
|
except trio.Cancelled:
|
||||||
|
# should also raise `Cancelled` since
|
||||||
|
# we didn't pass `shield=True`.
|
||||||
|
try:
|
||||||
|
await tractor.post_mortem(hide_tb=False)
|
||||||
|
except trio.Cancelled as taskc:
|
||||||
|
|
||||||
|
# should enter just fine, in fact it should
|
||||||
|
# be debugging the internals of the previous
|
||||||
|
# sin-shield call above Bo
|
||||||
|
await tractor.post_mortem(
|
||||||
|
hide_tb=False,
|
||||||
|
shield=True,
|
||||||
|
)
|
||||||
|
raise taskc
|
||||||
|
|
||||||
|
else:
|
||||||
|
raise RuntimeError('Dint cancel as expected!?')
|
||||||
|
|
||||||
|
|
||||||
|
async def cancelled_before_pause(
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Verify that using a shielded pause works despite surrounding
|
||||||
|
cancellation called state in the calling task.
|
||||||
|
|
||||||
|
'''
|
||||||
|
async with trio.open_nursery() as tn:
|
||||||
|
cs: trio.CancelScope = await tn.start(cancellable_pause_loop)
|
||||||
|
await trio.sleep(0.1)
|
||||||
|
|
||||||
|
assert cs.cancelled_caught
|
||||||
|
|
||||||
|
await pm_on_cancelled()
|
||||||
|
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
async with tractor.open_nursery(
|
||||||
|
debug_mode=True,
|
||||||
|
) as n:
|
||||||
|
portal: tractor.Portal = await n.run_in_actor(
|
||||||
|
cancelled_before_pause,
|
||||||
|
)
|
||||||
|
await portal.result()
|
||||||
|
|
||||||
|
# ensure the same works in the root actor!
|
||||||
|
await pm_on_cancelled()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -4,9 +4,9 @@ import trio
|
||||||
|
|
||||||
async def gen():
|
async def gen():
|
||||||
yield 'yo'
|
yield 'yo'
|
||||||
await tractor.breakpoint()
|
await tractor.pause()
|
||||||
yield 'yo'
|
yield 'yo'
|
||||||
await tractor.breakpoint()
|
await tractor.pause()
|
||||||
|
|
||||||
|
|
||||||
@tractor.context
|
@tractor.context
|
||||||
|
|
@ -15,7 +15,7 @@ async def just_bp(
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
||||||
await ctx.started()
|
await ctx.started()
|
||||||
await tractor.breakpoint()
|
await tractor.pause()
|
||||||
|
|
||||||
# TODO: bps and errors in this call..
|
# TODO: bps and errors in this call..
|
||||||
async for val in gen():
|
async for val in gen():
|
||||||
|
|
@ -33,8 +33,11 @@ async def just_bp(
|
||||||
|
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
|
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
debug_mode=True,
|
debug_mode=True,
|
||||||
|
enable_transports=['uds'],
|
||||||
|
loglevel='devx',
|
||||||
) as n:
|
) as n:
|
||||||
p = await n.start_actor(
|
p = await n.start_actor(
|
||||||
'bp_boi',
|
'bp_boi',
|
||||||
|
|
|
||||||
|
|
@ -3,17 +3,20 @@ import tractor
|
||||||
|
|
||||||
|
|
||||||
async def breakpoint_forever():
|
async def breakpoint_forever():
|
||||||
"""Indefinitely re-enter debugger in child actor.
|
'''
|
||||||
"""
|
Indefinitely re-enter debugger in child actor.
|
||||||
|
|
||||||
|
'''
|
||||||
while True:
|
while True:
|
||||||
await trio.sleep(0.1)
|
await trio.sleep(0.1)
|
||||||
await tractor.breakpoint()
|
await tractor.pause()
|
||||||
|
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
|
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
debug_mode=True,
|
debug_mode=True,
|
||||||
|
loglevel='cancel',
|
||||||
) as n:
|
) as n:
|
||||||
|
|
||||||
portal = await n.run_in_actor(
|
portal = await n.run_in_actor(
|
||||||
|
|
|
||||||
|
|
@ -3,16 +3,26 @@ import tractor
|
||||||
|
|
||||||
|
|
||||||
async def name_error():
|
async def name_error():
|
||||||
getattr(doggypants)
|
getattr(doggypants) # noqa (on purpose)
|
||||||
|
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
debug_mode=True,
|
debug_mode=True,
|
||||||
) as n:
|
# loglevel='transport',
|
||||||
|
) as an:
|
||||||
|
|
||||||
portal = await n.run_in_actor(name_error)
|
# TODO: ideally the REPL arrives at this frame in the parent,
|
||||||
await portal.result()
|
# ABOVE the @api_frame of `Portal.run_in_actor()` (which
|
||||||
|
# should eventually not even be a portal method ... XD)
|
||||||
|
# await tractor.pause()
|
||||||
|
p: tractor.Portal = await an.run_in_actor(name_error)
|
||||||
|
|
||||||
|
# with this style, should raise on this line
|
||||||
|
await p.result()
|
||||||
|
|
||||||
|
# with this alt style should raise at `open_nusery()`
|
||||||
|
# return await p.result()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,169 @@
|
||||||
|
from functools import partial
|
||||||
|
import time
|
||||||
|
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
|
||||||
|
# TODO: only import these when not running from test harness?
|
||||||
|
# can we detect `pexpect` usage maybe?
|
||||||
|
# from tractor.devx.debug import (
|
||||||
|
# get_lock,
|
||||||
|
# get_debug_req,
|
||||||
|
# )
|
||||||
|
|
||||||
|
|
||||||
|
def sync_pause(
|
||||||
|
use_builtin: bool = False,
|
||||||
|
error: bool = False,
|
||||||
|
hide_tb: bool = True,
|
||||||
|
pre_sleep: float|None = None,
|
||||||
|
):
|
||||||
|
if pre_sleep:
|
||||||
|
time.sleep(pre_sleep)
|
||||||
|
|
||||||
|
if use_builtin:
|
||||||
|
breakpoint(hide_tb=hide_tb)
|
||||||
|
|
||||||
|
else:
|
||||||
|
# TODO: maybe for testing some kind of cm style interface
|
||||||
|
# where the `._set_trace()` call doesn't happen until block
|
||||||
|
# exit?
|
||||||
|
# assert get_lock().ctx_in_debug is None
|
||||||
|
# assert get_debug_req().repl is None
|
||||||
|
tractor.pause_from_sync()
|
||||||
|
# assert get_debug_req().repl is None
|
||||||
|
|
||||||
|
if error:
|
||||||
|
raise RuntimeError('yoyo sync code error')
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def start_n_sync_pause(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
):
|
||||||
|
actor: tractor.Actor = tractor.current_actor()
|
||||||
|
|
||||||
|
# sync to parent-side task
|
||||||
|
await ctx.started()
|
||||||
|
|
||||||
|
print(f'Entering `sync_pause()` in subactor: {actor.uid}\n')
|
||||||
|
sync_pause()
|
||||||
|
print(f'Exited `sync_pause()` in subactor: {actor.uid}\n')
|
||||||
|
|
||||||
|
|
||||||
|
async def main() -> None:
|
||||||
|
async with (
|
||||||
|
tractor.open_nursery(
|
||||||
|
debug_mode=True,
|
||||||
|
maybe_enable_greenback=True,
|
||||||
|
enable_stack_on_sig=True,
|
||||||
|
# loglevel='warning',
|
||||||
|
# loglevel='devx',
|
||||||
|
) as an,
|
||||||
|
trio.open_nursery() as tn,
|
||||||
|
):
|
||||||
|
# just from root task
|
||||||
|
sync_pause()
|
||||||
|
|
||||||
|
p: tractor.Portal = await an.start_actor(
|
||||||
|
'subactor',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
# infect_asyncio=True,
|
||||||
|
debug_mode=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# TODO: 3 sub-actor usage cases:
|
||||||
|
# -[x] via a `.open_context()`
|
||||||
|
# -[ ] via a `.run_in_actor()` call
|
||||||
|
# -[ ] via a `.run()`
|
||||||
|
# -[ ] via a `.to_thread.run_sync()` in subactor
|
||||||
|
async with p.open_context(
|
||||||
|
start_n_sync_pause,
|
||||||
|
) as (ctx, first):
|
||||||
|
assert first is None
|
||||||
|
|
||||||
|
# TODO: handle bg-thread-in-root-actor special cases!
|
||||||
|
#
|
||||||
|
# there are a couple very subtle situations possible here
|
||||||
|
# and they are likely to become more important as cpython
|
||||||
|
# moves to support no-GIL.
|
||||||
|
#
|
||||||
|
# Cases:
|
||||||
|
# 1. root-actor bg-threads that call `.pause_from_sync()`
|
||||||
|
# whilst an in-tree subactor also is using ` .pause()`.
|
||||||
|
# |_ since the root-actor bg thread can not
|
||||||
|
# `Lock._debug_lock.acquire_nowait()` without running
|
||||||
|
# a `trio.Task`, AND because the
|
||||||
|
# `PdbREPL.set_continue()` is called from that
|
||||||
|
# bg-thread, we can not `._debug_lock.release()`
|
||||||
|
# either!
|
||||||
|
# |_ this results in no actor-tree `Lock` being used
|
||||||
|
# on behalf of the bg-thread and thus the subactor's
|
||||||
|
# task and the thread trying to to use stdio
|
||||||
|
# simultaneously which results in the classic TTY
|
||||||
|
# clobbering!
|
||||||
|
#
|
||||||
|
# 2. mutiple sync-bg-threads that call
|
||||||
|
# `.pause_from_sync()` where one is scheduled via
|
||||||
|
# `Nursery.start_soon(to_thread.run_sync)` in a bg
|
||||||
|
# task.
|
||||||
|
#
|
||||||
|
# Due to the GIL, the threads never truly try to step
|
||||||
|
# through the REPL simultaneously, BUT their `logging`
|
||||||
|
# and traceback outputs are interleaved since the GIL
|
||||||
|
# (seemingly) on every REPL-input from the user
|
||||||
|
# switches threads..
|
||||||
|
#
|
||||||
|
# Soo, the context switching semantics of the GIL
|
||||||
|
# result in a very confusing and messy interaction UX
|
||||||
|
# since eval and (tb) print output is NOT synced to
|
||||||
|
# each REPL-cycle (like we normally make it via
|
||||||
|
# a `.set_continue()` callback triggering the
|
||||||
|
# `Lock.release()`). Ideally we can solve this
|
||||||
|
# usability issue NOW because this will of course be
|
||||||
|
# that much more important when eventually there is no
|
||||||
|
# GIL!
|
||||||
|
|
||||||
|
# XXX should cause double REPL entry and thus TTY
|
||||||
|
# clobbering due to case 1. above!
|
||||||
|
tn.start_soon(
|
||||||
|
partial(
|
||||||
|
trio.to_thread.run_sync,
|
||||||
|
partial(
|
||||||
|
sync_pause,
|
||||||
|
use_builtin=False,
|
||||||
|
# pre_sleep=0.5,
|
||||||
|
),
|
||||||
|
abandon_on_cancel=True,
|
||||||
|
thread_name='start_soon_root_bg_thread',
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
await tractor.pause()
|
||||||
|
|
||||||
|
# XXX should cause double REPL entry and thus TTY
|
||||||
|
# clobbering due to case 2. above!
|
||||||
|
await trio.to_thread.run_sync(
|
||||||
|
partial(
|
||||||
|
sync_pause,
|
||||||
|
# NOTE this already works fine since in the new
|
||||||
|
# thread the `breakpoint()` built-in is never
|
||||||
|
# overloaded, thus NO locking is used, HOWEVER
|
||||||
|
# the case 2. from above still exists!
|
||||||
|
use_builtin=True,
|
||||||
|
),
|
||||||
|
# TODO: with this `False` we can hang!??!
|
||||||
|
# abandon_on_cancel=False,
|
||||||
|
abandon_on_cancel=True,
|
||||||
|
thread_name='inline_root_bg_thread',
|
||||||
|
)
|
||||||
|
|
||||||
|
await ctx.cancel()
|
||||||
|
|
||||||
|
# TODO: case where we cancel from trio-side while asyncio task
|
||||||
|
# has debugger lock?
|
||||||
|
await p.cancel_actor()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -1,6 +1,11 @@
|
||||||
import time
|
import time
|
||||||
import trio
|
import trio
|
||||||
import tractor
|
import tractor
|
||||||
|
from tractor import (
|
||||||
|
ActorNursery,
|
||||||
|
MsgStream,
|
||||||
|
Portal,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
# this is the first 2 actors, streamer_1 and streamer_2
|
# this is the first 2 actors, streamer_1 and streamer_2
|
||||||
|
|
@ -12,14 +17,18 @@ async def stream_data(seed):
|
||||||
|
|
||||||
# this is the third actor; the aggregator
|
# this is the third actor; the aggregator
|
||||||
async def aggregate(seed):
|
async def aggregate(seed):
|
||||||
"""Ensure that the two streams we receive match but only stream
|
'''
|
||||||
|
Ensure that the two streams we receive match but only stream
|
||||||
a single set of values to the parent.
|
a single set of values to the parent.
|
||||||
"""
|
|
||||||
async with tractor.open_nursery() as nursery:
|
'''
|
||||||
portals = []
|
an: ActorNursery
|
||||||
|
async with tractor.open_nursery() as an:
|
||||||
|
portals: list[Portal] = []
|
||||||
for i in range(1, 3):
|
for i in range(1, 3):
|
||||||
# fork point
|
|
||||||
portal = await nursery.start_actor(
|
# fork/spawn call
|
||||||
|
portal = await an.start_actor(
|
||||||
name=f'streamer_{i}',
|
name=f'streamer_{i}',
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
)
|
)
|
||||||
|
|
@ -43,7 +52,11 @@ async def aggregate(seed):
|
||||||
async with trio.open_nursery() as n:
|
async with trio.open_nursery() as n:
|
||||||
|
|
||||||
for portal in portals:
|
for portal in portals:
|
||||||
n.start_soon(push_to_chan, portal, send_chan.clone())
|
n.start_soon(
|
||||||
|
push_to_chan,
|
||||||
|
portal,
|
||||||
|
send_chan.clone(),
|
||||||
|
)
|
||||||
|
|
||||||
# close this local task's reference to send side
|
# close this local task's reference to send side
|
||||||
await send_chan.aclose()
|
await send_chan.aclose()
|
||||||
|
|
@ -60,26 +73,36 @@ async def aggregate(seed):
|
||||||
|
|
||||||
print("FINISHED ITERATING in aggregator")
|
print("FINISHED ITERATING in aggregator")
|
||||||
|
|
||||||
await nursery.cancel()
|
await an.cancel()
|
||||||
print("WAITING on `ActorNursery` to finish")
|
print("WAITING on `ActorNursery` to finish")
|
||||||
print("AGGREGATOR COMPLETE!")
|
print("AGGREGATOR COMPLETE!")
|
||||||
|
|
||||||
|
|
||||||
# this is the main actor and *arbiter*
|
async def main() -> list[int]:
|
||||||
async def main():
|
'''
|
||||||
# a nursery which spawns "actors"
|
This is the "root" actor's main task's entrypoint.
|
||||||
|
|
||||||
|
By default (and if not otherwise specified) that root process
|
||||||
|
also acts as a "registry actor" / "registrar" on the localhost
|
||||||
|
for the purposes of multi-actor "service discovery".
|
||||||
|
|
||||||
|
'''
|
||||||
|
# yes, a nursery which spawns `trio`-"actors" B)
|
||||||
|
an: ActorNursery
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
arbiter_addr=('127.0.0.1', 1616)
|
loglevel='cancel',
|
||||||
) as nursery:
|
# debug_mode=True,
|
||||||
|
) as an:
|
||||||
|
|
||||||
seed = int(1e3)
|
seed = int(1e3)
|
||||||
pre_start = time.time()
|
pre_start = time.time()
|
||||||
|
|
||||||
portal = await nursery.start_actor(
|
portal: Portal = await an.start_actor(
|
||||||
name='aggregator',
|
name='aggregator',
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
stream: MsgStream
|
||||||
async with portal.open_stream_from(
|
async with portal.open_stream_from(
|
||||||
aggregate,
|
aggregate,
|
||||||
seed=seed,
|
seed=seed,
|
||||||
|
|
@ -88,11 +111,12 @@ async def main():
|
||||||
start = time.time()
|
start = time.time()
|
||||||
# the portal call returns exactly what you'd expect
|
# the portal call returns exactly what you'd expect
|
||||||
# as if the remote "aggregate" function was called locally
|
# as if the remote "aggregate" function was called locally
|
||||||
result_stream = []
|
result_stream: list[int] = []
|
||||||
async for value in stream:
|
async for value in stream:
|
||||||
result_stream.append(value)
|
result_stream.append(value)
|
||||||
|
|
||||||
await portal.cancel_actor()
|
cancelled: bool = await portal.cancel_actor()
|
||||||
|
assert cancelled
|
||||||
|
|
||||||
print(f"STREAM TIME = {time.time() - start}")
|
print(f"STREAM TIME = {time.time() - start}")
|
||||||
print(f"STREAM + SPAWN TIME = {time.time() - pre_start}")
|
print(f"STREAM + SPAWN TIME = {time.time() - pre_start}")
|
||||||
|
|
|
||||||
|
|
@ -8,7 +8,10 @@ This uses no extra threads, fancy semaphores or futures; all we need
|
||||||
is ``tractor``'s channels.
|
is ``tractor``'s channels.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from contextlib import asynccontextmanager
|
from contextlib import (
|
||||||
|
asynccontextmanager as acm,
|
||||||
|
aclosing,
|
||||||
|
)
|
||||||
from typing import Callable
|
from typing import Callable
|
||||||
import itertools
|
import itertools
|
||||||
import math
|
import math
|
||||||
|
|
@ -16,7 +19,6 @@ import time
|
||||||
|
|
||||||
import tractor
|
import tractor
|
||||||
import trio
|
import trio
|
||||||
from async_generator import aclosing
|
|
||||||
|
|
||||||
|
|
||||||
PRIMES = [
|
PRIMES = [
|
||||||
|
|
@ -44,7 +46,7 @@ async def is_prime(n):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
@asynccontextmanager
|
@acm
|
||||||
async def worker_pool(workers=4):
|
async def worker_pool(workers=4):
|
||||||
"""Though it's a trivial special case for ``tractor``, the well
|
"""Though it's a trivial special case for ``tractor``, the well
|
||||||
known "worker pool" seems to be the defacto "but, I want this
|
known "worker pool" seems to be the defacto "but, I want this
|
||||||
|
|
|
||||||
|
|
@ -3,20 +3,18 @@ import trio
|
||||||
import tractor
|
import tractor
|
||||||
|
|
||||||
|
|
||||||
async def sleepy_jane():
|
async def sleepy_jane() -> None:
|
||||||
uid = tractor.current_actor().uid
|
uid: tuple = tractor.current_actor().uid
|
||||||
print(f'Yo i am actor {uid}')
|
print(f'Yo i am actor {uid}')
|
||||||
await trio.sleep_forever()
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
'''
|
'''
|
||||||
Spawn a flat actor cluster, with one process per
|
Spawn a flat actor cluster, with one process per detected core.
|
||||||
detected core.
|
|
||||||
|
|
||||||
'''
|
'''
|
||||||
portal_map: dict[str, tractor.Portal]
|
portal_map: dict[str, tractor.Portal]
|
||||||
results: dict[str, str]
|
|
||||||
|
|
||||||
# look at this hip new syntax!
|
# look at this hip new syntax!
|
||||||
async with (
|
async with (
|
||||||
|
|
@ -25,11 +23,15 @@ async def main():
|
||||||
modules=[__name__]
|
modules=[__name__]
|
||||||
) as portal_map,
|
) as portal_map,
|
||||||
|
|
||||||
trio.open_nursery() as n,
|
tractor.trionics.collapse_eg(),
|
||||||
|
trio.open_nursery() as tn,
|
||||||
):
|
):
|
||||||
|
|
||||||
for (name, portal) in portal_map.items():
|
for (name, portal) in portal_map.items():
|
||||||
n.start_soon(portal.run, sleepy_jane)
|
tn.start_soon(
|
||||||
|
portal.run,
|
||||||
|
sleepy_jane,
|
||||||
|
)
|
||||||
|
|
||||||
await trio.sleep(0.5)
|
await trio.sleep(0.5)
|
||||||
|
|
||||||
|
|
@ -41,4 +43,4 @@ if __name__ == '__main__':
|
||||||
try:
|
try:
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
pass
|
print('trio cancelled by KBI')
|
||||||
|
|
|
||||||
|
|
@ -13,7 +13,7 @@ async def simple_rpc(
|
||||||
|
|
||||||
'''
|
'''
|
||||||
# signal to parent that we're up much like
|
# signal to parent that we're up much like
|
||||||
# ``trio_typing.TaskStatus.started()``
|
# ``trio.TaskStatus.started()``
|
||||||
await ctx.started(data + 1)
|
await ctx.started(data + 1)
|
||||||
|
|
||||||
async with ctx.open_stream() as stream:
|
async with ctx.open_stream() as stream:
|
||||||
|
|
|
||||||
|
|
@ -9,7 +9,7 @@ async def main(service_name):
|
||||||
async with tractor.open_nursery() as an:
|
async with tractor.open_nursery() as an:
|
||||||
await an.start_actor(service_name)
|
await an.start_actor(service_name)
|
||||||
|
|
||||||
async with tractor.get_arbiter('127.0.0.1', 1616) as portal:
|
async with tractor.get_registry() as portal:
|
||||||
print(f"Arbiter is listening on {portal.channel}")
|
print(f"Arbiter is listening on {portal.channel}")
|
||||||
|
|
||||||
async with tractor.wait_for_actor(service_name) as sockaddr:
|
async with tractor.wait_for_actor(service_name) as sockaddr:
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,85 @@
|
||||||
|
from contextlib import (
|
||||||
|
asynccontextmanager as acm,
|
||||||
|
)
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
|
import tractor
|
||||||
|
import trio
|
||||||
|
|
||||||
|
|
||||||
|
log = tractor.log.get_logger(
|
||||||
|
name=__name__
|
||||||
|
)
|
||||||
|
|
||||||
|
_lock: trio.Lock|None = None
|
||||||
|
|
||||||
|
|
||||||
|
@acm
|
||||||
|
async def acquire_singleton_lock(
|
||||||
|
) -> None:
|
||||||
|
global _lock
|
||||||
|
if _lock is None:
|
||||||
|
log.info('Allocating LOCK')
|
||||||
|
_lock = trio.Lock()
|
||||||
|
|
||||||
|
log.info('TRYING TO LOCK ACQUIRE')
|
||||||
|
async with _lock:
|
||||||
|
log.info('ACQUIRED')
|
||||||
|
yield _lock
|
||||||
|
|
||||||
|
log.info('RELEASED')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
async def hold_lock_forever(
|
||||||
|
task_status=trio.TASK_STATUS_IGNORED
|
||||||
|
):
|
||||||
|
async with (
|
||||||
|
tractor.trionics.maybe_raise_from_masking_exc(),
|
||||||
|
acquire_singleton_lock() as lock,
|
||||||
|
):
|
||||||
|
task_status.started(lock)
|
||||||
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
|
||||||
|
async def main(
|
||||||
|
ignore_special_cases: bool,
|
||||||
|
loglevel: str = 'info',
|
||||||
|
debug_mode: bool = True,
|
||||||
|
):
|
||||||
|
async with (
|
||||||
|
trio.open_nursery() as tn,
|
||||||
|
|
||||||
|
# tractor.trionics.maybe_raise_from_masking_exc()
|
||||||
|
# ^^^ XXX NOTE, interestingly putting the unmasker
|
||||||
|
# here does not exhibit the same behaviour ??
|
||||||
|
):
|
||||||
|
if not ignore_special_cases:
|
||||||
|
from tractor.trionics import _taskc
|
||||||
|
_taskc._mask_cases.clear()
|
||||||
|
|
||||||
|
_lock = await tn.start(
|
||||||
|
hold_lock_forever,
|
||||||
|
)
|
||||||
|
with trio.move_on_after(0.2):
|
||||||
|
await tn.start(
|
||||||
|
hold_lock_forever,
|
||||||
|
)
|
||||||
|
|
||||||
|
tn.cancel_scope.cancel()
|
||||||
|
|
||||||
|
|
||||||
|
# XXX, manual test as script
|
||||||
|
if __name__ == '__main__':
|
||||||
|
tractor.log.get_console_log(level='info')
|
||||||
|
for case in [True, False]:
|
||||||
|
log.info(
|
||||||
|
f'\n'
|
||||||
|
f'------ RUNNING SCRIPT TRIAL ------\n'
|
||||||
|
f'ignore_special_cases: {case!r}\n'
|
||||||
|
)
|
||||||
|
trio.run(partial(
|
||||||
|
main,
|
||||||
|
ignore_special_cases=case,
|
||||||
|
loglevel='info',
|
||||||
|
))
|
||||||
|
|
@ -0,0 +1,195 @@
|
||||||
|
from contextlib import (
|
||||||
|
contextmanager as cm,
|
||||||
|
# TODO, any diff in async case(s)??
|
||||||
|
# asynccontextmanager as acm,
|
||||||
|
)
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
|
import tractor
|
||||||
|
import trio
|
||||||
|
|
||||||
|
|
||||||
|
log = tractor.log.get_logger(
|
||||||
|
name=__name__
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@cm
|
||||||
|
def teardown_on_exc(
|
||||||
|
raise_from_handler: bool = False,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
You could also have a teardown handler which catches any exc and
|
||||||
|
does some required teardown. In this case the problem is
|
||||||
|
compounded UNLESS you ensure the handler's scope is OUTSIDE the
|
||||||
|
`ux.aclose()`.. that is in the caller's enclosing scope.
|
||||||
|
|
||||||
|
'''
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
except BaseException as _berr:
|
||||||
|
berr = _berr
|
||||||
|
log.exception(
|
||||||
|
f'Handling termination teardown in child due to,\n'
|
||||||
|
f'{berr!r}\n'
|
||||||
|
)
|
||||||
|
if raise_from_handler:
|
||||||
|
# XXX teardown ops XXX
|
||||||
|
# on termination these steps say need to be run to
|
||||||
|
# ensure wider system consistency (like the state of
|
||||||
|
# remote connections/services).
|
||||||
|
#
|
||||||
|
# HOWEVER, any bug in this teardown code is also
|
||||||
|
# masked by the `tx.aclose()`!
|
||||||
|
# this is also true if `_tn.cancel_scope` is
|
||||||
|
# `.cancel_called` by the parent in a graceful
|
||||||
|
# request case..
|
||||||
|
|
||||||
|
# simulate a bug in teardown handler.
|
||||||
|
raise RuntimeError(
|
||||||
|
'woopsie teardown bug!'
|
||||||
|
)
|
||||||
|
|
||||||
|
raise # no teardown bug.
|
||||||
|
|
||||||
|
|
||||||
|
async def finite_stream_to_rent(
|
||||||
|
tx: trio.abc.SendChannel,
|
||||||
|
child_errors_mid_stream: bool,
|
||||||
|
raise_unmasked: bool,
|
||||||
|
|
||||||
|
task_status: trio.TaskStatus[
|
||||||
|
trio.CancelScope,
|
||||||
|
] = trio.TASK_STATUS_IGNORED,
|
||||||
|
):
|
||||||
|
async with (
|
||||||
|
# XXX without this unmasker the mid-streaming RTE is never
|
||||||
|
# reported since it is masked by the `tx.aclose()`
|
||||||
|
# call which in turn raises `Cancelled`!
|
||||||
|
#
|
||||||
|
# NOTE, this is WITHOUT doing any exception handling
|
||||||
|
# inside the child task!
|
||||||
|
#
|
||||||
|
# TODO, uncomment next LoC to see the supprsessed beg[RTE]!
|
||||||
|
tractor.trionics.maybe_raise_from_masking_exc(
|
||||||
|
raise_unmasked=raise_unmasked,
|
||||||
|
),
|
||||||
|
|
||||||
|
tx as tx, # .aclose() is the guilty masker chkpt!
|
||||||
|
|
||||||
|
# XXX, this ONLY matters in the
|
||||||
|
# `child_errors_mid_stream=False` case oddly!?
|
||||||
|
# THAT IS, if no tn is opened in that case then the
|
||||||
|
# test will not fail; it raises the RTE correctly?
|
||||||
|
#
|
||||||
|
# -> so it seems this new scope somehow affects the form of
|
||||||
|
# eventual in the parent EG?
|
||||||
|
tractor.trionics.maybe_open_nursery(
|
||||||
|
nursery=(
|
||||||
|
None
|
||||||
|
if not child_errors_mid_stream
|
||||||
|
else True
|
||||||
|
),
|
||||||
|
) as _tn,
|
||||||
|
):
|
||||||
|
# pass our scope back to parent for supervision\
|
||||||
|
# control.
|
||||||
|
cs: trio.CancelScope|None = (
|
||||||
|
None
|
||||||
|
if _tn is True
|
||||||
|
else _tn.cancel_scope
|
||||||
|
)
|
||||||
|
task_status.started(cs)
|
||||||
|
|
||||||
|
with teardown_on_exc(
|
||||||
|
raise_from_handler=not child_errors_mid_stream,
|
||||||
|
):
|
||||||
|
for i in range(100):
|
||||||
|
log.debug(
|
||||||
|
f'Child tx {i!r}\n'
|
||||||
|
)
|
||||||
|
if (
|
||||||
|
child_errors_mid_stream
|
||||||
|
and
|
||||||
|
i == 66
|
||||||
|
):
|
||||||
|
# oh wait but WOOPS there's a bug
|
||||||
|
# in that teardown code!?
|
||||||
|
raise RuntimeError(
|
||||||
|
'woopsie, a mid-streaming bug!?'
|
||||||
|
)
|
||||||
|
|
||||||
|
await tx.send(i)
|
||||||
|
|
||||||
|
|
||||||
|
async def main(
|
||||||
|
# TODO! toggle this for the 2 cases!
|
||||||
|
# 1. child errors mid-stream while parent is also requesting
|
||||||
|
# (graceful) cancel of that child streamer.
|
||||||
|
#
|
||||||
|
# 2. child contains a teardown handler which contains a
|
||||||
|
# bug and raises.
|
||||||
|
#
|
||||||
|
child_errors_mid_stream: bool,
|
||||||
|
|
||||||
|
raise_unmasked: bool = False,
|
||||||
|
loglevel: str = 'info',
|
||||||
|
):
|
||||||
|
tractor.log.get_console_log(level=loglevel)
|
||||||
|
|
||||||
|
# the `.aclose()` being checkpoints on these
|
||||||
|
# is the source of the problem..
|
||||||
|
tx, rx = trio.open_memory_channel(1)
|
||||||
|
|
||||||
|
async with (
|
||||||
|
tractor.trionics.collapse_eg(),
|
||||||
|
trio.open_nursery() as tn,
|
||||||
|
rx as rx,
|
||||||
|
):
|
||||||
|
_child_cs = await tn.start(
|
||||||
|
partial(
|
||||||
|
finite_stream_to_rent,
|
||||||
|
child_errors_mid_stream=child_errors_mid_stream,
|
||||||
|
raise_unmasked=raise_unmasked,
|
||||||
|
tx=tx,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
async for msg in rx:
|
||||||
|
log.debug(
|
||||||
|
f'Rent rx {msg!r}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
# simulate some external cancellation
|
||||||
|
# request **JUST BEFORE** the child errors.
|
||||||
|
if msg == 65:
|
||||||
|
log.cancel(
|
||||||
|
f'Cancelling parent on,\n'
|
||||||
|
f'msg={msg}\n'
|
||||||
|
f'\n'
|
||||||
|
f'Simulates OOB cancel request!\n'
|
||||||
|
)
|
||||||
|
tn.cancel_scope.cancel()
|
||||||
|
|
||||||
|
|
||||||
|
# XXX, manual test as script
|
||||||
|
if __name__ == '__main__':
|
||||||
|
tractor.log.get_console_log(level='info')
|
||||||
|
for case in [True, False]:
|
||||||
|
log.info(
|
||||||
|
f'\n'
|
||||||
|
f'------ RUNNING SCRIPT TRIAL ------\n'
|
||||||
|
f'child_errors_midstream: {case!r}\n'
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
trio.run(partial(
|
||||||
|
main,
|
||||||
|
child_errors_mid_stream=case,
|
||||||
|
# raise_unmasked=True,
|
||||||
|
loglevel='info',
|
||||||
|
))
|
||||||
|
except Exception as _exc:
|
||||||
|
exc = _exc
|
||||||
|
log.exception(
|
||||||
|
'Should have raised an RTE or Cancelled?\n'
|
||||||
|
)
|
||||||
|
breakpoint()
|
||||||
|
|
@ -0,0 +1,19 @@
|
||||||
|
Rework our ``.trionics.BroadcastReceiver`` internals to avoid method
|
||||||
|
recursion and approach a design and interface closer to ``trio``'s
|
||||||
|
``MemoryReceiveChannel``.
|
||||||
|
|
||||||
|
The details of the internal changes include:
|
||||||
|
|
||||||
|
- implementing a ``BroadcastReceiver.receive_nowait()`` and using it
|
||||||
|
within the async ``.receive()`` thus avoiding recursion from
|
||||||
|
``.receive()``.
|
||||||
|
- failing over to an internal ``._receive_from_underlying()`` when the
|
||||||
|
``_nowait()`` call raises ``trio.WouldBlock``
|
||||||
|
- adding ``BroadcastState.statistics()`` for debugging and testing both
|
||||||
|
internals and by users.
|
||||||
|
- add an internal ``BroadcastReceiver._raise_on_lag: bool`` which can be
|
||||||
|
set to avoid ``Lagged`` raising for possible use cases where a user
|
||||||
|
wants to choose between a [cheap or nasty
|
||||||
|
pattern](https://zguide.zeromq.org/docs/chapter7/#The-Cheap-or-Nasty-Pattern)
|
||||||
|
the the particular stream (we use this in ``piker``'s dark clearing
|
||||||
|
engine to avoid fast feeds breaking during HFT periods).
|
||||||
|
|
@ -0,0 +1,15 @@
|
||||||
|
Fixes to ensure IPC (channel) breakage doesn't result in hung actor
|
||||||
|
trees; the zombie reaping and general supervision machinery will always
|
||||||
|
clean up and terminate.
|
||||||
|
|
||||||
|
This includes not only the (mostly minor) fixes to solve these cases but
|
||||||
|
also a new extensive test suite in `test_advanced_faults.py` with an
|
||||||
|
accompanying highly configurable example module-script in
|
||||||
|
`examples/advanced_faults/ipc_failure_during_stream.py`. Tests ensure we
|
||||||
|
never get hang or zombies despite operating in debug mode and attempt to
|
||||||
|
simulate all possible IPC transport failure cases for a local-host actor
|
||||||
|
tree.
|
||||||
|
|
||||||
|
Further we simplify `Context.open_stream.__aexit__()` to just call
|
||||||
|
`MsgStream.aclose()` directly more or less avoiding a pure duplicate
|
||||||
|
code path.
|
||||||
|
|
@ -0,0 +1,7 @@
|
||||||
|
Drop `trio.Process.aclose()` usage, copy into our spawning code.
|
||||||
|
|
||||||
|
The details are laid out in https://github.com/goodboy/tractor/issues/330.
|
||||||
|
`trio` changed is process running quite some time ago, this just copies
|
||||||
|
out the small bit we needed (from the old `.aclose()`) for hard kills
|
||||||
|
where a soft runtime cancel request fails and our "zombie killer"
|
||||||
|
implementation kicks in.
|
||||||
|
|
@ -0,0 +1,15 @@
|
||||||
|
Switch to using the fork & fix of `pdb++`, `pdbp`:
|
||||||
|
https://github.com/mdmintz/pdbp
|
||||||
|
|
||||||
|
Allows us to sidestep a variety of issues that aren't being maintained
|
||||||
|
in the upstream project thanks to the hard work of @mdmintz!
|
||||||
|
|
||||||
|
We also include some default settings adjustments as per recent
|
||||||
|
development on the fork:
|
||||||
|
|
||||||
|
- sticky mode is still turned on by default but now activates when
|
||||||
|
a using the `ll` repl command.
|
||||||
|
- turn off line truncation by default to avoid inter-line gaps when
|
||||||
|
resizing the terimnal during use.
|
||||||
|
- when using the backtrace cmd either by `w` or `bt`, the config
|
||||||
|
automatically switches to non-sticky mode.
|
||||||
|
|
@ -0,0 +1,18 @@
|
||||||
|
First generate a built disti:
|
||||||
|
|
||||||
|
```
|
||||||
|
python -m pip install --upgrade build
|
||||||
|
python -m build --sdist --outdir dist/alpha5/
|
||||||
|
```
|
||||||
|
|
||||||
|
Then try a test ``pypi`` upload:
|
||||||
|
|
||||||
|
```
|
||||||
|
python -m twine upload --repository testpypi dist/alpha5/*
|
||||||
|
```
|
||||||
|
|
||||||
|
The push to `pypi` for realz.
|
||||||
|
|
||||||
|
```
|
||||||
|
python -m twine upload --repository testpypi dist/alpha5/*
|
||||||
|
```
|
||||||
144
pyproject.toml
144
pyproject.toml
|
|
@ -1,3 +1,117 @@
|
||||||
|
[build-system]
|
||||||
|
requires = ["hatchling"]
|
||||||
|
build-backend = "hatchling.build"
|
||||||
|
|
||||||
|
# ------ build-system ------
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "tractor"
|
||||||
|
version = "0.1.0a6dev0"
|
||||||
|
description = 'structured concurrent `trio`-"actors"'
|
||||||
|
authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }]
|
||||||
|
requires-python = ">= 3.11"
|
||||||
|
readme = "docs/README.rst"
|
||||||
|
license = "AGPL-3.0-or-later"
|
||||||
|
keywords = [
|
||||||
|
"trio",
|
||||||
|
"async",
|
||||||
|
"concurrency",
|
||||||
|
"structured concurrency",
|
||||||
|
"actor model",
|
||||||
|
"distributed",
|
||||||
|
"multiprocessing",
|
||||||
|
]
|
||||||
|
classifiers = [
|
||||||
|
"Development Status :: 3 - Alpha",
|
||||||
|
"Operating System :: POSIX :: Linux",
|
||||||
|
"Framework :: Trio",
|
||||||
|
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
|
||||||
|
"Programming Language :: Python :: Implementation :: CPython",
|
||||||
|
"Programming Language :: Python :: 3 :: Only",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Topic :: System :: Distributed Computing",
|
||||||
|
]
|
||||||
|
dependencies = [
|
||||||
|
# trio runtime and friends
|
||||||
|
# (poetry) proper range specs,
|
||||||
|
# https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5
|
||||||
|
# TODO, for 3.13 we must go go `0.27` which means we have to
|
||||||
|
# disable strict egs or port to handling them internally!
|
||||||
|
"trio>0.27",
|
||||||
|
"tricycle>=0.4.1,<0.5",
|
||||||
|
"wrapt>=1.16.0,<2",
|
||||||
|
"colorlog>=6.8.2,<7",
|
||||||
|
# built-in multi-actor `pdb` REPL
|
||||||
|
"pdbp>=1.6,<2", # windows only (from `pdbp`)
|
||||||
|
# typed IPC msging
|
||||||
|
"msgspec>=0.19.0",
|
||||||
|
"cffi>=1.17.1",
|
||||||
|
"bidict>=0.23.1",
|
||||||
|
]
|
||||||
|
|
||||||
|
# ------ project ------
|
||||||
|
|
||||||
|
[dependency-groups]
|
||||||
|
dev = [
|
||||||
|
# test suite
|
||||||
|
# TODO: maybe some of these layout choices?
|
||||||
|
# https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules
|
||||||
|
"pytest>=8.3.5",
|
||||||
|
"pexpect>=4.9.0,<5",
|
||||||
|
# `tractor.devx` tooling
|
||||||
|
"greenback>=1.2.1,<2",
|
||||||
|
"stackscope>=0.2.2,<0.3",
|
||||||
|
# ^ requires this?
|
||||||
|
"typing-extensions>=4.14.1",
|
||||||
|
|
||||||
|
"pyperclip>=1.9.0",
|
||||||
|
"prompt-toolkit>=3.0.50",
|
||||||
|
"xonsh>=0.19.2",
|
||||||
|
"psutil>=7.0.0",
|
||||||
|
]
|
||||||
|
# TODO, add these with sane versions; were originally in
|
||||||
|
# `requirements-docs.txt`..
|
||||||
|
# docs = [
|
||||||
|
# "sphinx>="
|
||||||
|
# "sphinx_book_theme>="
|
||||||
|
# ]
|
||||||
|
|
||||||
|
# ------ dependency-groups ------
|
||||||
|
|
||||||
|
# ------ dependency-groups ------
|
||||||
|
|
||||||
|
[tool.uv.sources]
|
||||||
|
# XXX NOTE, only for @goodboy's hacking on `pprint(sort_dicts=False)`
|
||||||
|
# for the `pp` alias..
|
||||||
|
# pdbp = { path = "../pdbp", editable = true }
|
||||||
|
|
||||||
|
# ------ tool.uv.sources ------
|
||||||
|
# TODO, distributed (multi-host) extensions
|
||||||
|
# linux kernel networking
|
||||||
|
# 'pyroute2
|
||||||
|
|
||||||
|
# ------ tool.uv.sources ------
|
||||||
|
|
||||||
|
[tool.uv]
|
||||||
|
# XXX NOTE, prefer the sys python bc apparently the distis from
|
||||||
|
# `astral` are built in a way that breaks `pdbp`+`tabcompleter`'s
|
||||||
|
# likely due to linking against `libedit` over `readline`..
|
||||||
|
# |_https://docs.astral.sh/uv/concepts/python-versions/#managed-python-distributions
|
||||||
|
# |_https://gregoryszorc.com/docs/python-build-standalone/main/quirks.html#use-of-libedit-on-linux
|
||||||
|
#
|
||||||
|
# https://docs.astral.sh/uv/reference/settings/#python-preference
|
||||||
|
python-preference = 'system'
|
||||||
|
|
||||||
|
# ------ tool.uv ------
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.sdist]
|
||||||
|
include = ["tractor"]
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.wheel]
|
||||||
|
include = ["tractor"]
|
||||||
|
|
||||||
|
# ------ tool.hatch ------
|
||||||
|
|
||||||
[tool.towncrier]
|
[tool.towncrier]
|
||||||
package = "tractor"
|
package = "tractor"
|
||||||
filename = "NEWS.rst"
|
filename = "NEWS.rst"
|
||||||
|
|
@ -7,22 +121,44 @@ title_format = "tractor {version} ({project_date})"
|
||||||
template = "nooz/_template.rst"
|
template = "nooz/_template.rst"
|
||||||
all_bullets = true
|
all_bullets = true
|
||||||
|
|
||||||
[[tool.towncrier.type]]
|
[[tool.towncrier.type]]
|
||||||
directory = "feature"
|
directory = "feature"
|
||||||
name = "Features"
|
name = "Features"
|
||||||
showcontent = true
|
showcontent = true
|
||||||
|
|
||||||
[[tool.towncrier.type]]
|
[[tool.towncrier.type]]
|
||||||
directory = "bugfix"
|
directory = "bugfix"
|
||||||
name = "Bug Fixes"
|
name = "Bug Fixes"
|
||||||
showcontent = true
|
showcontent = true
|
||||||
|
|
||||||
[[tool.towncrier.type]]
|
[[tool.towncrier.type]]
|
||||||
directory = "doc"
|
directory = "doc"
|
||||||
name = "Improved Documentation"
|
name = "Improved Documentation"
|
||||||
showcontent = true
|
showcontent = true
|
||||||
|
|
||||||
[[tool.towncrier.type]]
|
[[tool.towncrier.type]]
|
||||||
directory = "trivial"
|
directory = "trivial"
|
||||||
name = "Trivial/Internal Changes"
|
name = "Trivial/Internal Changes"
|
||||||
showcontent = true
|
showcontent = true
|
||||||
|
|
||||||
|
# ------ tool.towncrier ------
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
minversion = '6.0'
|
||||||
|
testpaths = [
|
||||||
|
'tests'
|
||||||
|
]
|
||||||
|
addopts = [
|
||||||
|
# TODO: figure out why this isn't working..
|
||||||
|
'--rootdir=./tests',
|
||||||
|
|
||||||
|
'--import-mode=importlib',
|
||||||
|
# don't show frickin captured logs AGAIN in the report..
|
||||||
|
'--show-capture=no',
|
||||||
|
]
|
||||||
|
log_cli = false
|
||||||
|
# TODO: maybe some of these layout choices?
|
||||||
|
# https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules
|
||||||
|
# pythonpath = "src"
|
||||||
|
|
||||||
|
# ------ tool.pytest ------
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,8 @@
|
||||||
|
# vim: ft=ini
|
||||||
|
# pytest.ini for tractor
|
||||||
|
|
||||||
|
[pytest]
|
||||||
|
# don't show frickin captured logs AGAIN in the report..
|
||||||
|
addopts = --show-capture='no'
|
||||||
|
log_cli = false
|
||||||
|
; minversion = 6.0
|
||||||
|
|
@ -1,2 +0,0 @@
|
||||||
sphinx
|
|
||||||
sphinx_book_theme
|
|
||||||
|
|
@ -1,8 +0,0 @@
|
||||||
pytest
|
|
||||||
pytest-trio
|
|
||||||
pytest-timeout
|
|
||||||
pdbpp
|
|
||||||
mypy
|
|
||||||
trio_typing
|
|
||||||
pexpect
|
|
||||||
towncrier
|
|
||||||
|
|
@ -0,0 +1,82 @@
|
||||||
|
# from default `ruff.toml` @
|
||||||
|
# https://docs.astral.sh/ruff/configuration/
|
||||||
|
|
||||||
|
# Exclude a variety of commonly ignored directories.
|
||||||
|
exclude = [
|
||||||
|
".bzr",
|
||||||
|
".direnv",
|
||||||
|
".eggs",
|
||||||
|
".git",
|
||||||
|
".git-rewrite",
|
||||||
|
".hg",
|
||||||
|
".ipynb_checkpoints",
|
||||||
|
".mypy_cache",
|
||||||
|
".nox",
|
||||||
|
".pants.d",
|
||||||
|
".pyenv",
|
||||||
|
".pytest_cache",
|
||||||
|
".pytype",
|
||||||
|
".ruff_cache",
|
||||||
|
".svn",
|
||||||
|
".tox",
|
||||||
|
".venv",
|
||||||
|
".vscode",
|
||||||
|
"__pypackages__",
|
||||||
|
"_build",
|
||||||
|
"buck-out",
|
||||||
|
"build",
|
||||||
|
"dist",
|
||||||
|
"node_modules",
|
||||||
|
"site-packages",
|
||||||
|
"venv",
|
||||||
|
]
|
||||||
|
|
||||||
|
# Same as Black.
|
||||||
|
line-length = 88
|
||||||
|
indent-width = 4
|
||||||
|
|
||||||
|
# Assume Python 3.9
|
||||||
|
target-version = "py311"
|
||||||
|
|
||||||
|
[lint]
|
||||||
|
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
|
||||||
|
# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
|
||||||
|
# McCabe complexity (`C901`) by default.
|
||||||
|
select = ["E4", "E7", "E9", "F"]
|
||||||
|
ignore = [
|
||||||
|
'E402', # https://docs.astral.sh/ruff/rules/module-import-not-at-top-of-file/
|
||||||
|
]
|
||||||
|
|
||||||
|
# Allow fix for all enabled rules (when `--fix`) is provided.
|
||||||
|
fixable = ["ALL"]
|
||||||
|
unfixable = []
|
||||||
|
|
||||||
|
# Allow unused variables when underscore-prefixed.
|
||||||
|
# dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
|
||||||
|
|
||||||
|
[format]
|
||||||
|
# Use single quotes in `ruff format`.
|
||||||
|
quote-style = "single"
|
||||||
|
|
||||||
|
# Like Black, indent with spaces, rather than tabs.
|
||||||
|
indent-style = "space"
|
||||||
|
|
||||||
|
# Like Black, respect magic trailing commas.
|
||||||
|
skip-magic-trailing-comma = false
|
||||||
|
|
||||||
|
# Like Black, automatically detect the appropriate line ending.
|
||||||
|
line-ending = "auto"
|
||||||
|
|
||||||
|
# Enable auto-formatting of code examples in docstrings. Markdown,
|
||||||
|
# reStructuredText code/literal blocks and doctests are all supported.
|
||||||
|
#
|
||||||
|
# This is currently disabled by default, but it is planned for this
|
||||||
|
# to be opt-out in the future.
|
||||||
|
docstring-code-format = false
|
||||||
|
|
||||||
|
# Set the line length limit used when formatting code snippets in
|
||||||
|
# docstrings.
|
||||||
|
#
|
||||||
|
# This only has an effect when the `docstring-code-format` setting is
|
||||||
|
# enabled.
|
||||||
|
docstring-code-line-length = "dynamic"
|
||||||
102
setup.py
102
setup.py
|
|
@ -1,102 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
#
|
|
||||||
# tractor: structured concurrent "actors".
|
|
||||||
#
|
|
||||||
# Copyright 2018-eternity Tyler Goodlet.
|
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
from setuptools import setup
|
|
||||||
|
|
||||||
with open('docs/README.rst', encoding='utf-8') as f:
|
|
||||||
readme = f.read()
|
|
||||||
|
|
||||||
|
|
||||||
setup(
|
|
||||||
name="tractor",
|
|
||||||
version='0.1.0a6dev0', # alpha zone
|
|
||||||
description='structured concurrrent "actors"',
|
|
||||||
long_description=readme,
|
|
||||||
license='AGPLv3',
|
|
||||||
author='Tyler Goodlet',
|
|
||||||
maintainer='Tyler Goodlet',
|
|
||||||
maintainer_email='jgbt@protonmail.com',
|
|
||||||
url='https://github.com/goodboy/tractor',
|
|
||||||
platforms=['linux', 'windows'],
|
|
||||||
packages=[
|
|
||||||
'tractor',
|
|
||||||
'tractor.experimental',
|
|
||||||
'tractor.trionics',
|
|
||||||
],
|
|
||||||
install_requires=[
|
|
||||||
|
|
||||||
# trio related
|
|
||||||
# proper range spec:
|
|
||||||
# https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5
|
|
||||||
'trio >= 0.22',
|
|
||||||
'async_generator',
|
|
||||||
'trio_typing',
|
|
||||||
'exceptiongroup',
|
|
||||||
|
|
||||||
# tooling
|
|
||||||
'tricycle',
|
|
||||||
'trio_typing',
|
|
||||||
|
|
||||||
# tooling
|
|
||||||
'colorlog',
|
|
||||||
'wrapt',
|
|
||||||
|
|
||||||
# serialization
|
|
||||||
'msgspec',
|
|
||||||
|
|
||||||
# debug mode REPL
|
|
||||||
'pdbpp',
|
|
||||||
|
|
||||||
# pip ref docs on these specs:
|
|
||||||
# https://pip.pypa.io/en/stable/reference/requirement-specifiers/#examples
|
|
||||||
# and pep:
|
|
||||||
# https://peps.python.org/pep-0440/#version-specifiers
|
|
||||||
|
|
||||||
# windows deps workaround for ``pdbpp``
|
|
||||||
# https://github.com/pdbpp/pdbpp/issues/498
|
|
||||||
# https://github.com/pdbpp/fancycompleter/issues/37
|
|
||||||
'pyreadline3 ; platform_system == "Windows"',
|
|
||||||
|
|
||||||
|
|
||||||
],
|
|
||||||
tests_require=['pytest'],
|
|
||||||
python_requires=">=3.9",
|
|
||||||
keywords=[
|
|
||||||
'trio',
|
|
||||||
'async',
|
|
||||||
'concurrency',
|
|
||||||
'structured concurrency',
|
|
||||||
'actor model',
|
|
||||||
'distributed',
|
|
||||||
'multiprocessing'
|
|
||||||
],
|
|
||||||
classifiers=[
|
|
||||||
"Development Status :: 3 - Alpha",
|
|
||||||
"Operating System :: POSIX :: Linux",
|
|
||||||
"Operating System :: Microsoft :: Windows",
|
|
||||||
"Framework :: Trio",
|
|
||||||
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
|
|
||||||
"Programming Language :: Python :: Implementation :: CPython",
|
|
||||||
"Programming Language :: Python :: 3 :: Only",
|
|
||||||
"Programming Language :: Python :: 3.10",
|
|
||||||
"Intended Audience :: Science/Research",
|
|
||||||
"Intended Audience :: Developers",
|
|
||||||
"Topic :: System :: Distributed Computing",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
@ -1,97 +1,26 @@
|
||||||
"""
|
"""
|
||||||
``tractor`` testing!!
|
Top level of the testing suites!
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
import sys
|
import sys
|
||||||
import subprocess
|
import subprocess
|
||||||
import os
|
import os
|
||||||
import random
|
|
||||||
import signal
|
import signal
|
||||||
import platform
|
import platform
|
||||||
import time
|
import time
|
||||||
import inspect
|
|
||||||
from functools import partial, wraps
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import trio
|
from tractor._testing import (
|
||||||
import tractor
|
examples_dir as examples_dir,
|
||||||
|
tractor_test as tractor_test,
|
||||||
|
expect_ctxc as expect_ctxc,
|
||||||
|
)
|
||||||
|
|
||||||
pytest_plugins = ['pytester']
|
pytest_plugins: list[str] = [
|
||||||
|
'pytester',
|
||||||
|
'tractor._testing.pytest',
|
||||||
def tractor_test(fn):
|
]
|
||||||
"""
|
|
||||||
Use:
|
|
||||||
|
|
||||||
@tractor_test
|
|
||||||
async def test_whatever():
|
|
||||||
await ...
|
|
||||||
|
|
||||||
If fixtures:
|
|
||||||
|
|
||||||
- ``arb_addr`` (a socket addr tuple where arbiter is listening)
|
|
||||||
- ``loglevel`` (logging level passed to tractor internals)
|
|
||||||
- ``start_method`` (subprocess spawning backend)
|
|
||||||
|
|
||||||
are defined in the `pytest` fixture space they will be automatically
|
|
||||||
injected to tests declaring these funcargs.
|
|
||||||
"""
|
|
||||||
@wraps(fn)
|
|
||||||
def wrapper(
|
|
||||||
*args,
|
|
||||||
loglevel=None,
|
|
||||||
arb_addr=None,
|
|
||||||
start_method=None,
|
|
||||||
**kwargs
|
|
||||||
):
|
|
||||||
# __tracebackhide__ = True
|
|
||||||
|
|
||||||
if 'arb_addr' in inspect.signature(fn).parameters:
|
|
||||||
# injects test suite fixture value to test as well
|
|
||||||
# as `run()`
|
|
||||||
kwargs['arb_addr'] = arb_addr
|
|
||||||
|
|
||||||
if 'loglevel' in inspect.signature(fn).parameters:
|
|
||||||
# allows test suites to define a 'loglevel' fixture
|
|
||||||
# that activates the internal logging
|
|
||||||
kwargs['loglevel'] = loglevel
|
|
||||||
|
|
||||||
if start_method is None:
|
|
||||||
if platform.system() == "Windows":
|
|
||||||
start_method = 'trio'
|
|
||||||
|
|
||||||
if 'start_method' in inspect.signature(fn).parameters:
|
|
||||||
# set of subprocess spawning backends
|
|
||||||
kwargs['start_method'] = start_method
|
|
||||||
|
|
||||||
if kwargs:
|
|
||||||
|
|
||||||
# use explicit root actor start
|
|
||||||
|
|
||||||
async def _main():
|
|
||||||
async with tractor.open_root_actor(
|
|
||||||
# **kwargs,
|
|
||||||
arbiter_addr=arb_addr,
|
|
||||||
loglevel=loglevel,
|
|
||||||
start_method=start_method,
|
|
||||||
|
|
||||||
# TODO: only enable when pytest is passed --pdb
|
|
||||||
# debug_mode=True,
|
|
||||||
|
|
||||||
):
|
|
||||||
await fn(*args, **kwargs)
|
|
||||||
|
|
||||||
main = _main
|
|
||||||
|
|
||||||
else:
|
|
||||||
# use implicit root actor start
|
|
||||||
main = partial(fn, *args, **kwargs)
|
|
||||||
|
|
||||||
return trio.run(main)
|
|
||||||
|
|
||||||
return wrapper
|
|
||||||
|
|
||||||
|
|
||||||
_arb_addr = '127.0.0.1', random.randint(1000, 9999)
|
|
||||||
|
|
||||||
|
|
||||||
# Sending signal.SIGINT on subprocess fails on windows. Use CTRL_* alternatives
|
# Sending signal.SIGINT on subprocess fails on windows. Use CTRL_* alternatives
|
||||||
|
|
@ -104,7 +33,11 @@ else:
|
||||||
_KILL_SIGNAL = signal.SIGKILL
|
_KILL_SIGNAL = signal.SIGKILL
|
||||||
_INT_SIGNAL = signal.SIGINT
|
_INT_SIGNAL = signal.SIGINT
|
||||||
_INT_RETURN_CODE = 1 if sys.version_info < (3, 8) else -signal.SIGINT.value
|
_INT_RETURN_CODE = 1 if sys.version_info < (3, 8) else -signal.SIGINT.value
|
||||||
_PROC_SPAWN_WAIT = 0.6 if sys.version_info < (3, 7) else 0.4
|
_PROC_SPAWN_WAIT = (
|
||||||
|
0.6
|
||||||
|
if sys.version_info < (3, 7)
|
||||||
|
else 0.4
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
no_windows = pytest.mark.skipif(
|
no_windows = pytest.mark.skipif(
|
||||||
|
|
@ -113,36 +46,23 @@ no_windows = pytest.mark.skipif(
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def repodir():
|
def pytest_addoption(
|
||||||
"""Return the abspath to the repo directory.
|
parser: pytest.Parser,
|
||||||
"""
|
):
|
||||||
dirname = os.path.dirname
|
# ?TODO? should this be exposed from our `._testing.pytest`
|
||||||
dirpath = os.path.abspath(
|
# plugin or should we make it more explicit with `--tl` for
|
||||||
dirname(dirname(os.path.realpath(__file__)))
|
# tractor logging like we do in other client projects?
|
||||||
)
|
|
||||||
return dirpath
|
|
||||||
|
|
||||||
|
|
||||||
def pytest_addoption(parser):
|
|
||||||
parser.addoption(
|
parser.addoption(
|
||||||
"--ll", action="store", dest='loglevel',
|
"--ll",
|
||||||
|
action="store",
|
||||||
|
dest='loglevel',
|
||||||
default='ERROR', help="logging level to set when testing"
|
default='ERROR', help="logging level to set when testing"
|
||||||
)
|
)
|
||||||
|
|
||||||
parser.addoption(
|
|
||||||
"--spawn-backend", action="store", dest='spawn_backend',
|
|
||||||
default='trio',
|
|
||||||
help="Processing spawning backend to use for test run",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def pytest_configure(config):
|
|
||||||
backend = config.option.spawn_backend
|
|
||||||
tractor._spawn.try_set_start_method(backend)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope='session', autouse=True)
|
@pytest.fixture(scope='session', autouse=True)
|
||||||
def loglevel(request):
|
def loglevel(request):
|
||||||
|
import tractor
|
||||||
orig = tractor.log._default_loglevel
|
orig = tractor.log._default_loglevel
|
||||||
level = tractor.log._default_loglevel = request.config.option.loglevel
|
level = tractor.log._default_loglevel = request.config.option.loglevel
|
||||||
tractor.log.get_console_log(level)
|
tractor.log.get_console_log(level)
|
||||||
|
|
@ -150,87 +70,148 @@ def loglevel(request):
|
||||||
tractor.log._default_loglevel = orig
|
tractor.log._default_loglevel = orig
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope='session')
|
|
||||||
def spawn_backend(request):
|
|
||||||
return request.config.option.spawn_backend
|
|
||||||
|
|
||||||
|
|
||||||
_ci_env: bool = os.environ.get('CI', False)
|
_ci_env: bool = os.environ.get('CI', False)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope='session')
|
@pytest.fixture(scope='session')
|
||||||
def ci_env() -> bool:
|
def ci_env() -> bool:
|
||||||
"""Detect CI envoirment.
|
'''
|
||||||
"""
|
Detect CI environment.
|
||||||
|
|
||||||
|
'''
|
||||||
return _ci_env
|
return _ci_env
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope='session')
|
def sig_prog(
|
||||||
def arb_addr():
|
proc: subprocess.Popen,
|
||||||
return _arb_addr
|
sig: int,
|
||||||
|
canc_timeout: float = 0.1,
|
||||||
|
) -> int:
|
||||||
def pytest_generate_tests(metafunc):
|
|
||||||
spawn_backend = metafunc.config.option.spawn_backend
|
|
||||||
|
|
||||||
if not spawn_backend:
|
|
||||||
# XXX some weird windows bug with `pytest`?
|
|
||||||
spawn_backend = 'trio'
|
|
||||||
|
|
||||||
# TODO: maybe just use the literal `._spawn.SpawnMethodKey`?
|
|
||||||
assert spawn_backend in (
|
|
||||||
'mp_spawn',
|
|
||||||
'mp_forkserver',
|
|
||||||
'trio',
|
|
||||||
)
|
|
||||||
|
|
||||||
# NOTE: used to be used to dyanmically parametrize tests for when
|
|
||||||
# you just passed --spawn-backend=`mp` on the cli, but now we expect
|
|
||||||
# that cli input to be manually specified, BUT, maybe we'll do
|
|
||||||
# something like this again in the future?
|
|
||||||
if 'start_method' in metafunc.fixturenames:
|
|
||||||
metafunc.parametrize("start_method", [spawn_backend], scope='module')
|
|
||||||
|
|
||||||
|
|
||||||
def sig_prog(proc, sig):
|
|
||||||
"Kill the actor-process with ``sig``."
|
"Kill the actor-process with ``sig``."
|
||||||
proc.send_signal(sig)
|
proc.send_signal(sig)
|
||||||
time.sleep(0.1)
|
time.sleep(canc_timeout)
|
||||||
if not proc.poll():
|
if not proc.poll():
|
||||||
# TODO: why sometimes does SIGINT not work on teardown?
|
# TODO: why sometimes does SIGINT not work on teardown?
|
||||||
# seems to happen only when trace logging enabled?
|
# seems to happen only when trace logging enabled?
|
||||||
proc.send_signal(_KILL_SIGNAL)
|
proc.send_signal(_KILL_SIGNAL)
|
||||||
ret = proc.wait()
|
ret: int = proc.wait()
|
||||||
assert ret
|
assert ret
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: factor into @cm and move to `._testing`?
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def daemon(loglevel, testdir, arb_addr):
|
def daemon(
|
||||||
"""Run a daemon actor as a "remote arbiter".
|
debug_mode: bool,
|
||||||
"""
|
loglevel: str,
|
||||||
if loglevel in ('trace', 'debug'):
|
testdir: pytest.Pytester,
|
||||||
# too much logging will lock up the subproc (smh)
|
reg_addr: tuple[str, int],
|
||||||
loglevel = 'info'
|
tpt_proto: str,
|
||||||
|
|
||||||
cmdargs = [
|
) -> subprocess.Popen:
|
||||||
sys.executable, '-c',
|
'''
|
||||||
"import tractor; tractor.run_daemon([], arbiter_addr={}, loglevel={})"
|
Run a daemon root actor as a separate actor-process tree and
|
||||||
.format(
|
"remote registrar" for discovery-protocol related tests.
|
||||||
arb_addr,
|
|
||||||
"'{}'".format(loglevel) if loglevel else None)
|
'''
|
||||||
|
if loglevel in ('trace', 'debug'):
|
||||||
|
# XXX: too much logging will lock up the subproc (smh)
|
||||||
|
loglevel: str = 'info'
|
||||||
|
|
||||||
|
code: str = (
|
||||||
|
"import tractor; "
|
||||||
|
"tractor.run_daemon([], "
|
||||||
|
"registry_addrs={reg_addrs}, "
|
||||||
|
"debug_mode={debug_mode}, "
|
||||||
|
"loglevel={ll})"
|
||||||
|
).format(
|
||||||
|
reg_addrs=str([reg_addr]),
|
||||||
|
ll="'{}'".format(loglevel) if loglevel else None,
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
)
|
||||||
|
cmd: list[str] = [
|
||||||
|
sys.executable,
|
||||||
|
'-c', code,
|
||||||
]
|
]
|
||||||
kwargs = dict()
|
# breakpoint()
|
||||||
|
kwargs = {}
|
||||||
if platform.system() == 'Windows':
|
if platform.system() == 'Windows':
|
||||||
# without this, tests hang on windows forever
|
# without this, tests hang on windows forever
|
||||||
kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP
|
kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP
|
||||||
|
|
||||||
proc = testdir.popen(
|
proc: subprocess.Popen = testdir.popen(
|
||||||
cmdargs,
|
cmd,
|
||||||
stdout=subprocess.PIPE,
|
|
||||||
stderr=subprocess.PIPE,
|
|
||||||
**kwargs,
|
**kwargs,
|
||||||
)
|
)
|
||||||
assert not proc.returncode
|
|
||||||
|
# UDS sockets are **really** fast to bind()/listen()/connect()
|
||||||
|
# so it's often required that we delay a bit more starting
|
||||||
|
# the first actor-tree..
|
||||||
|
if tpt_proto == 'uds':
|
||||||
|
global _PROC_SPAWN_WAIT
|
||||||
|
_PROC_SPAWN_WAIT = 0.6
|
||||||
|
|
||||||
time.sleep(_PROC_SPAWN_WAIT)
|
time.sleep(_PROC_SPAWN_WAIT)
|
||||||
|
|
||||||
|
assert not proc.returncode
|
||||||
yield proc
|
yield proc
|
||||||
sig_prog(proc, _INT_SIGNAL)
|
sig_prog(proc, _INT_SIGNAL)
|
||||||
|
|
||||||
|
# XXX! yeah.. just be reaaal careful with this bc sometimes it
|
||||||
|
# can lock up on the `_io.BufferedReader` and hang..
|
||||||
|
stderr: str = proc.stderr.read().decode()
|
||||||
|
if stderr:
|
||||||
|
print(
|
||||||
|
f'Daemon actor tree produced STDERR:\n'
|
||||||
|
f'{proc.args}\n'
|
||||||
|
f'\n'
|
||||||
|
f'{stderr}\n'
|
||||||
|
)
|
||||||
|
if proc.returncode != -2:
|
||||||
|
raise RuntimeError(
|
||||||
|
'Daemon actor tree failed !?\n'
|
||||||
|
f'{proc.args}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# @pytest.fixture(autouse=True)
|
||||||
|
# def shared_last_failed(pytestconfig):
|
||||||
|
# val = pytestconfig.cache.get("example/value", None)
|
||||||
|
# breakpoint()
|
||||||
|
# if val is None:
|
||||||
|
# pytestconfig.cache.set("example/value", val)
|
||||||
|
# return val
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: a way to let test scripts (like from `examples/`)
|
||||||
|
# guarantee they won't `registry_addrs` collide!
|
||||||
|
# -[ ] maybe use some kinda standard `def main()` arg-spec that
|
||||||
|
# we can introspect from a fixture that is called from the test
|
||||||
|
# body?
|
||||||
|
# -[ ] test and figure out typing for below prototype! Bp
|
||||||
|
#
|
||||||
|
# @pytest.fixture
|
||||||
|
# def set_script_runtime_args(
|
||||||
|
# reg_addr: tuple,
|
||||||
|
# ) -> Callable[[...], None]:
|
||||||
|
|
||||||
|
# def import_n_partial_in_args_n_triorun(
|
||||||
|
# script: Path, # under examples?
|
||||||
|
# **runtime_args,
|
||||||
|
# ) -> Callable[[], Any]: # a `partial`-ed equiv of `trio.run()`
|
||||||
|
|
||||||
|
# # NOTE, below is taken from
|
||||||
|
# # `.test_advanced_faults.test_ipc_channel_break_during_stream`
|
||||||
|
# mod: ModuleType = import_path(
|
||||||
|
# examples_dir() / 'advanced_faults'
|
||||||
|
# / 'ipc_failure_during_stream.py',
|
||||||
|
# root=examples_dir(),
|
||||||
|
# consider_namespace_packages=False,
|
||||||
|
# )
|
||||||
|
# return partial(
|
||||||
|
# trio.run,
|
||||||
|
# partial(
|
||||||
|
# mod.main,
|
||||||
|
# **runtime_args,
|
||||||
|
# )
|
||||||
|
# )
|
||||||
|
# return import_n_partial_in_args_n_triorun
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,253 @@
|
||||||
|
'''
|
||||||
|
`tractor.devx.*` tooling sub-pkg test space.
|
||||||
|
|
||||||
|
'''
|
||||||
|
from __future__ import annotations
|
||||||
|
import time
|
||||||
|
from typing import (
|
||||||
|
Callable,
|
||||||
|
TYPE_CHECKING,
|
||||||
|
)
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from pexpect.exceptions import (
|
||||||
|
TIMEOUT,
|
||||||
|
)
|
||||||
|
from pexpect.spawnbase import SpawnBase
|
||||||
|
|
||||||
|
from tractor._testing import (
|
||||||
|
mk_cmd,
|
||||||
|
)
|
||||||
|
from tractor.devx.debug import (
|
||||||
|
_pause_msg as _pause_msg,
|
||||||
|
_crash_msg as _crash_msg,
|
||||||
|
_repl_fail_msg as _repl_fail_msg,
|
||||||
|
_ctlc_ignore_header as _ctlc_ignore_header,
|
||||||
|
)
|
||||||
|
from ..conftest import (
|
||||||
|
_ci_env,
|
||||||
|
)
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from pexpect import pty_spawn
|
||||||
|
|
||||||
|
|
||||||
|
# a fn that sub-instantiates a `pexpect.spawn()`
|
||||||
|
# and returns it.
|
||||||
|
type PexpectSpawner = Callable[[str], pty_spawn.spawn]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def spawn(
|
||||||
|
start_method: str,
|
||||||
|
testdir: pytest.Pytester,
|
||||||
|
reg_addr: tuple[str, int],
|
||||||
|
|
||||||
|
) -> PexpectSpawner:
|
||||||
|
'''
|
||||||
|
Use the `pexpect` module shipped via `testdir.spawn()` to
|
||||||
|
run an `./examples/..` script by name.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if start_method != 'trio':
|
||||||
|
pytest.skip(
|
||||||
|
'`pexpect` based tests only supported on `trio` backend'
|
||||||
|
)
|
||||||
|
|
||||||
|
def unset_colors():
|
||||||
|
'''
|
||||||
|
Python 3.13 introduced colored tracebacks that break patt
|
||||||
|
matching,
|
||||||
|
|
||||||
|
https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS
|
||||||
|
https://docs.python.org/3/using/cmdline.html#using-on-controlling-color
|
||||||
|
|
||||||
|
'''
|
||||||
|
import os
|
||||||
|
os.environ['PYTHON_COLORS'] = '0'
|
||||||
|
|
||||||
|
def _spawn(
|
||||||
|
cmd: str,
|
||||||
|
**mkcmd_kwargs,
|
||||||
|
) -> pty_spawn.spawn:
|
||||||
|
unset_colors()
|
||||||
|
return testdir.spawn(
|
||||||
|
cmd=mk_cmd(
|
||||||
|
cmd,
|
||||||
|
**mkcmd_kwargs,
|
||||||
|
),
|
||||||
|
expect_timeout=3,
|
||||||
|
# preexec_fn=unset_colors,
|
||||||
|
# ^TODO? get `pytest` core to expose underlying
|
||||||
|
# `pexpect.spawn()` stuff?
|
||||||
|
)
|
||||||
|
|
||||||
|
# such that test-dep can pass input script name.
|
||||||
|
return _spawn # the `PexpectSpawner`, type alias.
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(
|
||||||
|
params=[False, True],
|
||||||
|
ids='ctl-c={}'.format,
|
||||||
|
)
|
||||||
|
def ctlc(
|
||||||
|
request,
|
||||||
|
ci_env: bool,
|
||||||
|
|
||||||
|
) -> bool:
|
||||||
|
|
||||||
|
use_ctlc = request.param
|
||||||
|
|
||||||
|
node = request.node
|
||||||
|
markers = node.own_markers
|
||||||
|
for mark in markers:
|
||||||
|
if mark.name == 'has_nested_actors':
|
||||||
|
pytest.skip(
|
||||||
|
f'Test {node} has nested actors and fails with Ctrl-C.\n'
|
||||||
|
f'The test can sometimes run fine locally but until'
|
||||||
|
' we solve' 'this issue this CI test will be xfail:\n'
|
||||||
|
'https://github.com/goodboy/tractor/issues/320'
|
||||||
|
)
|
||||||
|
|
||||||
|
if mark.name == 'ctlcs_bish':
|
||||||
|
pytest.skip(
|
||||||
|
f'Test {node} prolly uses something from the stdlib (namely `asyncio`..)\n'
|
||||||
|
f'The test and/or underlying example script can *sometimes* run fine '
|
||||||
|
f'locally but more then likely until the cpython peeps get their sh#$ together, '
|
||||||
|
f'this test will definitely not behave like `trio` under SIGINT..\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
if use_ctlc:
|
||||||
|
# XXX: disable pygments highlighting for auto-tests
|
||||||
|
# since some envs (like actions CI) will struggle
|
||||||
|
# the the added color-char encoding..
|
||||||
|
from tractor.devx.debug import TractorConfig
|
||||||
|
TractorConfig.use_pygements = False
|
||||||
|
|
||||||
|
yield use_ctlc
|
||||||
|
|
||||||
|
|
||||||
|
def expect(
|
||||||
|
child,
|
||||||
|
|
||||||
|
# normally a `pdb` prompt by default
|
||||||
|
patt: str,
|
||||||
|
|
||||||
|
**kwargs,
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
'''
|
||||||
|
Expect wrapper that prints last seen console
|
||||||
|
data before failing.
|
||||||
|
|
||||||
|
'''
|
||||||
|
try:
|
||||||
|
child.expect(
|
||||||
|
patt,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
except TIMEOUT:
|
||||||
|
before = str(child.before.decode())
|
||||||
|
print(before)
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
PROMPT = r"\(Pdb\+\)"
|
||||||
|
|
||||||
|
|
||||||
|
def in_prompt_msg(
|
||||||
|
child: SpawnBase,
|
||||||
|
parts: list[str],
|
||||||
|
|
||||||
|
pause_on_false: bool = False,
|
||||||
|
err_on_false: bool = False,
|
||||||
|
print_prompt_on_false: bool = True,
|
||||||
|
|
||||||
|
) -> bool:
|
||||||
|
'''
|
||||||
|
Predicate check if (the prompt's) std-streams output has all
|
||||||
|
`str`-parts in it.
|
||||||
|
|
||||||
|
Can be used in test asserts for bulk matching expected
|
||||||
|
log/REPL output for a given `pdb` interact point.
|
||||||
|
|
||||||
|
'''
|
||||||
|
__tracebackhide__: bool = False
|
||||||
|
|
||||||
|
before: str = str(child.before.decode())
|
||||||
|
for part in parts:
|
||||||
|
if part not in before:
|
||||||
|
if pause_on_false:
|
||||||
|
import pdbp
|
||||||
|
pdbp.set_trace()
|
||||||
|
|
||||||
|
if print_prompt_on_false:
|
||||||
|
print(before)
|
||||||
|
|
||||||
|
if err_on_false:
|
||||||
|
raise ValueError(
|
||||||
|
f'Could not find pattern in `before` output?\n'
|
||||||
|
f'part: {part!r}\n'
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: todo support terminal color-chars stripping so we can match
|
||||||
|
# against call stack frame output from the the 'll' command the like!
|
||||||
|
# -[ ] SO answer for stipping ANSI codes: https://stackoverflow.com/a/14693789
|
||||||
|
def assert_before(
|
||||||
|
child: SpawnBase,
|
||||||
|
patts: list[str],
|
||||||
|
|
||||||
|
**kwargs,
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
__tracebackhide__: bool = False
|
||||||
|
|
||||||
|
assert in_prompt_msg(
|
||||||
|
child=child,
|
||||||
|
parts=patts,
|
||||||
|
|
||||||
|
# since this is an "assert" helper ;)
|
||||||
|
err_on_false=True,
|
||||||
|
**kwargs
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def do_ctlc(
|
||||||
|
child,
|
||||||
|
count: int = 3,
|
||||||
|
delay: float = 0.1,
|
||||||
|
patt: str|None = None,
|
||||||
|
|
||||||
|
# expect repl UX to reprint the prompt after every
|
||||||
|
# ctrl-c send.
|
||||||
|
# XXX: no idea but, in CI this never seems to work even on 3.10 so
|
||||||
|
# needs some further investigation potentially...
|
||||||
|
expect_prompt: bool = not _ci_env,
|
||||||
|
|
||||||
|
) -> str|None:
|
||||||
|
|
||||||
|
before: str|None = None
|
||||||
|
|
||||||
|
# make sure ctl-c sends don't do anything but repeat output
|
||||||
|
for _ in range(count):
|
||||||
|
time.sleep(delay)
|
||||||
|
child.sendcontrol('c')
|
||||||
|
|
||||||
|
# TODO: figure out why this makes CI fail..
|
||||||
|
# if you run this test manually it works just fine..
|
||||||
|
if expect_prompt:
|
||||||
|
time.sleep(delay)
|
||||||
|
child.expect(PROMPT)
|
||||||
|
before = str(child.before.decode())
|
||||||
|
time.sleep(delay)
|
||||||
|
|
||||||
|
if patt:
|
||||||
|
# should see the last line on console
|
||||||
|
assert patt in before
|
||||||
|
|
||||||
|
# return the console content up to the final prompt
|
||||||
|
return before
|
||||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,381 @@
|
||||||
|
'''
|
||||||
|
That "foreign loop/thread" debug REPL support better ALSO WORK!
|
||||||
|
|
||||||
|
Same as `test_native_pause.py`.
|
||||||
|
All these tests can be understood (somewhat) by running the
|
||||||
|
equivalent `examples/debugging/` scripts manually.
|
||||||
|
|
||||||
|
'''
|
||||||
|
from contextlib import (
|
||||||
|
contextmanager as cm,
|
||||||
|
)
|
||||||
|
# from functools import partial
|
||||||
|
# import itertools
|
||||||
|
import time
|
||||||
|
# from typing import (
|
||||||
|
# Iterator,
|
||||||
|
# )
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from pexpect.exceptions import (
|
||||||
|
TIMEOUT,
|
||||||
|
EOF,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .conftest import (
|
||||||
|
# _ci_env,
|
||||||
|
do_ctlc,
|
||||||
|
PROMPT,
|
||||||
|
# expect,
|
||||||
|
in_prompt_msg,
|
||||||
|
assert_before,
|
||||||
|
_pause_msg,
|
||||||
|
_crash_msg,
|
||||||
|
_ctlc_ignore_header,
|
||||||
|
# _repl_fail_msg,
|
||||||
|
)
|
||||||
|
|
||||||
|
@cm
|
||||||
|
def maybe_expect_timeout(
|
||||||
|
ctlc: bool = False,
|
||||||
|
) -> None:
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
except TIMEOUT:
|
||||||
|
# breakpoint()
|
||||||
|
if ctlc:
|
||||||
|
pytest.xfail(
|
||||||
|
'Some kinda redic threading SIGINT bug i think?\n'
|
||||||
|
'See the notes in `examples/debugging/sync_bp.py`..\n'
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.ctlcs_bish
|
||||||
|
def test_pause_from_sync(
|
||||||
|
spawn,
|
||||||
|
ctlc: bool,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Verify we can use the `pdbp` REPL from sync functions AND from
|
||||||
|
any thread spawned with `trio.to_thread.run_sync()`.
|
||||||
|
|
||||||
|
`examples/debugging/sync_bp.py`
|
||||||
|
|
||||||
|
'''
|
||||||
|
child = spawn('sync_bp')
|
||||||
|
|
||||||
|
# first `sync_pause()` after nurseries open
|
||||||
|
child.expect(PROMPT)
|
||||||
|
assert_before(
|
||||||
|
child,
|
||||||
|
[
|
||||||
|
# pre-prompt line
|
||||||
|
_pause_msg,
|
||||||
|
"<Task '__main__.main'",
|
||||||
|
"('root'",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
if ctlc:
|
||||||
|
do_ctlc(child)
|
||||||
|
# ^NOTE^ subactor not spawned yet; don't need extra delay.
|
||||||
|
|
||||||
|
child.sendline('c')
|
||||||
|
|
||||||
|
# first `await tractor.pause()` inside `p.open_context()` body
|
||||||
|
child.expect(PROMPT)
|
||||||
|
|
||||||
|
# XXX shouldn't see gb loaded message with PDB loglevel!
|
||||||
|
# assert not in_prompt_msg(
|
||||||
|
# child,
|
||||||
|
# ['`greenback` portal opened!'],
|
||||||
|
# )
|
||||||
|
# should be same root task
|
||||||
|
assert_before(
|
||||||
|
child,
|
||||||
|
[
|
||||||
|
_pause_msg,
|
||||||
|
"<Task '__main__.main'",
|
||||||
|
"('root'",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
if ctlc:
|
||||||
|
do_ctlc(
|
||||||
|
child,
|
||||||
|
# NOTE: setting this to 0 (or some other sufficient
|
||||||
|
# small val) can cause the test to fail since the
|
||||||
|
# `subactor` suffers a race where the root/parent
|
||||||
|
# sends an actor-cancel prior to it hitting its pause
|
||||||
|
# point; by def the value is 0.1
|
||||||
|
delay=0.4,
|
||||||
|
)
|
||||||
|
|
||||||
|
# XXX, fwiw without a brief sleep here the SIGINT might actually
|
||||||
|
# trigger "subactor" cancellation by its parent before the
|
||||||
|
# shield-handler is engaged.
|
||||||
|
#
|
||||||
|
# => similar to the `delay` input to `do_ctlc()` below, setting
|
||||||
|
# this too low can cause the test to fail since the `subactor`
|
||||||
|
# suffers a race where the root/parent sends an actor-cancel
|
||||||
|
# prior to the context task hitting its pause point (and thus
|
||||||
|
# engaging the `sigint_shield()` handler in time); this value
|
||||||
|
# seems be good enuf?
|
||||||
|
time.sleep(0.6)
|
||||||
|
|
||||||
|
# one of the bg thread or subactor should have
|
||||||
|
# `Lock.acquire()`-ed
|
||||||
|
# (NOT both, which will result in REPL clobbering!)
|
||||||
|
attach_patts: dict[str, list[str]] = {
|
||||||
|
'subactor': [
|
||||||
|
"'start_n_sync_pause'",
|
||||||
|
"('subactor'",
|
||||||
|
],
|
||||||
|
'inline_root_bg_thread': [
|
||||||
|
"<Thread(inline_root_bg_thread",
|
||||||
|
"('root'",
|
||||||
|
],
|
||||||
|
'start_soon_root_bg_thread': [
|
||||||
|
"<Thread(start_soon_root_bg_thread",
|
||||||
|
"('root'",
|
||||||
|
],
|
||||||
|
}
|
||||||
|
conts: int = 0 # for debugging below matching logic on failure
|
||||||
|
while attach_patts:
|
||||||
|
child.sendline('c')
|
||||||
|
conts += 1
|
||||||
|
child.expect(PROMPT)
|
||||||
|
before = str(child.before.decode())
|
||||||
|
for key in attach_patts:
|
||||||
|
if key in before:
|
||||||
|
attach_key: str = key
|
||||||
|
expected_patts: str = attach_patts.pop(key)
|
||||||
|
assert_before(
|
||||||
|
child,
|
||||||
|
[_pause_msg]
|
||||||
|
+
|
||||||
|
expected_patts
|
||||||
|
)
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
pytest.fail(
|
||||||
|
f'No keys found?\n\n'
|
||||||
|
f'{attach_patts.keys()}\n\n'
|
||||||
|
f'{before}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
# ensure no other task/threads engaged a REPL
|
||||||
|
# at the same time as the one that was detected above.
|
||||||
|
for key, other_patts in attach_patts.copy().items():
|
||||||
|
assert not in_prompt_msg(
|
||||||
|
child,
|
||||||
|
other_patts,
|
||||||
|
)
|
||||||
|
|
||||||
|
if ctlc:
|
||||||
|
do_ctlc(
|
||||||
|
child,
|
||||||
|
patt=attach_key,
|
||||||
|
# NOTE same as comment above
|
||||||
|
delay=0.4,
|
||||||
|
)
|
||||||
|
|
||||||
|
child.sendline('c')
|
||||||
|
|
||||||
|
# XXX TODO, weird threading bug it seems despite the
|
||||||
|
# `abandon_on_cancel: bool` setting to
|
||||||
|
# `trio.to_thread.run_sync()`..
|
||||||
|
with maybe_expect_timeout(
|
||||||
|
ctlc=ctlc,
|
||||||
|
):
|
||||||
|
child.expect(EOF)
|
||||||
|
|
||||||
|
|
||||||
|
def expect_any_of(
|
||||||
|
attach_patts: dict[str, list[str]],
|
||||||
|
child, # what type?
|
||||||
|
ctlc: bool = False,
|
||||||
|
prompt: str = _ctlc_ignore_header,
|
||||||
|
ctlc_delay: float = .4,
|
||||||
|
|
||||||
|
) -> list[str]:
|
||||||
|
'''
|
||||||
|
Receive any of a `list[str]` of patterns provided in
|
||||||
|
`attach_patts`.
|
||||||
|
|
||||||
|
Used to test racing prompts from multiple actors and/or
|
||||||
|
tasks using a common root process' `pdbp` REPL.
|
||||||
|
|
||||||
|
'''
|
||||||
|
assert attach_patts
|
||||||
|
|
||||||
|
child.expect(PROMPT)
|
||||||
|
before = str(child.before.decode())
|
||||||
|
|
||||||
|
for attach_key in attach_patts:
|
||||||
|
if attach_key in before:
|
||||||
|
expected_patts: str = attach_patts.pop(attach_key)
|
||||||
|
assert_before(
|
||||||
|
child,
|
||||||
|
expected_patts
|
||||||
|
)
|
||||||
|
break # from for
|
||||||
|
else:
|
||||||
|
pytest.fail(
|
||||||
|
f'No keys found?\n\n'
|
||||||
|
f'{attach_patts.keys()}\n\n'
|
||||||
|
f'{before}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
# ensure no other task/threads engaged a REPL
|
||||||
|
# at the same time as the one that was detected above.
|
||||||
|
for key, other_patts in attach_patts.copy().items():
|
||||||
|
assert not in_prompt_msg(
|
||||||
|
child,
|
||||||
|
other_patts,
|
||||||
|
)
|
||||||
|
|
||||||
|
if ctlc:
|
||||||
|
do_ctlc(
|
||||||
|
child,
|
||||||
|
patt=prompt,
|
||||||
|
# NOTE same as comment above
|
||||||
|
delay=ctlc_delay,
|
||||||
|
)
|
||||||
|
|
||||||
|
return expected_patts
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.ctlcs_bish
|
||||||
|
def test_sync_pause_from_aio_task(
|
||||||
|
spawn,
|
||||||
|
|
||||||
|
ctlc: bool
|
||||||
|
# ^TODO, fix for `asyncio`!!
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Verify we can use the `pdbp` REPL from an `asyncio.Task` spawned using
|
||||||
|
APIs in `.to_asyncio`.
|
||||||
|
|
||||||
|
`examples/debugging/asycio_bp.py`
|
||||||
|
|
||||||
|
'''
|
||||||
|
child = spawn('asyncio_bp')
|
||||||
|
|
||||||
|
# RACE on whether trio/asyncio task bps first
|
||||||
|
attach_patts: dict[str, list[str]] = {
|
||||||
|
|
||||||
|
# first pause in guest-mode (aka "infecting")
|
||||||
|
# `trio.Task`.
|
||||||
|
'trio-side': [
|
||||||
|
_pause_msg,
|
||||||
|
"<Task 'trio_ctx'",
|
||||||
|
"('aio_daemon'",
|
||||||
|
],
|
||||||
|
|
||||||
|
# `breakpoint()` from `asyncio.Task`.
|
||||||
|
'asyncio-side': [
|
||||||
|
_pause_msg,
|
||||||
|
"<Task pending name='Task-2' coro=<greenback_shim()",
|
||||||
|
"('aio_daemon'",
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
while attach_patts:
|
||||||
|
expect_any_of(
|
||||||
|
attach_patts=attach_patts,
|
||||||
|
child=child,
|
||||||
|
ctlc=ctlc,
|
||||||
|
)
|
||||||
|
child.sendline('c')
|
||||||
|
|
||||||
|
# NOW in race order,
|
||||||
|
# - the asyncio-task will error
|
||||||
|
# - the root-actor parent task will pause
|
||||||
|
#
|
||||||
|
attach_patts: dict[str, list[str]] = {
|
||||||
|
|
||||||
|
# error raised in `asyncio.Task`
|
||||||
|
"raise ValueError('asyncio side error!')": [
|
||||||
|
_crash_msg,
|
||||||
|
"<Task 'trio_ctx'",
|
||||||
|
"@ ('aio_daemon'",
|
||||||
|
"ValueError: asyncio side error!",
|
||||||
|
|
||||||
|
# XXX, we no longer show this frame by default!
|
||||||
|
# 'return await chan.receive()', # `.to_asyncio` impl internals in tb
|
||||||
|
],
|
||||||
|
|
||||||
|
# parent-side propagation via actor-nursery/portal
|
||||||
|
# "tractor._exceptions.RemoteActorError: remote task raised a 'ValueError'": [
|
||||||
|
"remote task raised a 'ValueError'": [
|
||||||
|
_crash_msg,
|
||||||
|
"src_uid=('aio_daemon'",
|
||||||
|
"('aio_daemon'",
|
||||||
|
],
|
||||||
|
|
||||||
|
# a final pause in root-actor
|
||||||
|
"<Task '__main__.main'": [
|
||||||
|
_pause_msg,
|
||||||
|
"<Task '__main__.main'",
|
||||||
|
"('root'",
|
||||||
|
],
|
||||||
|
}
|
||||||
|
while attach_patts:
|
||||||
|
expect_any_of(
|
||||||
|
attach_patts=attach_patts,
|
||||||
|
child=child,
|
||||||
|
ctlc=ctlc,
|
||||||
|
)
|
||||||
|
child.sendline('c')
|
||||||
|
|
||||||
|
assert not attach_patts
|
||||||
|
|
||||||
|
# final boxed error propagates to root
|
||||||
|
assert_before(
|
||||||
|
child,
|
||||||
|
[
|
||||||
|
_crash_msg,
|
||||||
|
"<Task '__main__.main'",
|
||||||
|
"('root'",
|
||||||
|
"remote task raised a 'ValueError'",
|
||||||
|
"ValueError: asyncio side error!",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
if ctlc:
|
||||||
|
do_ctlc(
|
||||||
|
child,
|
||||||
|
# NOTE: setting this to 0 (or some other sufficient
|
||||||
|
# small val) can cause the test to fail since the
|
||||||
|
# `subactor` suffers a race where the root/parent
|
||||||
|
# sends an actor-cancel prior to it hitting its pause
|
||||||
|
# point; by def the value is 0.1
|
||||||
|
delay=0.4,
|
||||||
|
)
|
||||||
|
|
||||||
|
child.sendline('c')
|
||||||
|
# with maybe_expect_timeout():
|
||||||
|
child.expect(EOF)
|
||||||
|
|
||||||
|
|
||||||
|
def test_sync_pause_from_non_greenbacked_aio_task():
|
||||||
|
'''
|
||||||
|
Where the `breakpoint()` caller task is NOT spawned by
|
||||||
|
`tractor.to_asyncio` and thus never activates
|
||||||
|
a `greenback.ensure_portal()` beforehand, presumably bc the task
|
||||||
|
was started by some lib/dep as in often seen in the field.
|
||||||
|
|
||||||
|
Ensure sync pausing works when the pause is in,
|
||||||
|
|
||||||
|
- the root actor running in infected-mode?
|
||||||
|
|_ since we don't need any IPC to acquire the debug lock?
|
||||||
|
|_ is there some way to handle this like the non-main-thread case?
|
||||||
|
|
||||||
|
All other cases need to error out appropriately right?
|
||||||
|
|
||||||
|
- for any subactor we can't avoid needing the repl lock..
|
||||||
|
|_ is there a way to hook into `asyncio.ensure_future(obj)`?
|
||||||
|
|
||||||
|
'''
|
||||||
|
pass
|
||||||
|
|
@ -0,0 +1,306 @@
|
||||||
|
'''
|
||||||
|
That "native" runtime-hackin toolset better be dang useful!
|
||||||
|
|
||||||
|
Verify the funtion of a variety of "developer-experience" tools we
|
||||||
|
offer from the `.devx` sub-pkg:
|
||||||
|
|
||||||
|
- use of the lovely `stackscope` for dumping actor `trio`-task trees
|
||||||
|
during operation and hangs.
|
||||||
|
|
||||||
|
TODO:
|
||||||
|
- demonstration of `CallerInfo` call stack frame filtering such that
|
||||||
|
for logging and REPL purposes a user sees exactly the layers needed
|
||||||
|
when debugging a problem inside the stack vs. in their app.
|
||||||
|
|
||||||
|
'''
|
||||||
|
from __future__ import annotations
|
||||||
|
from contextlib import (
|
||||||
|
contextmanager as cm,
|
||||||
|
)
|
||||||
|
import os
|
||||||
|
import signal
|
||||||
|
import time
|
||||||
|
from typing import (
|
||||||
|
TYPE_CHECKING,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .conftest import (
|
||||||
|
expect,
|
||||||
|
assert_before,
|
||||||
|
in_prompt_msg,
|
||||||
|
PROMPT,
|
||||||
|
_pause_msg,
|
||||||
|
)
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from pexpect.exceptions import (
|
||||||
|
# TIMEOUT,
|
||||||
|
EOF,
|
||||||
|
)
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from ..conftest import PexpectSpawner
|
||||||
|
|
||||||
|
|
||||||
|
def test_shield_pause(
|
||||||
|
spawn: PexpectSpawner,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Verify the `tractor.pause()/.post_mortem()` API works inside an
|
||||||
|
already cancelled `trio.CancelScope` and that you can step to the
|
||||||
|
next checkpoint wherein the cancelled will get raised.
|
||||||
|
|
||||||
|
'''
|
||||||
|
child = spawn(
|
||||||
|
'shield_hang_in_sub'
|
||||||
|
)
|
||||||
|
expect(
|
||||||
|
child,
|
||||||
|
'Yo my child hanging..?',
|
||||||
|
)
|
||||||
|
assert_before(
|
||||||
|
child,
|
||||||
|
[
|
||||||
|
'Entering shield sleep..',
|
||||||
|
'Enabling trace-trees on `SIGUSR1` since `stackscope` is installed @',
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
script_pid: int = child.pid
|
||||||
|
print(
|
||||||
|
f'Sending SIGUSR1 to {script_pid}\n'
|
||||||
|
f'(kill -s SIGUSR1 {script_pid})\n'
|
||||||
|
)
|
||||||
|
os.kill(
|
||||||
|
script_pid,
|
||||||
|
signal.SIGUSR1,
|
||||||
|
)
|
||||||
|
time.sleep(0.2)
|
||||||
|
expect(
|
||||||
|
child,
|
||||||
|
# end-of-tree delimiter
|
||||||
|
"end-of-\('root'",
|
||||||
|
)
|
||||||
|
assert_before(
|
||||||
|
child,
|
||||||
|
[
|
||||||
|
# 'Srying to dump `stackscope` tree..',
|
||||||
|
# 'Dumping `stackscope` tree for actor',
|
||||||
|
"('root'", # uid line
|
||||||
|
|
||||||
|
# TODO!? this used to show?
|
||||||
|
# -[ ] mk reproducable for @oremanj?
|
||||||
|
#
|
||||||
|
# parent block point (non-shielded)
|
||||||
|
# 'await trio.sleep_forever() # in root',
|
||||||
|
]
|
||||||
|
)
|
||||||
|
expect(
|
||||||
|
child,
|
||||||
|
# end-of-tree delimiter
|
||||||
|
"end-of-\('hanger'",
|
||||||
|
)
|
||||||
|
assert_before(
|
||||||
|
child,
|
||||||
|
[
|
||||||
|
# relay to the sub should be reported
|
||||||
|
'Relaying `SIGUSR1`[10] to sub-actor',
|
||||||
|
|
||||||
|
"('hanger'", # uid line
|
||||||
|
|
||||||
|
# TODO!? SEE ABOVE
|
||||||
|
# hanger LOC where it's shield-halted
|
||||||
|
# 'await trio.sleep_forever() # in subactor',
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# simulate the user sending a ctl-c to the hanging program.
|
||||||
|
# this should result in the terminator kicking in since
|
||||||
|
# the sub is shield blocking and can't respond to SIGINT.
|
||||||
|
os.kill(
|
||||||
|
child.pid,
|
||||||
|
signal.SIGINT,
|
||||||
|
)
|
||||||
|
from tractor._supervise import _shutdown_msg
|
||||||
|
expect(
|
||||||
|
child,
|
||||||
|
# 'Shutting down actor runtime',
|
||||||
|
_shutdown_msg,
|
||||||
|
timeout=6,
|
||||||
|
)
|
||||||
|
assert_before(
|
||||||
|
child,
|
||||||
|
[
|
||||||
|
'raise KeyboardInterrupt',
|
||||||
|
# 'Shutting down actor runtime',
|
||||||
|
'#T-800 deployed to collect zombie B0',
|
||||||
|
"'--uid', \"('hanger',",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_breakpoint_hook_restored(
|
||||||
|
spawn: PexpectSpawner,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Ensures our actor runtime sets a custom `breakpoint()` hook
|
||||||
|
on open then restores the stdlib's default on close.
|
||||||
|
|
||||||
|
The hook state validation is done via `assert`s inside the
|
||||||
|
invoked script with only `breakpoint()` (not `tractor.pause()`)
|
||||||
|
calls used.
|
||||||
|
|
||||||
|
'''
|
||||||
|
child = spawn('restore_builtin_breakpoint')
|
||||||
|
|
||||||
|
child.expect(PROMPT)
|
||||||
|
try:
|
||||||
|
assert_before(
|
||||||
|
child,
|
||||||
|
[
|
||||||
|
_pause_msg,
|
||||||
|
"<Task '__main__.main'",
|
||||||
|
"('root'",
|
||||||
|
"first bp, tractor hook set",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
# XXX if the above raises `AssertionError`, without sending
|
||||||
|
# the final 'continue' cmd to the REPL-active sub-process,
|
||||||
|
# we'll hang waiting for that pexpect instance to terminate..
|
||||||
|
finally:
|
||||||
|
child.sendline('c')
|
||||||
|
|
||||||
|
child.expect(PROMPT)
|
||||||
|
assert_before(
|
||||||
|
child,
|
||||||
|
[
|
||||||
|
"last bp, stdlib hook restored",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# since the stdlib hook was already restored there should be NO
|
||||||
|
# `tractor` `log.pdb()` content from console!
|
||||||
|
assert not in_prompt_msg(
|
||||||
|
child,
|
||||||
|
[
|
||||||
|
_pause_msg,
|
||||||
|
"<Task '__main__.main'",
|
||||||
|
"('root'",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
child.sendline('c')
|
||||||
|
child.expect(EOF)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_to_raise = Exception('Triggering a crash')
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'to_raise',
|
||||||
|
[
|
||||||
|
None,
|
||||||
|
_to_raise,
|
||||||
|
RuntimeError('Never crash handle this!'),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'raise_on_exit',
|
||||||
|
[
|
||||||
|
True,
|
||||||
|
[type(_to_raise)],
|
||||||
|
False,
|
||||||
|
]
|
||||||
|
)
|
||||||
|
def test_crash_handler_cms(
|
||||||
|
debug_mode: bool,
|
||||||
|
to_raise: Exception,
|
||||||
|
raise_on_exit: bool|list[Exception],
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Verify the `.devx.open_crash_handler()` API(s) by also
|
||||||
|
(conveniently enough) tesing its `repl_fixture: ContextManager`
|
||||||
|
param support which for this suite allows use to avoid use of
|
||||||
|
a `pexpect`-style-test since we use the fixture to avoid actually
|
||||||
|
entering `PdbpREPL.iteract()` :smirk:
|
||||||
|
|
||||||
|
'''
|
||||||
|
import tractor
|
||||||
|
# import trio
|
||||||
|
|
||||||
|
# state flags
|
||||||
|
repl_acquired: bool = False
|
||||||
|
repl_released: bool = False
|
||||||
|
|
||||||
|
@cm
|
||||||
|
def block_repl_ux(
|
||||||
|
repl: tractor.devx.debug.PdbREPL,
|
||||||
|
maybe_bxerr: (
|
||||||
|
tractor.devx._debug.BoxedMaybeException
|
||||||
|
|None
|
||||||
|
) = None,
|
||||||
|
enter_repl: bool = True,
|
||||||
|
|
||||||
|
) -> bool:
|
||||||
|
'''
|
||||||
|
Set pre/post-REPL state vars and bypass actual conole
|
||||||
|
interaction.
|
||||||
|
|
||||||
|
'''
|
||||||
|
nonlocal repl_acquired, repl_released
|
||||||
|
|
||||||
|
# task: trio.Task = trio.lowlevel.current_task()
|
||||||
|
# print(f'pre-REPL active_task={task.name}')
|
||||||
|
|
||||||
|
print('pre-REPL')
|
||||||
|
repl_acquired = True
|
||||||
|
yield False # never actually .interact()
|
||||||
|
print('post-REPL')
|
||||||
|
repl_released = True
|
||||||
|
|
||||||
|
try:
|
||||||
|
# TODO, with runtime's `debug_mode` setting
|
||||||
|
# -[ ] need to open runtime tho obvi..
|
||||||
|
#
|
||||||
|
# with tractor.devx.maybe_open_crash_handler(
|
||||||
|
# pdb=True,
|
||||||
|
|
||||||
|
with tractor.devx.open_crash_handler(
|
||||||
|
raise_on_exit=raise_on_exit,
|
||||||
|
repl_fixture=block_repl_ux
|
||||||
|
) as bxerr:
|
||||||
|
if to_raise is not None:
|
||||||
|
raise to_raise
|
||||||
|
|
||||||
|
except Exception as _exc:
|
||||||
|
exc = _exc
|
||||||
|
if (
|
||||||
|
raise_on_exit is True
|
||||||
|
or
|
||||||
|
type(to_raise) in raise_on_exit
|
||||||
|
):
|
||||||
|
assert (
|
||||||
|
exc
|
||||||
|
is
|
||||||
|
to_raise
|
||||||
|
is
|
||||||
|
bxerr.value
|
||||||
|
)
|
||||||
|
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
assert (
|
||||||
|
to_raise is None
|
||||||
|
or
|
||||||
|
not raise_on_exit
|
||||||
|
or
|
||||||
|
type(to_raise) not in raise_on_exit
|
||||||
|
)
|
||||||
|
assert bxerr.value is to_raise
|
||||||
|
|
||||||
|
assert bxerr.raise_on_exit == raise_on_exit
|
||||||
|
|
||||||
|
if to_raise is not None:
|
||||||
|
assert repl_acquired
|
||||||
|
assert repl_released
|
||||||
|
|
@ -0,0 +1,4 @@
|
||||||
|
'''
|
||||||
|
`tractor.ipc` subsystem(s)/unit testing suites.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
@ -0,0 +1,114 @@
|
||||||
|
'''
|
||||||
|
Unit-ish tests for specific IPC transport protocol backends.
|
||||||
|
|
||||||
|
'''
|
||||||
|
from __future__ import annotations
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
from tractor import (
|
||||||
|
Actor,
|
||||||
|
_state,
|
||||||
|
_addr,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def bindspace_dir_str() -> str:
|
||||||
|
|
||||||
|
rt_dir: Path = tractor._state.get_rt_dir()
|
||||||
|
bs_dir: Path = rt_dir / 'doggy'
|
||||||
|
bs_dir_str: str = str(bs_dir)
|
||||||
|
assert not bs_dir.is_dir()
|
||||||
|
|
||||||
|
yield bs_dir_str
|
||||||
|
|
||||||
|
# delete it on suite teardown.
|
||||||
|
# ?TODO? should we support this internally
|
||||||
|
# or is leaking it ok?
|
||||||
|
if bs_dir.is_dir():
|
||||||
|
bs_dir.rmdir()
|
||||||
|
|
||||||
|
|
||||||
|
def test_uds_bindspace_created_implicitly(
|
||||||
|
debug_mode: bool,
|
||||||
|
bindspace_dir_str: str,
|
||||||
|
):
|
||||||
|
registry_addr: tuple = (
|
||||||
|
f'{bindspace_dir_str}',
|
||||||
|
'registry@doggy.sock',
|
||||||
|
)
|
||||||
|
bs_dir_str: str = registry_addr[0]
|
||||||
|
|
||||||
|
# XXX, ensure bindspace-dir DNE beforehand!
|
||||||
|
assert not Path(bs_dir_str).is_dir()
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
async with tractor.open_nursery(
|
||||||
|
enable_transports=['uds'],
|
||||||
|
registry_addrs=[registry_addr],
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
) as _an:
|
||||||
|
|
||||||
|
# XXX MUST be created implicitly by
|
||||||
|
# `.ipc._uds.start_listener()`!
|
||||||
|
assert Path(bs_dir_str).is_dir()
|
||||||
|
|
||||||
|
root: Actor = tractor.current_actor()
|
||||||
|
assert root.is_registrar
|
||||||
|
|
||||||
|
assert registry_addr in root.reg_addrs
|
||||||
|
assert (
|
||||||
|
registry_addr
|
||||||
|
in
|
||||||
|
_state._runtime_vars['_registry_addrs']
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
_addr.wrap_address(registry_addr)
|
||||||
|
in
|
||||||
|
root.registry_addrs
|
||||||
|
)
|
||||||
|
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
|
||||||
|
def test_uds_double_listen_raises_connerr(
|
||||||
|
debug_mode: bool,
|
||||||
|
bindspace_dir_str: str,
|
||||||
|
):
|
||||||
|
registry_addr: tuple = (
|
||||||
|
f'{bindspace_dir_str}',
|
||||||
|
'registry@doggy.sock',
|
||||||
|
)
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
async with tractor.open_nursery(
|
||||||
|
enable_transports=['uds'],
|
||||||
|
registry_addrs=[registry_addr],
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
) as _an:
|
||||||
|
|
||||||
|
# runtime up
|
||||||
|
root: Actor = tractor.current_actor()
|
||||||
|
|
||||||
|
from tractor.ipc._uds import (
|
||||||
|
start_listener,
|
||||||
|
UDSAddress,
|
||||||
|
)
|
||||||
|
ya_bound_addr: UDSAddress = root.registry_addrs[0]
|
||||||
|
try:
|
||||||
|
await start_listener(
|
||||||
|
addr=ya_bound_addr,
|
||||||
|
)
|
||||||
|
except ConnectionError as connerr:
|
||||||
|
assert type(src_exc := connerr.__context__) is OSError
|
||||||
|
assert 'Address already in use' in src_exc.args
|
||||||
|
# complete, exit test.
|
||||||
|
|
||||||
|
else:
|
||||||
|
pytest.fail('It dint raise a connerr !?')
|
||||||
|
|
||||||
|
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -0,0 +1,95 @@
|
||||||
|
'''
|
||||||
|
Verify the `enable_transports` param drives various
|
||||||
|
per-root/sub-actor IPC endpoint/server settings.
|
||||||
|
|
||||||
|
'''
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
from tractor import (
|
||||||
|
Actor,
|
||||||
|
Portal,
|
||||||
|
ipc,
|
||||||
|
msg,
|
||||||
|
_state,
|
||||||
|
_addr,
|
||||||
|
)
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def chk_tpts(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
tpt_proto_key: str,
|
||||||
|
):
|
||||||
|
rtvars = _state._runtime_vars
|
||||||
|
assert (
|
||||||
|
tpt_proto_key
|
||||||
|
in
|
||||||
|
rtvars['_enable_tpts']
|
||||||
|
)
|
||||||
|
actor: Actor = tractor.current_actor()
|
||||||
|
spec: msg.types.SpawnSpec = actor._spawn_spec
|
||||||
|
assert spec._runtime_vars == rtvars
|
||||||
|
|
||||||
|
# ensure individual IPC ep-addr types
|
||||||
|
serv: ipc._server.Server = actor.ipc_server
|
||||||
|
addr: ipc._types.Address
|
||||||
|
for addr in serv.addrs:
|
||||||
|
assert addr.proto_key == tpt_proto_key
|
||||||
|
|
||||||
|
# Actor delegate-props enforcement
|
||||||
|
assert (
|
||||||
|
actor.accept_addrs
|
||||||
|
==
|
||||||
|
serv.accept_addrs
|
||||||
|
)
|
||||||
|
|
||||||
|
await ctx.started(serv.accept_addrs)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO, parametrize over mis-matched-proto-typed `registry_addrs`
|
||||||
|
# since i seems to work in `piker` but not exactly sure if both tcp
|
||||||
|
# & uds are being deployed then?
|
||||||
|
#
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'tpt_proto_key',
|
||||||
|
['tcp', 'uds'],
|
||||||
|
ids=lambda item: f'ipc_tpt={item!r}'
|
||||||
|
)
|
||||||
|
def test_root_passes_tpt_to_sub(
|
||||||
|
tpt_proto_key: str,
|
||||||
|
reg_addr: tuple,
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
|
async def main():
|
||||||
|
async with tractor.open_nursery(
|
||||||
|
enable_transports=[tpt_proto_key],
|
||||||
|
registry_addrs=[reg_addr],
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
) as an:
|
||||||
|
|
||||||
|
assert (
|
||||||
|
tpt_proto_key
|
||||||
|
in
|
||||||
|
_state._runtime_vars['_enable_tpts']
|
||||||
|
)
|
||||||
|
|
||||||
|
ptl: Portal = await an.start_actor(
|
||||||
|
name='sub',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
)
|
||||||
|
async with ptl.open_context(
|
||||||
|
chk_tpts,
|
||||||
|
tpt_proto_key=tpt_proto_key,
|
||||||
|
) as (ctx, accept_addrs):
|
||||||
|
|
||||||
|
uw_addr: tuple
|
||||||
|
for uw_addr in accept_addrs:
|
||||||
|
addr = _addr.wrap_address(uw_addr)
|
||||||
|
assert addr.is_valid
|
||||||
|
|
||||||
|
# shudown sub-actor(s)
|
||||||
|
await an.cancel()
|
||||||
|
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -0,0 +1,72 @@
|
||||||
|
'''
|
||||||
|
High-level `.ipc._server` unit tests.
|
||||||
|
|
||||||
|
'''
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import trio
|
||||||
|
from tractor import (
|
||||||
|
devx,
|
||||||
|
ipc,
|
||||||
|
log,
|
||||||
|
)
|
||||||
|
from tractor._testing.addr import (
|
||||||
|
get_rando_addr,
|
||||||
|
)
|
||||||
|
# TODO, use/check-roundtripping with some of these wrapper types?
|
||||||
|
#
|
||||||
|
# from .._addr import Address
|
||||||
|
# from ._chan import Channel
|
||||||
|
# from ._transport import MsgTransport
|
||||||
|
# from ._uds import UDSAddress
|
||||||
|
# from ._tcp import TCPAddress
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'_tpt_proto',
|
||||||
|
['uds', 'tcp']
|
||||||
|
)
|
||||||
|
def test_basic_ipc_server(
|
||||||
|
_tpt_proto: str,
|
||||||
|
debug_mode: bool,
|
||||||
|
loglevel: str,
|
||||||
|
):
|
||||||
|
|
||||||
|
# so we see the socket-listener reporting on console
|
||||||
|
log.get_console_log("INFO")
|
||||||
|
|
||||||
|
rando_addr: tuple = get_rando_addr(
|
||||||
|
tpt_proto=_tpt_proto,
|
||||||
|
)
|
||||||
|
async def main():
|
||||||
|
async with ipc._server.open_ipc_server() as server:
|
||||||
|
|
||||||
|
assert (
|
||||||
|
server._parent_tn
|
||||||
|
and
|
||||||
|
server._parent_tn is server._stream_handler_tn
|
||||||
|
)
|
||||||
|
assert server._no_more_peers.is_set()
|
||||||
|
|
||||||
|
eps: list[ipc._server.Endpoint] = await server.listen_on(
|
||||||
|
accept_addrs=[rando_addr],
|
||||||
|
stream_handler_nursery=None,
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
len(eps) == 1
|
||||||
|
and
|
||||||
|
(ep := eps[0])._listener
|
||||||
|
and
|
||||||
|
not ep.peer_tpts
|
||||||
|
)
|
||||||
|
|
||||||
|
server._parent_tn.cancel_scope.cancel()
|
||||||
|
|
||||||
|
# !TODO! actually make a bg-task connection from a client
|
||||||
|
# using `ipc._chan._connect_chan()`
|
||||||
|
|
||||||
|
with devx.maybe_open_crash_handler(
|
||||||
|
pdb=debug_mode,
|
||||||
|
):
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -0,0 +1,309 @@
|
||||||
|
'''
|
||||||
|
Sketchy network blackoutz, ugly byzantine gens, puedes eschuchar la
|
||||||
|
cancelacion?..
|
||||||
|
|
||||||
|
'''
|
||||||
|
from functools import partial
|
||||||
|
from types import ModuleType
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from _pytest.pathlib import import_path
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
from tractor import (
|
||||||
|
TransportClosed,
|
||||||
|
)
|
||||||
|
from tractor._testing import (
|
||||||
|
examples_dir,
|
||||||
|
break_ipc,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'pre_aclose_msgstream',
|
||||||
|
[
|
||||||
|
False,
|
||||||
|
True,
|
||||||
|
],
|
||||||
|
ids=[
|
||||||
|
'no_msgstream_aclose',
|
||||||
|
'pre_aclose_msgstream',
|
||||||
|
],
|
||||||
|
)
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'ipc_break',
|
||||||
|
[
|
||||||
|
# no breaks
|
||||||
|
{
|
||||||
|
'break_parent_ipc_after': False,
|
||||||
|
'break_child_ipc_after': False,
|
||||||
|
},
|
||||||
|
|
||||||
|
# only parent breaks
|
||||||
|
{
|
||||||
|
'break_parent_ipc_after': 500,
|
||||||
|
'break_child_ipc_after': False,
|
||||||
|
},
|
||||||
|
|
||||||
|
# only child breaks
|
||||||
|
{
|
||||||
|
'break_parent_ipc_after': False,
|
||||||
|
'break_child_ipc_after': 500,
|
||||||
|
},
|
||||||
|
|
||||||
|
# both: break parent first
|
||||||
|
{
|
||||||
|
'break_parent_ipc_after': 500,
|
||||||
|
'break_child_ipc_after': 800,
|
||||||
|
},
|
||||||
|
# both: break child first
|
||||||
|
{
|
||||||
|
'break_parent_ipc_after': 800,
|
||||||
|
'break_child_ipc_after': 500,
|
||||||
|
},
|
||||||
|
|
||||||
|
],
|
||||||
|
ids=[
|
||||||
|
'no_break',
|
||||||
|
'break_parent',
|
||||||
|
'break_child',
|
||||||
|
'break_both_parent_first',
|
||||||
|
'break_both_child_first',
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_ipc_channel_break_during_stream(
|
||||||
|
debug_mode: bool,
|
||||||
|
loglevel: str,
|
||||||
|
spawn_backend: str,
|
||||||
|
ipc_break: dict|None,
|
||||||
|
pre_aclose_msgstream: bool,
|
||||||
|
tpt_proto: str,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Ensure we can have an IPC channel break its connection during
|
||||||
|
streaming and it's still possible for the (simulated) user to kill
|
||||||
|
the actor tree using SIGINT.
|
||||||
|
|
||||||
|
We also verify the type of connection error expected in the parent
|
||||||
|
depending on which side if the IPC breaks first.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if spawn_backend != 'trio':
|
||||||
|
if debug_mode:
|
||||||
|
pytest.skip('`debug_mode` only supported on `trio` spawner')
|
||||||
|
|
||||||
|
# non-`trio` spawners should never hit the hang condition that
|
||||||
|
# requires the user to do ctl-c to cancel the actor tree.
|
||||||
|
# expect_final_exc = trio.ClosedResourceError
|
||||||
|
expect_final_exc = TransportClosed
|
||||||
|
|
||||||
|
mod: ModuleType = import_path(
|
||||||
|
examples_dir() / 'advanced_faults'
|
||||||
|
/ 'ipc_failure_during_stream.py',
|
||||||
|
root=examples_dir(),
|
||||||
|
consider_namespace_packages=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
# by def we expect KBI from user after a simulated "hang
|
||||||
|
# period" wherein the user eventually hits ctl-c to kill the
|
||||||
|
# root-actor tree.
|
||||||
|
expect_final_exc: BaseException = KeyboardInterrupt
|
||||||
|
expect_final_cause: BaseException|None = None
|
||||||
|
|
||||||
|
if (
|
||||||
|
# only expect EoC if trans is broken on the child side,
|
||||||
|
ipc_break['break_child_ipc_after'] is not False
|
||||||
|
# AND we tell the child to call `MsgStream.aclose()`.
|
||||||
|
and pre_aclose_msgstream
|
||||||
|
):
|
||||||
|
# expect_final_exc = trio.EndOfChannel
|
||||||
|
# ^XXX NOPE! XXX^ since now `.open_stream()` absorbs this
|
||||||
|
# gracefully!
|
||||||
|
expect_final_exc = KeyboardInterrupt
|
||||||
|
|
||||||
|
# NOTE when ONLY the child breaks or it breaks BEFORE the
|
||||||
|
# parent we expect the parent to get a closed resource error
|
||||||
|
# on the next `MsgStream.receive()` and then fail out and
|
||||||
|
# cancel the child from there.
|
||||||
|
#
|
||||||
|
# ONLY CHILD breaks
|
||||||
|
if (
|
||||||
|
ipc_break['break_child_ipc_after']
|
||||||
|
and
|
||||||
|
ipc_break['break_parent_ipc_after'] is False
|
||||||
|
):
|
||||||
|
# NOTE: we DO NOT expect this any more since
|
||||||
|
# the child side's channel will be broken silently
|
||||||
|
# and nothing on the parent side will indicate this!
|
||||||
|
# expect_final_exc = trio.ClosedResourceError
|
||||||
|
|
||||||
|
# NOTE: child will send a 'stop' msg before it breaks
|
||||||
|
# the transport channel BUT, that will be absorbed by the
|
||||||
|
# `ctx.open_stream()` block and thus the `.open_context()`
|
||||||
|
# should hang, after which the test script simulates
|
||||||
|
# a user sending ctl-c by raising a KBI.
|
||||||
|
if pre_aclose_msgstream:
|
||||||
|
expect_final_exc = KeyboardInterrupt
|
||||||
|
if tpt_proto == 'uds':
|
||||||
|
expect_final_exc = TransportClosed
|
||||||
|
expect_final_cause = trio.BrokenResourceError
|
||||||
|
|
||||||
|
# XXX OLD XXX
|
||||||
|
# if child calls `MsgStream.aclose()` then expect EoC.
|
||||||
|
# ^ XXX not any more ^ since eoc is always absorbed
|
||||||
|
# gracefully and NOT bubbled to the `.open_context()`
|
||||||
|
# block!
|
||||||
|
# expect_final_exc = trio.EndOfChannel
|
||||||
|
|
||||||
|
# BOTH but, CHILD breaks FIRST
|
||||||
|
elif (
|
||||||
|
ipc_break['break_child_ipc_after'] is not False
|
||||||
|
and (
|
||||||
|
ipc_break['break_parent_ipc_after']
|
||||||
|
> ipc_break['break_child_ipc_after']
|
||||||
|
)
|
||||||
|
):
|
||||||
|
if pre_aclose_msgstream:
|
||||||
|
expect_final_exc = KeyboardInterrupt
|
||||||
|
|
||||||
|
if tpt_proto == 'uds':
|
||||||
|
expect_final_exc = TransportClosed
|
||||||
|
expect_final_cause = trio.BrokenResourceError
|
||||||
|
|
||||||
|
# NOTE when the parent IPC side dies (even if the child does as well
|
||||||
|
# but the child fails BEFORE the parent) we always expect the
|
||||||
|
# IPC layer to raise a closed-resource, NEVER do we expect
|
||||||
|
# a stop msg since the parent-side ctx apis will error out
|
||||||
|
# IMMEDIATELY before the child ever sends any 'stop' msg.
|
||||||
|
#
|
||||||
|
# ONLY PARENT breaks
|
||||||
|
elif (
|
||||||
|
ipc_break['break_parent_ipc_after']
|
||||||
|
and
|
||||||
|
ipc_break['break_child_ipc_after'] is False
|
||||||
|
):
|
||||||
|
expect_final_exc = tractor.TransportClosed
|
||||||
|
expect_final_cause = trio.ClosedResourceError
|
||||||
|
|
||||||
|
# BOTH but, PARENT breaks FIRST
|
||||||
|
elif (
|
||||||
|
ipc_break['break_parent_ipc_after'] is not False
|
||||||
|
and (
|
||||||
|
ipc_break['break_child_ipc_after']
|
||||||
|
>
|
||||||
|
ipc_break['break_parent_ipc_after']
|
||||||
|
)
|
||||||
|
):
|
||||||
|
expect_final_exc = tractor.TransportClosed
|
||||||
|
expect_final_cause = trio.ClosedResourceError
|
||||||
|
|
||||||
|
with pytest.raises(
|
||||||
|
expected_exception=(
|
||||||
|
expect_final_exc,
|
||||||
|
ExceptionGroup,
|
||||||
|
),
|
||||||
|
) as excinfo:
|
||||||
|
try:
|
||||||
|
trio.run(
|
||||||
|
partial(
|
||||||
|
mod.main,
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
start_method=spawn_backend,
|
||||||
|
loglevel=loglevel,
|
||||||
|
pre_close=pre_aclose_msgstream,
|
||||||
|
tpt_proto=tpt_proto,
|
||||||
|
**ipc_break,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except KeyboardInterrupt as _kbi:
|
||||||
|
kbi = _kbi
|
||||||
|
if expect_final_exc is not KeyboardInterrupt:
|
||||||
|
pytest.fail(
|
||||||
|
'Rxed unexpected KBI !?\n'
|
||||||
|
f'{repr(kbi)}'
|
||||||
|
)
|
||||||
|
|
||||||
|
raise
|
||||||
|
|
||||||
|
except tractor.TransportClosed as _tc:
|
||||||
|
tc = _tc
|
||||||
|
if expect_final_exc is KeyboardInterrupt:
|
||||||
|
pytest.fail(
|
||||||
|
'Unexpected transport failure !?\n'
|
||||||
|
f'{repr(tc)}'
|
||||||
|
)
|
||||||
|
cause: Exception = tc.__cause__
|
||||||
|
assert (
|
||||||
|
# type(cause) is trio.ClosedResourceError
|
||||||
|
type(cause) is expect_final_cause
|
||||||
|
|
||||||
|
# TODO, should we expect a certain exc-message (per
|
||||||
|
# tpt) as well??
|
||||||
|
# and
|
||||||
|
# cause.args[0] == 'another task closed this fd'
|
||||||
|
)
|
||||||
|
|
||||||
|
raise
|
||||||
|
|
||||||
|
# get raw instance from pytest wrapper
|
||||||
|
value = excinfo.value
|
||||||
|
if isinstance(value, ExceptionGroup):
|
||||||
|
excs = value.exceptions
|
||||||
|
assert len(excs) == 1
|
||||||
|
final_exc = excs[0]
|
||||||
|
assert isinstance(final_exc, expect_final_exc)
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def break_ipc_after_started(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
) -> None:
|
||||||
|
await ctx.started()
|
||||||
|
async with ctx.open_stream() as stream:
|
||||||
|
|
||||||
|
# TODO: make a test which verifies the error
|
||||||
|
# for this, i.e. raises a `MsgTypeError`
|
||||||
|
# await ctx.chan.send(None)
|
||||||
|
|
||||||
|
await break_ipc(
|
||||||
|
stream=stream,
|
||||||
|
pre_close=True,
|
||||||
|
)
|
||||||
|
print('child broke IPC and terminating')
|
||||||
|
|
||||||
|
|
||||||
|
def test_stream_closed_right_after_ipc_break_and_zombie_lord_engages():
|
||||||
|
'''
|
||||||
|
Verify that is a subactor's IPC goes down just after bringing up
|
||||||
|
a stream the parent can trigger a SIGINT and the child will be
|
||||||
|
reaped out-of-IPC by the localhost process supervision machinery:
|
||||||
|
aka "zombie lord".
|
||||||
|
|
||||||
|
'''
|
||||||
|
async def main():
|
||||||
|
with trio.fail_after(3):
|
||||||
|
async with tractor.open_nursery() as an:
|
||||||
|
portal = await an.start_actor(
|
||||||
|
'ipc_breaker',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
)
|
||||||
|
|
||||||
|
with trio.move_on_after(1):
|
||||||
|
async with (
|
||||||
|
portal.open_context(
|
||||||
|
break_ipc_after_started
|
||||||
|
) as (ctx, sent),
|
||||||
|
):
|
||||||
|
async with ctx.open_stream():
|
||||||
|
await trio.sleep(0.5)
|
||||||
|
|
||||||
|
print('parent waiting on context')
|
||||||
|
|
||||||
|
print(
|
||||||
|
'parent exited context\n'
|
||||||
|
'parent raising KBI..\n'
|
||||||
|
)
|
||||||
|
raise KeyboardInterrupt
|
||||||
|
|
||||||
|
with pytest.raises(KeyboardInterrupt):
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -6,6 +6,7 @@ from collections import Counter
|
||||||
import itertools
|
import itertools
|
||||||
import platform
|
import platform
|
||||||
|
|
||||||
|
import pytest
|
||||||
import trio
|
import trio
|
||||||
import tractor
|
import tractor
|
||||||
|
|
||||||
|
|
@ -14,7 +15,7 @@ def is_win():
|
||||||
return platform.system() == 'Windows'
|
return platform.system() == 'Windows'
|
||||||
|
|
||||||
|
|
||||||
_registry: dict[str, set[tractor.ReceiveMsgStream]] = {
|
_registry: dict[str, set[tractor.MsgStream]] = {
|
||||||
'even': set(),
|
'even': set(),
|
||||||
'odd': set(),
|
'odd': set(),
|
||||||
}
|
}
|
||||||
|
|
@ -143,8 +144,16 @@ def test_dynamic_pub_sub():
|
||||||
|
|
||||||
try:
|
try:
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
except trio.TooSlowError:
|
except (
|
||||||
pass
|
trio.TooSlowError,
|
||||||
|
ExceptionGroup,
|
||||||
|
) as err:
|
||||||
|
if isinstance(err, ExceptionGroup):
|
||||||
|
for suberr in err.exceptions:
|
||||||
|
if isinstance(suberr, trio.TooSlowError):
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
pytest.fail('Never got a `TooSlowError` ?')
|
||||||
|
|
||||||
|
|
||||||
@tractor.context
|
@tractor.context
|
||||||
|
|
@ -298,44 +307,76 @@ async def inf_streamer(
|
||||||
|
|
||||||
async with (
|
async with (
|
||||||
ctx.open_stream() as stream,
|
ctx.open_stream() as stream,
|
||||||
trio.open_nursery() as n,
|
|
||||||
|
# XXX TODO, INTERESTING CASE!!
|
||||||
|
# - if we don't collapse the eg then the embedded
|
||||||
|
# `trio.EndOfChannel` doesn't propagate directly to the above
|
||||||
|
# .open_stream() parent, resulting in it also raising instead
|
||||||
|
# of gracefully absorbing as normal.. so how to handle?
|
||||||
|
tractor.trionics.collapse_eg(),
|
||||||
|
trio.open_nursery() as tn,
|
||||||
):
|
):
|
||||||
async def bail_on_sentinel():
|
async def close_stream_on_sentinel():
|
||||||
async for msg in stream:
|
async for msg in stream:
|
||||||
if msg == 'done':
|
if msg == 'done':
|
||||||
|
print(
|
||||||
|
'streamer RXed "done" sentinel msg!\n'
|
||||||
|
'CLOSING `MsgStream`!'
|
||||||
|
)
|
||||||
await stream.aclose()
|
await stream.aclose()
|
||||||
else:
|
else:
|
||||||
print(f'streamer received {msg}')
|
print(f'streamer received {msg}')
|
||||||
|
else:
|
||||||
|
print('streamer exited recv loop')
|
||||||
|
|
||||||
# start termination detector
|
# start termination detector
|
||||||
n.start_soon(bail_on_sentinel)
|
tn.start_soon(close_stream_on_sentinel)
|
||||||
|
|
||||||
for val in itertools.count():
|
cap: int = 10000 # so that we don't spin forever when bug..
|
||||||
|
for val in range(cap):
|
||||||
try:
|
try:
|
||||||
|
print(f'streamer sending {val}')
|
||||||
await stream.send(val)
|
await stream.send(val)
|
||||||
|
if val > cap:
|
||||||
|
raise RuntimeError(
|
||||||
|
'Streamer never cancelled by setinel?'
|
||||||
|
)
|
||||||
|
await trio.sleep(0.001)
|
||||||
|
|
||||||
|
# close out the stream gracefully
|
||||||
except trio.ClosedResourceError:
|
except trio.ClosedResourceError:
|
||||||
# close out the stream gracefully
|
print('transport closed on streamer side!')
|
||||||
|
assert stream.closed
|
||||||
break
|
break
|
||||||
|
else:
|
||||||
|
raise RuntimeError(
|
||||||
|
'Streamer not cancelled before finished sending?'
|
||||||
|
)
|
||||||
|
|
||||||
print('terminating streamer')
|
print('streamer exited .open_streamer() block')
|
||||||
|
|
||||||
|
|
||||||
def test_local_task_fanout_from_stream():
|
def test_local_task_fanout_from_stream(
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
'''
|
'''
|
||||||
Single stream with multiple local consumer tasks using the
|
Single stream with multiple local consumer tasks using the
|
||||||
``MsgStream.subscribe()` api.
|
``MsgStream.subscribe()` api.
|
||||||
|
|
||||||
Ensure all tasks receive all values after stream completes sending.
|
Ensure all tasks receive all values after stream completes
|
||||||
|
sending.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
consumers = 22
|
consumers: int = 22
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
|
|
||||||
counts = Counter()
|
counts = Counter()
|
||||||
|
|
||||||
async with tractor.open_nursery() as tn:
|
async with tractor.open_nursery(
|
||||||
p = await tn.start_actor(
|
debug_mode=debug_mode,
|
||||||
|
) as tn:
|
||||||
|
p: tractor.Portal = await tn.start_actor(
|
||||||
'inf_streamer',
|
'inf_streamer',
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
)
|
)
|
||||||
|
|
@ -343,7 +384,6 @@ def test_local_task_fanout_from_stream():
|
||||||
p.open_context(inf_streamer) as (ctx, _),
|
p.open_context(inf_streamer) as (ctx, _),
|
||||||
ctx.open_stream() as stream,
|
ctx.open_stream() as stream,
|
||||||
):
|
):
|
||||||
|
|
||||||
async def pull_and_count(name: str):
|
async def pull_and_count(name: str):
|
||||||
# name = trio.lowlevel.current_task().name
|
# name = trio.lowlevel.current_task().name
|
||||||
async with stream.subscribe() as recver:
|
async with stream.subscribe() as recver:
|
||||||
|
|
@ -352,7 +392,7 @@ def test_local_task_fanout_from_stream():
|
||||||
tractor.trionics.BroadcastReceiver
|
tractor.trionics.BroadcastReceiver
|
||||||
)
|
)
|
||||||
async for val in recver:
|
async for val in recver:
|
||||||
# print(f'{name}: {val}')
|
print(f'bx {name} rx: {val}')
|
||||||
counts[name] += 1
|
counts[name] += 1
|
||||||
|
|
||||||
print(f'{name} bcaster ended')
|
print(f'{name} bcaster ended')
|
||||||
|
|
@ -362,10 +402,14 @@ def test_local_task_fanout_from_stream():
|
||||||
with trio.fail_after(3):
|
with trio.fail_after(3):
|
||||||
async with trio.open_nursery() as nurse:
|
async with trio.open_nursery() as nurse:
|
||||||
for i in range(consumers):
|
for i in range(consumers):
|
||||||
nurse.start_soon(pull_and_count, i)
|
nurse.start_soon(
|
||||||
|
pull_and_count,
|
||||||
|
i,
|
||||||
|
)
|
||||||
|
|
||||||
|
# delay to let bcast consumers pull msgs
|
||||||
await trio.sleep(0.5)
|
await trio.sleep(0.5)
|
||||||
print('\nterminating')
|
print('terminating nursery of bcast rxer consumers!')
|
||||||
await stream.send('done')
|
await stream.send('done')
|
||||||
|
|
||||||
print('closed stream connection')
|
print('closed stream connection')
|
||||||
|
|
|
||||||
|
|
@ -8,15 +8,13 @@ import platform
|
||||||
import time
|
import time
|
||||||
from itertools import repeat
|
from itertools import repeat
|
||||||
|
|
||||||
from exceptiongroup import (
|
|
||||||
BaseExceptionGroup,
|
|
||||||
ExceptionGroup,
|
|
||||||
)
|
|
||||||
import pytest
|
import pytest
|
||||||
import trio
|
import trio
|
||||||
import tractor
|
import tractor
|
||||||
|
from tractor._testing import (
|
||||||
from conftest import tractor_test, no_windows
|
tractor_test,
|
||||||
|
)
|
||||||
|
from .conftest import no_windows
|
||||||
|
|
||||||
|
|
||||||
def is_win():
|
def is_win():
|
||||||
|
|
@ -47,17 +45,19 @@ async def do_nuthin():
|
||||||
],
|
],
|
||||||
ids=['no_args', 'unexpected_args'],
|
ids=['no_args', 'unexpected_args'],
|
||||||
)
|
)
|
||||||
def test_remote_error(arb_addr, args_err):
|
def test_remote_error(reg_addr, args_err):
|
||||||
"""Verify an error raised in a subactor that is propagated
|
'''
|
||||||
|
Verify an error raised in a subactor that is propagated
|
||||||
to the parent nursery, contains the underlying boxed builtin
|
to the parent nursery, contains the underlying boxed builtin
|
||||||
error type info and causes cancellation and reraising all the
|
error type info and causes cancellation and reraising all the
|
||||||
way up the stack.
|
way up the stack.
|
||||||
"""
|
|
||||||
|
'''
|
||||||
args, errtype = args_err
|
args, errtype = args_err
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
arbiter_addr=arb_addr,
|
registry_addrs=[reg_addr],
|
||||||
) as nursery:
|
) as nursery:
|
||||||
|
|
||||||
# on a remote type error caused by bad input args
|
# on a remote type error caused by bad input args
|
||||||
|
|
@ -65,7 +65,9 @@ def test_remote_error(arb_addr, args_err):
|
||||||
# an exception group outside the nursery since the error
|
# an exception group outside the nursery since the error
|
||||||
# here and the far end task error are one in the same?
|
# here and the far end task error are one in the same?
|
||||||
portal = await nursery.run_in_actor(
|
portal = await nursery.run_in_actor(
|
||||||
assert_err, name='errorer', **args
|
assert_err,
|
||||||
|
name='errorer',
|
||||||
|
**args
|
||||||
)
|
)
|
||||||
|
|
||||||
# get result(s) from main task
|
# get result(s) from main task
|
||||||
|
|
@ -75,7 +77,7 @@ def test_remote_error(arb_addr, args_err):
|
||||||
# of this actor nursery.
|
# of this actor nursery.
|
||||||
await portal.result()
|
await portal.result()
|
||||||
except tractor.RemoteActorError as err:
|
except tractor.RemoteActorError as err:
|
||||||
assert err.type == errtype
|
assert err.boxed_type == errtype
|
||||||
print("Look Maa that actor failed hard, hehh")
|
print("Look Maa that actor failed hard, hehh")
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
@ -84,20 +86,33 @@ def test_remote_error(arb_addr, args_err):
|
||||||
with pytest.raises(tractor.RemoteActorError) as excinfo:
|
with pytest.raises(tractor.RemoteActorError) as excinfo:
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
|
|
||||||
assert excinfo.value.type == errtype
|
assert excinfo.value.boxed_type == errtype
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# the root task will also error on the `.result()` call
|
# the root task will also error on the `Portal.result()`
|
||||||
# so we expect an error from there AND the child.
|
# call so we expect an error from there AND the child.
|
||||||
with pytest.raises(BaseExceptionGroup) as excinfo:
|
# |_ tho seems like on new `trio` this doesn't always
|
||||||
|
# happen?
|
||||||
|
with pytest.raises((
|
||||||
|
BaseExceptionGroup,
|
||||||
|
tractor.RemoteActorError,
|
||||||
|
)) as excinfo:
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
|
|
||||||
# ensure boxed errors
|
# ensure boxed errors are `errtype`
|
||||||
for exc in excinfo.value.exceptions:
|
err: BaseException = excinfo.value
|
||||||
assert exc.type == errtype
|
if isinstance(err, BaseExceptionGroup):
|
||||||
|
suberrs: list[BaseException] = err.exceptions
|
||||||
|
else:
|
||||||
|
suberrs: list[BaseException] = [err]
|
||||||
|
|
||||||
|
for exc in suberrs:
|
||||||
|
assert exc.boxed_type == errtype
|
||||||
|
|
||||||
|
|
||||||
def test_multierror(arb_addr):
|
def test_multierror(
|
||||||
|
reg_addr: tuple[str, int],
|
||||||
|
):
|
||||||
'''
|
'''
|
||||||
Verify we raise a ``BaseExceptionGroup`` out of a nursery where
|
Verify we raise a ``BaseExceptionGroup`` out of a nursery where
|
||||||
more then one actor errors.
|
more then one actor errors.
|
||||||
|
|
@ -105,7 +120,7 @@ def test_multierror(arb_addr):
|
||||||
'''
|
'''
|
||||||
async def main():
|
async def main():
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
arbiter_addr=arb_addr,
|
registry_addrs=[reg_addr],
|
||||||
) as nursery:
|
) as nursery:
|
||||||
|
|
||||||
await nursery.run_in_actor(assert_err, name='errorer1')
|
await nursery.run_in_actor(assert_err, name='errorer1')
|
||||||
|
|
@ -115,7 +130,7 @@ def test_multierror(arb_addr):
|
||||||
try:
|
try:
|
||||||
await portal2.result()
|
await portal2.result()
|
||||||
except tractor.RemoteActorError as err:
|
except tractor.RemoteActorError as err:
|
||||||
assert err.type == AssertionError
|
assert err.boxed_type is AssertionError
|
||||||
print("Look Maa that first actor failed hard, hehh")
|
print("Look Maa that first actor failed hard, hehh")
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
@ -130,14 +145,14 @@ def test_multierror(arb_addr):
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
'num_subactors', range(25, 26),
|
'num_subactors', range(25, 26),
|
||||||
)
|
)
|
||||||
def test_multierror_fast_nursery(arb_addr, start_method, num_subactors, delay):
|
def test_multierror_fast_nursery(reg_addr, start_method, num_subactors, delay):
|
||||||
"""Verify we raise a ``BaseExceptionGroup`` out of a nursery where
|
"""Verify we raise a ``BaseExceptionGroup`` out of a nursery where
|
||||||
more then one actor errors and also with a delay before failure
|
more then one actor errors and also with a delay before failure
|
||||||
to test failure during an ongoing spawning.
|
to test failure during an ongoing spawning.
|
||||||
"""
|
"""
|
||||||
async def main():
|
async def main():
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
arbiter_addr=arb_addr,
|
registry_addrs=[reg_addr],
|
||||||
) as nursery:
|
) as nursery:
|
||||||
|
|
||||||
for i in range(num_subactors):
|
for i in range(num_subactors):
|
||||||
|
|
@ -167,7 +182,7 @@ def test_multierror_fast_nursery(arb_addr, start_method, num_subactors, delay):
|
||||||
|
|
||||||
for exc in exceptions:
|
for exc in exceptions:
|
||||||
assert isinstance(exc, tractor.RemoteActorError)
|
assert isinstance(exc, tractor.RemoteActorError)
|
||||||
assert exc.type == AssertionError
|
assert exc.boxed_type is AssertionError
|
||||||
|
|
||||||
|
|
||||||
async def do_nothing():
|
async def do_nothing():
|
||||||
|
|
@ -175,15 +190,20 @@ async def do_nothing():
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('mechanism', ['nursery_cancel', KeyboardInterrupt])
|
@pytest.mark.parametrize('mechanism', ['nursery_cancel', KeyboardInterrupt])
|
||||||
def test_cancel_single_subactor(arb_addr, mechanism):
|
def test_cancel_single_subactor(reg_addr, mechanism):
|
||||||
"""Ensure a ``ActorNursery.start_actor()`` spawned subactor
|
'''
|
||||||
|
Ensure a ``ActorNursery.start_actor()`` spawned subactor
|
||||||
cancels when the nursery is cancelled.
|
cancels when the nursery is cancelled.
|
||||||
"""
|
|
||||||
|
'''
|
||||||
async def spawn_actor():
|
async def spawn_actor():
|
||||||
"""Spawn an actor that blocks indefinitely.
|
'''
|
||||||
"""
|
Spawn an actor that blocks indefinitely then cancel via
|
||||||
|
either `ActorNursery.cancel()` or an exception raise.
|
||||||
|
|
||||||
|
'''
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
arbiter_addr=arb_addr,
|
registry_addrs=[reg_addr],
|
||||||
) as nursery:
|
) as nursery:
|
||||||
|
|
||||||
portal = await nursery.start_actor(
|
portal = await nursery.start_actor(
|
||||||
|
|
@ -216,7 +236,10 @@ async def stream_forever():
|
||||||
async def test_cancel_infinite_streamer(start_method):
|
async def test_cancel_infinite_streamer(start_method):
|
||||||
|
|
||||||
# stream for at most 1 seconds
|
# stream for at most 1 seconds
|
||||||
with trio.move_on_after(1) as cancel_scope:
|
with (
|
||||||
|
trio.fail_after(4),
|
||||||
|
trio.move_on_after(1) as cancel_scope
|
||||||
|
):
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery() as n:
|
||||||
portal = await n.start_actor(
|
portal = await n.start_actor(
|
||||||
'donny',
|
'donny',
|
||||||
|
|
@ -264,20 +287,32 @@ async def test_cancel_infinite_streamer(start_method):
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
@tractor_test
|
@tractor_test
|
||||||
async def test_some_cancels_all(num_actors_and_errs, start_method, loglevel):
|
async def test_some_cancels_all(
|
||||||
"""Verify a subset of failed subactors causes all others in
|
num_actors_and_errs: tuple,
|
||||||
|
start_method: str,
|
||||||
|
loglevel: str,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Verify a subset of failed subactors causes all others in
|
||||||
the nursery to be cancelled just like the strategy in trio.
|
the nursery to be cancelled just like the strategy in trio.
|
||||||
|
|
||||||
This is the first and only supervisory strategy at the moment.
|
This is the first and only supervisory strategy at the moment.
|
||||||
"""
|
|
||||||
num_actors, first_err, err_type, ria_func, da_func = num_actors_and_errs
|
'''
|
||||||
|
(
|
||||||
|
num_actors,
|
||||||
|
first_err,
|
||||||
|
err_type,
|
||||||
|
ria_func,
|
||||||
|
da_func,
|
||||||
|
) = num_actors_and_errs
|
||||||
try:
|
try:
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery() as an:
|
||||||
|
|
||||||
# spawn the same number of deamon actors which should be cancelled
|
# spawn the same number of deamon actors which should be cancelled
|
||||||
dactor_portals = []
|
dactor_portals = []
|
||||||
for i in range(num_actors):
|
for i in range(num_actors):
|
||||||
dactor_portals.append(await n.start_actor(
|
dactor_portals.append(await an.start_actor(
|
||||||
f'deamon_{i}',
|
f'deamon_{i}',
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
))
|
))
|
||||||
|
|
@ -287,7 +322,7 @@ async def test_some_cancels_all(num_actors_and_errs, start_method, loglevel):
|
||||||
for i in range(num_actors):
|
for i in range(num_actors):
|
||||||
# start actor(s) that will fail immediately
|
# start actor(s) that will fail immediately
|
||||||
riactor_portals.append(
|
riactor_portals.append(
|
||||||
await n.run_in_actor(
|
await an.run_in_actor(
|
||||||
func,
|
func,
|
||||||
name=f'actor_{i}',
|
name=f'actor_{i}',
|
||||||
**kwargs
|
**kwargs
|
||||||
|
|
@ -303,7 +338,7 @@ async def test_some_cancels_all(num_actors_and_errs, start_method, loglevel):
|
||||||
await portal.run(func, **kwargs)
|
await portal.run(func, **kwargs)
|
||||||
|
|
||||||
except tractor.RemoteActorError as err:
|
except tractor.RemoteActorError as err:
|
||||||
assert err.type == err_type
|
assert err.boxed_type == err_type
|
||||||
# we only expect this first error to propogate
|
# we only expect this first error to propogate
|
||||||
# (all other daemons are cancelled before they
|
# (all other daemons are cancelled before they
|
||||||
# can be scheduled)
|
# can be scheduled)
|
||||||
|
|
@ -317,19 +352,20 @@ async def test_some_cancels_all(num_actors_and_errs, start_method, loglevel):
|
||||||
|
|
||||||
# should error here with a ``RemoteActorError`` or ``MultiError``
|
# should error here with a ``RemoteActorError`` or ``MultiError``
|
||||||
|
|
||||||
except first_err as err:
|
except first_err as _err:
|
||||||
|
err = _err
|
||||||
if isinstance(err, BaseExceptionGroup):
|
if isinstance(err, BaseExceptionGroup):
|
||||||
assert len(err.exceptions) == num_actors
|
assert len(err.exceptions) == num_actors
|
||||||
for exc in err.exceptions:
|
for exc in err.exceptions:
|
||||||
if isinstance(exc, tractor.RemoteActorError):
|
if isinstance(exc, tractor.RemoteActorError):
|
||||||
assert exc.type == err_type
|
assert exc.boxed_type == err_type
|
||||||
else:
|
else:
|
||||||
assert isinstance(exc, trio.Cancelled)
|
assert isinstance(exc, trio.Cancelled)
|
||||||
elif isinstance(err, tractor.RemoteActorError):
|
elif isinstance(err, tractor.RemoteActorError):
|
||||||
assert err.type == err_type
|
assert err.boxed_type == err_type
|
||||||
|
|
||||||
assert n.cancelled is True
|
assert an.cancelled is True
|
||||||
assert not n._children
|
assert not an._children
|
||||||
else:
|
else:
|
||||||
pytest.fail("Should have gotten a remote assertion error?")
|
pytest.fail("Should have gotten a remote assertion error?")
|
||||||
|
|
||||||
|
|
@ -405,7 +441,7 @@ async def test_nested_multierrors(loglevel, start_method):
|
||||||
elif isinstance(subexc, tractor.RemoteActorError):
|
elif isinstance(subexc, tractor.RemoteActorError):
|
||||||
# on windows it seems we can't exactly be sure wtf
|
# on windows it seems we can't exactly be sure wtf
|
||||||
# will happen..
|
# will happen..
|
||||||
assert subexc.type in (
|
assert subexc.boxed_type in (
|
||||||
tractor.RemoteActorError,
|
tractor.RemoteActorError,
|
||||||
trio.Cancelled,
|
trio.Cancelled,
|
||||||
BaseExceptionGroup,
|
BaseExceptionGroup,
|
||||||
|
|
@ -415,7 +451,7 @@ async def test_nested_multierrors(loglevel, start_method):
|
||||||
for subsub in subexc.exceptions:
|
for subsub in subexc.exceptions:
|
||||||
|
|
||||||
if subsub in (tractor.RemoteActorError,):
|
if subsub in (tractor.RemoteActorError,):
|
||||||
subsub = subsub.type
|
subsub = subsub.boxed_type
|
||||||
|
|
||||||
assert type(subsub) in (
|
assert type(subsub) in (
|
||||||
trio.Cancelled,
|
trio.Cancelled,
|
||||||
|
|
@ -430,16 +466,16 @@ async def test_nested_multierrors(loglevel, start_method):
|
||||||
# we get back the (sent) cancel signal instead
|
# we get back the (sent) cancel signal instead
|
||||||
if is_win():
|
if is_win():
|
||||||
if isinstance(subexc, tractor.RemoteActorError):
|
if isinstance(subexc, tractor.RemoteActorError):
|
||||||
assert subexc.type in (
|
assert subexc.boxed_type in (
|
||||||
BaseExceptionGroup,
|
BaseExceptionGroup,
|
||||||
tractor.RemoteActorError
|
tractor.RemoteActorError
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
assert isinstance(subexc, BaseExceptionGroup)
|
assert isinstance(subexc, BaseExceptionGroup)
|
||||||
else:
|
else:
|
||||||
assert subexc.type is ExceptionGroup
|
assert subexc.boxed_type is ExceptionGroup
|
||||||
else:
|
else:
|
||||||
assert subexc.type in (
|
assert subexc.boxed_type in (
|
||||||
tractor.RemoteActorError,
|
tractor.RemoteActorError,
|
||||||
trio.Cancelled
|
trio.Cancelled
|
||||||
)
|
)
|
||||||
|
|
@ -484,7 +520,9 @@ def test_cancel_via_SIGINT_other_task(
|
||||||
if is_win(): # smh
|
if is_win(): # smh
|
||||||
timeout += 1
|
timeout += 1
|
||||||
|
|
||||||
async def spawn_and_sleep_forever(task_status=trio.TASK_STATUS_IGNORED):
|
async def spawn_and_sleep_forever(
|
||||||
|
task_status=trio.TASK_STATUS_IGNORED
|
||||||
|
):
|
||||||
async with tractor.open_nursery() as tn:
|
async with tractor.open_nursery() as tn:
|
||||||
for i in range(3):
|
for i in range(3):
|
||||||
await tn.run_in_actor(
|
await tn.run_in_actor(
|
||||||
|
|
@ -497,8 +535,15 @@ def test_cancel_via_SIGINT_other_task(
|
||||||
async def main():
|
async def main():
|
||||||
# should never timeout since SIGINT should cancel the current program
|
# should never timeout since SIGINT should cancel the current program
|
||||||
with trio.fail_after(timeout):
|
with trio.fail_after(timeout):
|
||||||
async with trio.open_nursery() as n:
|
async with (
|
||||||
await n.start(spawn_and_sleep_forever)
|
|
||||||
|
# XXX ?TODO? why no work!?
|
||||||
|
# tractor.trionics.collapse_eg(),
|
||||||
|
trio.open_nursery(
|
||||||
|
strict_exception_groups=False,
|
||||||
|
) as tn,
|
||||||
|
):
|
||||||
|
await tn.start(spawn_and_sleep_forever)
|
||||||
if 'mp' in spawn_backend:
|
if 'mp' in spawn_backend:
|
||||||
time.sleep(0.1)
|
time.sleep(0.1)
|
||||||
os.kill(pid, signal.SIGINT)
|
os.kill(pid, signal.SIGINT)
|
||||||
|
|
@ -509,38 +554,123 @@ def test_cancel_via_SIGINT_other_task(
|
||||||
|
|
||||||
async def spin_for(period=3):
|
async def spin_for(period=3):
|
||||||
"Sync sleep."
|
"Sync sleep."
|
||||||
|
print(f'sync sleeping in sub-sub for {period}\n')
|
||||||
time.sleep(period)
|
time.sleep(period)
|
||||||
|
|
||||||
|
|
||||||
async def spawn():
|
async def spawn_sub_with_sync_blocking_task():
|
||||||
async with tractor.open_nursery() as tn:
|
async with tractor.open_nursery() as an:
|
||||||
await tn.run_in_actor(
|
print('starting sync blocking subactor..\n')
|
||||||
|
await an.run_in_actor(
|
||||||
spin_for,
|
spin_for,
|
||||||
name='sleeper',
|
name='sleeper',
|
||||||
)
|
)
|
||||||
|
print('exiting first subactor layer..\n')
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'man_cancel_outer',
|
||||||
|
[
|
||||||
|
False, # passes if delay != 2
|
||||||
|
|
||||||
|
# always causes an unexpected eg-w-embedded-assert-err?
|
||||||
|
pytest.param(True,
|
||||||
|
marks=pytest.mark.xfail(
|
||||||
|
reason=(
|
||||||
|
'always causes an unexpected eg-w-embedded-assert-err?'
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
@no_windows
|
@no_windows
|
||||||
def test_cancel_while_childs_child_in_sync_sleep(
|
def test_cancel_while_childs_child_in_sync_sleep(
|
||||||
loglevel,
|
loglevel: str,
|
||||||
start_method,
|
start_method: str,
|
||||||
spawn_backend,
|
spawn_backend: str,
|
||||||
|
debug_mode: bool,
|
||||||
|
reg_addr: tuple,
|
||||||
|
man_cancel_outer: bool,
|
||||||
):
|
):
|
||||||
"""Verify that a child cancelled while executing sync code is torn
|
'''
|
||||||
|
Verify that a child cancelled while executing sync code is torn
|
||||||
down even when that cancellation is triggered by the parent
|
down even when that cancellation is triggered by the parent
|
||||||
2 nurseries "up".
|
2 nurseries "up".
|
||||||
"""
|
|
||||||
|
Though the grandchild should stay blocking its actor runtime, its
|
||||||
|
parent should issue a "zombie reaper" to hard kill it after
|
||||||
|
sufficient timeout.
|
||||||
|
|
||||||
|
'''
|
||||||
if start_method == 'forkserver':
|
if start_method == 'forkserver':
|
||||||
pytest.skip("Forksever sux hard at resuming from sync sleep...")
|
pytest.skip("Forksever sux hard at resuming from sync sleep...")
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
with trio.fail_after(2):
|
#
|
||||||
async with tractor.open_nursery() as tn:
|
# XXX BIG TODO NOTE XXX
|
||||||
await tn.run_in_actor(
|
#
|
||||||
spawn,
|
# it seems there's a strange race that can happen
|
||||||
name='spawn',
|
# where where the fail-after will trigger outer scope
|
||||||
|
# .cancel() which then causes the inner scope to raise,
|
||||||
|
#
|
||||||
|
# BaseExceptionGroup('Exceptions from Trio nursery', [
|
||||||
|
# BaseExceptionGroup('Exceptions from Trio nursery',
|
||||||
|
# [
|
||||||
|
# Cancelled(),
|
||||||
|
# Cancelled(),
|
||||||
|
# ]
|
||||||
|
# ),
|
||||||
|
# AssertionError('assert 0')
|
||||||
|
# ])
|
||||||
|
#
|
||||||
|
# WHY THIS DOESN'T MAKE SENSE:
|
||||||
|
# ---------------------------
|
||||||
|
# - it should raise too-slow-error when too slow..
|
||||||
|
# * verified that using simple-cs and manually cancelling
|
||||||
|
# you get same outcome -> indicates that the fail-after
|
||||||
|
# can have its TooSlowError overriden!
|
||||||
|
# |_ to check this it's easy, simplly decrease the timeout
|
||||||
|
# as per the var below.
|
||||||
|
#
|
||||||
|
# - when using the manual simple-cs the outcome is different
|
||||||
|
# DESPITE the `assert 0` which means regardless of the
|
||||||
|
# inner scope effectively failing in the same way, the
|
||||||
|
# bubbling up **is NOT the same**.
|
||||||
|
#
|
||||||
|
# delays trigger diff outcomes..
|
||||||
|
# ---------------------------
|
||||||
|
# as seen by uncommenting various lines below there is from
|
||||||
|
# my POV an unexpected outcome due to the delay=2 case.
|
||||||
|
#
|
||||||
|
# delay = 1 # no AssertionError in eg, TooSlowError raised.
|
||||||
|
# delay = 2 # is AssertionError in eg AND no TooSlowError !?
|
||||||
|
delay = 4 # is AssertionError in eg AND no _cs cancellation.
|
||||||
|
|
||||||
|
with trio.fail_after(delay) as _cs:
|
||||||
|
# with trio.CancelScope() as cs:
|
||||||
|
# ^XXX^ can be used instead to see same outcome.
|
||||||
|
|
||||||
|
async with (
|
||||||
|
# tractor.trionics.collapse_eg(), # doesn't help
|
||||||
|
tractor.open_nursery(
|
||||||
|
hide_tb=False,
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
registry_addrs=[reg_addr],
|
||||||
|
) as an,
|
||||||
|
):
|
||||||
|
await an.run_in_actor(
|
||||||
|
spawn_sub_with_sync_blocking_task,
|
||||||
|
name='sync_blocking_sub',
|
||||||
)
|
)
|
||||||
await trio.sleep(1)
|
await trio.sleep(1)
|
||||||
|
|
||||||
|
if man_cancel_outer:
|
||||||
|
print('Cancelling manually in root')
|
||||||
|
_cs.cancel()
|
||||||
|
|
||||||
|
# trigger exc-srced taskc down
|
||||||
|
# the actor tree.
|
||||||
|
print('RAISING IN ROOT')
|
||||||
assert 0
|
assert 0
|
||||||
|
|
||||||
with pytest.raises(AssertionError):
|
with pytest.raises(AssertionError):
|
||||||
|
|
@ -590,6 +720,12 @@ def test_fast_graceful_cancel_when_spawn_task_in_soft_proc_wait_for_daemon(
|
||||||
nurse.start_soon(delayed_kbi)
|
nurse.start_soon(delayed_kbi)
|
||||||
|
|
||||||
await p.run(do_nuthin)
|
await p.run(do_nuthin)
|
||||||
|
|
||||||
|
# need to explicitly re-raise the lone kbi..now
|
||||||
|
except* KeyboardInterrupt as kbi_eg:
|
||||||
|
assert (len(excs := kbi_eg.exceptions) == 1)
|
||||||
|
raise excs[0]
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
duration = time.time() - start
|
duration = time.time() - start
|
||||||
if duration > timeout:
|
if duration > timeout:
|
||||||
|
|
|
||||||
|
|
@ -6,14 +6,15 @@ sub-sub-actor daemons.
|
||||||
'''
|
'''
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
import asyncio
|
import asyncio
|
||||||
from contextlib import asynccontextmanager as acm
|
from contextlib import (
|
||||||
|
asynccontextmanager as acm,
|
||||||
|
aclosing,
|
||||||
|
)
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import trio
|
import trio
|
||||||
from trio_typing import TaskStatus
|
|
||||||
import tractor
|
import tractor
|
||||||
from tractor import RemoteActorError
|
from tractor import RemoteActorError
|
||||||
from async_generator import aclosing
|
|
||||||
|
|
||||||
|
|
||||||
async def aio_streamer(
|
async def aio_streamer(
|
||||||
|
|
@ -94,8 +95,8 @@ async def trio_main(
|
||||||
|
|
||||||
# stash a "service nursery" as "actor local" (aka a Python global)
|
# stash a "service nursery" as "actor local" (aka a Python global)
|
||||||
global _nursery
|
global _nursery
|
||||||
n = _nursery
|
tn = _nursery
|
||||||
assert n
|
assert tn
|
||||||
|
|
||||||
async def consume_stream():
|
async def consume_stream():
|
||||||
async with wrapper_mngr() as stream:
|
async with wrapper_mngr() as stream:
|
||||||
|
|
@ -103,10 +104,10 @@ async def trio_main(
|
||||||
print(msg)
|
print(msg)
|
||||||
|
|
||||||
# run 2 tasks to ensure broadcaster chan use
|
# run 2 tasks to ensure broadcaster chan use
|
||||||
n.start_soon(consume_stream)
|
tn.start_soon(consume_stream)
|
||||||
n.start_soon(consume_stream)
|
tn.start_soon(consume_stream)
|
||||||
|
|
||||||
n.start_soon(trio_sleep_and_err)
|
tn.start_soon(trio_sleep_and_err)
|
||||||
|
|
||||||
await trio.sleep_forever()
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
|
@ -116,8 +117,11 @@ async def open_actor_local_nursery(
|
||||||
ctx: tractor.Context,
|
ctx: tractor.Context,
|
||||||
):
|
):
|
||||||
global _nursery
|
global _nursery
|
||||||
async with trio.open_nursery() as n:
|
async with (
|
||||||
_nursery = n
|
tractor.trionics.collapse_eg(),
|
||||||
|
trio.open_nursery() as tn
|
||||||
|
):
|
||||||
|
_nursery = tn
|
||||||
await ctx.started()
|
await ctx.started()
|
||||||
await trio.sleep(10)
|
await trio.sleep(10)
|
||||||
# await trio.sleep(1)
|
# await trio.sleep(1)
|
||||||
|
|
@ -131,7 +135,7 @@ async def open_actor_local_nursery(
|
||||||
# never yields back.. aka a scenario where the
|
# never yields back.. aka a scenario where the
|
||||||
# ``tractor.context`` task IS NOT in the service n's cancel
|
# ``tractor.context`` task IS NOT in the service n's cancel
|
||||||
# scope.
|
# scope.
|
||||||
n.cancel_scope.cancel()
|
tn.cancel_scope.cancel()
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
|
|
@ -141,7 +145,7 @@ async def open_actor_local_nursery(
|
||||||
)
|
)
|
||||||
def test_actor_managed_trio_nursery_task_error_cancels_aio(
|
def test_actor_managed_trio_nursery_task_error_cancels_aio(
|
||||||
asyncio_mode: bool,
|
asyncio_mode: bool,
|
||||||
arb_addr
|
reg_addr: tuple,
|
||||||
):
|
):
|
||||||
'''
|
'''
|
||||||
Verify that a ``trio`` nursery created managed in a child actor
|
Verify that a ``trio`` nursery created managed in a child actor
|
||||||
|
|
@ -156,7 +160,7 @@ def test_actor_managed_trio_nursery_task_error_cancels_aio(
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery() as n:
|
||||||
p = await n.start_actor(
|
p = await n.start_actor(
|
||||||
'nursery_mngr',
|
'nursery_mngr',
|
||||||
infect_asyncio=asyncio_mode,
|
infect_asyncio=asyncio_mode, # TODO, is this enabling debug mode?
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
)
|
)
|
||||||
async with (
|
async with (
|
||||||
|
|
@ -170,4 +174,4 @@ def test_actor_managed_trio_nursery_task_error_cancels_aio(
|
||||||
|
|
||||||
# verify boxed error
|
# verify boxed error
|
||||||
err = excinfo.value
|
err = excinfo.value
|
||||||
assert isinstance(err.type(), NameError)
|
assert err.boxed_type is NameError
|
||||||
|
|
|
||||||
|
|
@ -5,9 +5,7 @@ import trio
|
||||||
import tractor
|
import tractor
|
||||||
from tractor import open_actor_cluster
|
from tractor import open_actor_cluster
|
||||||
from tractor.trionics import gather_contexts
|
from tractor.trionics import gather_contexts
|
||||||
|
from tractor._testing import tractor_test
|
||||||
from conftest import tractor_test
|
|
||||||
|
|
||||||
|
|
||||||
MESSAGE = 'tractoring at full speed'
|
MESSAGE = 'tractoring at full speed'
|
||||||
|
|
||||||
|
|
@ -15,26 +13,24 @@ MESSAGE = 'tractoring at full speed'
|
||||||
def test_empty_mngrs_input_raises() -> None:
|
def test_empty_mngrs_input_raises() -> None:
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
with trio.fail_after(1):
|
with trio.fail_after(3):
|
||||||
async with (
|
async with (
|
||||||
open_actor_cluster(
|
open_actor_cluster(
|
||||||
modules=[__name__],
|
modules=[__name__],
|
||||||
|
|
||||||
# NOTE: ensure we can passthrough runtime opts
|
# NOTE: ensure we can passthrough runtime opts
|
||||||
loglevel='info',
|
loglevel='cancel',
|
||||||
# debug_mode=True,
|
debug_mode=False,
|
||||||
|
|
||||||
) as portals,
|
) as portals,
|
||||||
|
|
||||||
gather_contexts(
|
gather_contexts(mngrs=()),
|
||||||
# NOTE: it's the use of inline-generator syntax
|
|
||||||
# here that causes the empty input.
|
|
||||||
mngrs=(
|
|
||||||
p.open_context(worker) for p in portals.values()
|
|
||||||
),
|
|
||||||
),
|
|
||||||
):
|
):
|
||||||
assert 0
|
# should fail before this?
|
||||||
|
assert portals
|
||||||
|
|
||||||
|
# test should fail if we mk it here!
|
||||||
|
assert 0, 'Should have raised val-err !?'
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
|
|
@ -49,7 +45,7 @@ async def worker(
|
||||||
await ctx.started()
|
await ctx.started()
|
||||||
|
|
||||||
async with ctx.open_stream(
|
async with ctx.open_stream(
|
||||||
backpressure=True,
|
allow_overruns=True,
|
||||||
) as stream:
|
) as stream:
|
||||||
|
|
||||||
# TODO: this with the below assert causes a hang bug?
|
# TODO: this with the below assert causes a hang bug?
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
Load Diff
|
|
@ -1,945 +0,0 @@
|
||||||
"""
|
|
||||||
That "native" debug mode better work!
|
|
||||||
|
|
||||||
All these tests can be understood (somewhat) by running the equivalent
|
|
||||||
`examples/debugging/` scripts manually.
|
|
||||||
|
|
||||||
TODO:
|
|
||||||
- none of these tests have been run successfully on windows yet but
|
|
||||||
there's been manual testing that verified it works.
|
|
||||||
- wonder if any of it'll work on OS X?
|
|
||||||
|
|
||||||
"""
|
|
||||||
import itertools
|
|
||||||
from os import path
|
|
||||||
from typing import Optional
|
|
||||||
import platform
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
import pexpect
|
|
||||||
from pexpect.exceptions import (
|
|
||||||
TIMEOUT,
|
|
||||||
EOF,
|
|
||||||
)
|
|
||||||
|
|
||||||
from conftest import repodir, _ci_env
|
|
||||||
|
|
||||||
# TODO: The next great debugger audit could be done by you!
|
|
||||||
# - recurrent entry to breakpoint() from single actor *after* and an
|
|
||||||
# error in another task?
|
|
||||||
# - root error before child errors
|
|
||||||
# - root error after child errors
|
|
||||||
# - root error before child breakpoint
|
|
||||||
# - root error after child breakpoint
|
|
||||||
# - recurrent root errors
|
|
||||||
|
|
||||||
|
|
||||||
if platform.system() == 'Windows':
|
|
||||||
pytest.skip(
|
|
||||||
'Debugger tests have no windows support (yet)',
|
|
||||||
allow_module_level=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def examples_dir():
|
|
||||||
"""Return the abspath to the examples directory.
|
|
||||||
"""
|
|
||||||
return path.join(repodir(), 'examples', 'debugging/')
|
|
||||||
|
|
||||||
|
|
||||||
def mk_cmd(ex_name: str) -> str:
|
|
||||||
"""Generate a command suitable to pass to ``pexpect.spawn()``.
|
|
||||||
"""
|
|
||||||
return ' '.join(
|
|
||||||
['python',
|
|
||||||
path.join(examples_dir(), f'{ex_name}.py')]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# TODO: was trying to this xfail style but some weird bug i see in CI
|
|
||||||
# that's happening at collect time.. pretty soon gonna dump actions i'm
|
|
||||||
# thinkin...
|
|
||||||
# in CI we skip tests which >= depth 1 actor trees due to there
|
|
||||||
# still being an oustanding issue with relaying the debug-mode-state
|
|
||||||
# through intermediary parents.
|
|
||||||
has_nested_actors = pytest.mark.has_nested_actors
|
|
||||||
# .xfail(
|
|
||||||
# os.environ.get('CI', False),
|
|
||||||
# reason=(
|
|
||||||
# 'This test uses nested actors and fails in CI\n'
|
|
||||||
# 'The test seems to run fine locally but until we solve the '
|
|
||||||
# 'following issue this CI test will be xfail:\n'
|
|
||||||
# 'https://github.com/goodboy/tractor/issues/320'
|
|
||||||
# )
|
|
||||||
# )
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def spawn(
|
|
||||||
start_method,
|
|
||||||
testdir,
|
|
||||||
arb_addr,
|
|
||||||
) -> 'pexpect.spawn':
|
|
||||||
|
|
||||||
if start_method != 'trio':
|
|
||||||
pytest.skip(
|
|
||||||
"Debugger tests are only supported on the trio backend"
|
|
||||||
)
|
|
||||||
|
|
||||||
def _spawn(cmd):
|
|
||||||
return testdir.spawn(
|
|
||||||
cmd=mk_cmd(cmd),
|
|
||||||
expect_timeout=3,
|
|
||||||
)
|
|
||||||
|
|
||||||
return _spawn
|
|
||||||
|
|
||||||
|
|
||||||
PROMPT = r"\(Pdb\+\+\)"
|
|
||||||
|
|
||||||
|
|
||||||
def expect(
|
|
||||||
child,
|
|
||||||
|
|
||||||
# prompt by default
|
|
||||||
patt: str = PROMPT,
|
|
||||||
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) -> None:
|
|
||||||
'''
|
|
||||||
Expect wrapper that prints last seen console
|
|
||||||
data before failing.
|
|
||||||
|
|
||||||
'''
|
|
||||||
try:
|
|
||||||
child.expect(
|
|
||||||
patt,
|
|
||||||
**kwargs,
|
|
||||||
)
|
|
||||||
except TIMEOUT:
|
|
||||||
before = str(child.before.decode())
|
|
||||||
print(before)
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def assert_before(
|
|
||||||
child,
|
|
||||||
patts: list[str],
|
|
||||||
|
|
||||||
) -> None:
|
|
||||||
|
|
||||||
before = str(child.before.decode())
|
|
||||||
|
|
||||||
for patt in patts:
|
|
||||||
try:
|
|
||||||
assert patt in before
|
|
||||||
except AssertionError:
|
|
||||||
print(before)
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(
|
|
||||||
params=[False, True],
|
|
||||||
ids='ctl-c={}'.format,
|
|
||||||
)
|
|
||||||
def ctlc(
|
|
||||||
request,
|
|
||||||
ci_env: bool,
|
|
||||||
|
|
||||||
) -> bool:
|
|
||||||
|
|
||||||
use_ctlc = request.param
|
|
||||||
|
|
||||||
if (
|
|
||||||
sys.version_info <= (3, 10)
|
|
||||||
and use_ctlc
|
|
||||||
):
|
|
||||||
# on 3.9 it seems the REPL UX
|
|
||||||
# is highly unreliable and frankly annoying
|
|
||||||
# to test for. It does work from manual testing
|
|
||||||
# but i just don't think it's wroth it to try
|
|
||||||
# and get this working especially since we want to
|
|
||||||
# be 3.10+ mega-asap.
|
|
||||||
pytest.skip('Py3.9 and `pdbpp` son no bueno..')
|
|
||||||
|
|
||||||
node = request.node
|
|
||||||
markers = node.own_markers
|
|
||||||
for mark in markers:
|
|
||||||
if mark.name == 'has_nested_actors':
|
|
||||||
pytest.skip(
|
|
||||||
f'Test {node} has nested actors and fails with Ctrl-C.\n'
|
|
||||||
f'The test can sometimes run fine locally but until'
|
|
||||||
' we solve' 'this issue this CI test will be xfail:\n'
|
|
||||||
'https://github.com/goodboy/tractor/issues/320'
|
|
||||||
)
|
|
||||||
|
|
||||||
if use_ctlc:
|
|
||||||
# XXX: disable pygments highlighting for auto-tests
|
|
||||||
# since some envs (like actions CI) will struggle
|
|
||||||
# the the added color-char encoding..
|
|
||||||
from tractor._debug import TractorConfig
|
|
||||||
TractorConfig.use_pygements = False
|
|
||||||
|
|
||||||
yield use_ctlc
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
'user_in_out',
|
|
||||||
[
|
|
||||||
('c', 'AssertionError'),
|
|
||||||
('q', 'AssertionError'),
|
|
||||||
],
|
|
||||||
ids=lambda item: f'{item[0]} -> {item[1]}',
|
|
||||||
)
|
|
||||||
def test_root_actor_error(spawn, user_in_out):
|
|
||||||
"""Demonstrate crash handler entering pdbpp from basic error in root actor.
|
|
||||||
"""
|
|
||||||
user_input, expect_err_str = user_in_out
|
|
||||||
|
|
||||||
child = spawn('root_actor_error')
|
|
||||||
|
|
||||||
# scan for the pdbpp prompt
|
|
||||||
expect(child, PROMPT)
|
|
||||||
|
|
||||||
before = str(child.before.decode())
|
|
||||||
|
|
||||||
# make sure expected logging and error arrives
|
|
||||||
assert "Attaching to pdb in crashed actor: ('root'" in before
|
|
||||||
assert 'AssertionError' in before
|
|
||||||
|
|
||||||
# send user command
|
|
||||||
child.sendline(user_input)
|
|
||||||
|
|
||||||
# process should exit
|
|
||||||
expect(child, EOF)
|
|
||||||
assert expect_err_str in str(child.before)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
'user_in_out',
|
|
||||||
[
|
|
||||||
('c', None),
|
|
||||||
('q', 'bdb.BdbQuit'),
|
|
||||||
],
|
|
||||||
ids=lambda item: f'{item[0]} -> {item[1]}',
|
|
||||||
)
|
|
||||||
def test_root_actor_bp(spawn, user_in_out):
|
|
||||||
"""Demonstrate breakpoint from in root actor.
|
|
||||||
"""
|
|
||||||
user_input, expect_err_str = user_in_out
|
|
||||||
child = spawn('root_actor_breakpoint')
|
|
||||||
|
|
||||||
# scan for the pdbpp prompt
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
assert 'Error' not in str(child.before)
|
|
||||||
|
|
||||||
# send user command
|
|
||||||
child.sendline(user_input)
|
|
||||||
child.expect('\r\n')
|
|
||||||
|
|
||||||
# process should exit
|
|
||||||
child.expect(pexpect.EOF)
|
|
||||||
|
|
||||||
if expect_err_str is None:
|
|
||||||
assert 'Error' not in str(child.before)
|
|
||||||
else:
|
|
||||||
assert expect_err_str in str(child.before)
|
|
||||||
|
|
||||||
|
|
||||||
def do_ctlc(
|
|
||||||
child,
|
|
||||||
count: int = 3,
|
|
||||||
delay: float = 0.1,
|
|
||||||
patt: Optional[str] = None,
|
|
||||||
|
|
||||||
# expect repl UX to reprint the prompt after every
|
|
||||||
# ctrl-c send.
|
|
||||||
# XXX: no idea but, in CI this never seems to work even on 3.10 so
|
|
||||||
# needs some further investigation potentially...
|
|
||||||
expect_prompt: bool = not _ci_env,
|
|
||||||
|
|
||||||
) -> None:
|
|
||||||
|
|
||||||
# make sure ctl-c sends don't do anything but repeat output
|
|
||||||
for _ in range(count):
|
|
||||||
time.sleep(delay)
|
|
||||||
child.sendcontrol('c')
|
|
||||||
|
|
||||||
# TODO: figure out why this makes CI fail..
|
|
||||||
# if you run this test manually it works just fine..
|
|
||||||
if expect_prompt:
|
|
||||||
before = str(child.before.decode())
|
|
||||||
time.sleep(delay)
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
time.sleep(delay)
|
|
||||||
|
|
||||||
if patt:
|
|
||||||
# should see the last line on console
|
|
||||||
assert patt in before
|
|
||||||
|
|
||||||
|
|
||||||
def test_root_actor_bp_forever(
|
|
||||||
spawn,
|
|
||||||
ctlc: bool,
|
|
||||||
):
|
|
||||||
"Re-enter a breakpoint from the root actor-task."
|
|
||||||
child = spawn('root_actor_breakpoint_forever')
|
|
||||||
|
|
||||||
# do some "next" commands to demonstrate recurrent breakpoint
|
|
||||||
# entries
|
|
||||||
for _ in range(10):
|
|
||||||
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
child.sendline('next')
|
|
||||||
|
|
||||||
# do one continue which should trigger a
|
|
||||||
# new task to lock the tty
|
|
||||||
child.sendline('continue')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
# seems that if we hit ctrl-c too fast the
|
|
||||||
# sigint guard machinery might not kick in..
|
|
||||||
time.sleep(0.001)
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# XXX: this previously caused a bug!
|
|
||||||
child.sendline('n')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
child.sendline('n')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
# quit out of the loop
|
|
||||||
child.sendline('q')
|
|
||||||
child.expect(pexpect.EOF)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
'do_next',
|
|
||||||
(True, False),
|
|
||||||
ids='do_next={}'.format,
|
|
||||||
)
|
|
||||||
def test_subactor_error(
|
|
||||||
spawn,
|
|
||||||
ctlc: bool,
|
|
||||||
do_next: bool,
|
|
||||||
):
|
|
||||||
'''
|
|
||||||
Single subactor raising an error
|
|
||||||
|
|
||||||
'''
|
|
||||||
child = spawn('subactor_error')
|
|
||||||
|
|
||||||
# scan for the pdbpp prompt
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert "Attaching to pdb in crashed actor: ('name_error'" in before
|
|
||||||
|
|
||||||
if do_next:
|
|
||||||
child.sendline('n')
|
|
||||||
|
|
||||||
else:
|
|
||||||
# make sure ctl-c sends don't do anything but repeat output
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(
|
|
||||||
child,
|
|
||||||
)
|
|
||||||
|
|
||||||
# send user command and (in this case it's the same for 'continue'
|
|
||||||
# vs. 'quit') the debugger should enter a second time in the nursery
|
|
||||||
# creating actor
|
|
||||||
child.sendline('continue')
|
|
||||||
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
before = str(child.before.decode())
|
|
||||||
|
|
||||||
# root actor gets debugger engaged
|
|
||||||
assert "Attaching to pdb in crashed actor: ('root'" in before
|
|
||||||
# error is a remote error propagated from the subactor
|
|
||||||
assert "RemoteActorError: ('name_error'" in before
|
|
||||||
|
|
||||||
# another round
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect('\r\n')
|
|
||||||
|
|
||||||
# process should exit
|
|
||||||
child.expect(pexpect.EOF)
|
|
||||||
|
|
||||||
|
|
||||||
def test_subactor_breakpoint(
|
|
||||||
spawn,
|
|
||||||
ctlc: bool,
|
|
||||||
):
|
|
||||||
"Single subactor with an infinite breakpoint loop"
|
|
||||||
|
|
||||||
child = spawn('subactor_breakpoint')
|
|
||||||
|
|
||||||
# scan for the pdbpp prompt
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert "Attaching pdb to actor: ('breakpoint_forever'" in before
|
|
||||||
|
|
||||||
# do some "next" commands to demonstrate recurrent breakpoint
|
|
||||||
# entries
|
|
||||||
for _ in range(10):
|
|
||||||
child.sendline('next')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# now run some "continues" to show re-entries
|
|
||||||
for _ in range(5):
|
|
||||||
child.sendline('continue')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert "Attaching pdb to actor: ('breakpoint_forever'" in before
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# finally quit the loop
|
|
||||||
child.sendline('q')
|
|
||||||
|
|
||||||
# child process should exit but parent will capture pdb.BdbQuit
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert "RemoteActorError: ('breakpoint_forever'" in before
|
|
||||||
assert 'bdb.BdbQuit' in before
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# quit the parent
|
|
||||||
child.sendline('c')
|
|
||||||
|
|
||||||
# process should exit
|
|
||||||
child.expect(pexpect.EOF)
|
|
||||||
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert "RemoteActorError: ('breakpoint_forever'" in before
|
|
||||||
assert 'bdb.BdbQuit' in before
|
|
||||||
|
|
||||||
|
|
||||||
@has_nested_actors
|
|
||||||
def test_multi_subactors(
|
|
||||||
spawn,
|
|
||||||
ctlc: bool,
|
|
||||||
):
|
|
||||||
'''
|
|
||||||
Multiple subactors, both erroring and
|
|
||||||
breakpointing as well as a nested subactor erroring.
|
|
||||||
|
|
||||||
'''
|
|
||||||
child = spawn(r'multi_subactors')
|
|
||||||
|
|
||||||
# scan for the pdbpp prompt
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert "Attaching pdb to actor: ('breakpoint_forever'" in before
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# do some "next" commands to demonstrate recurrent breakpoint
|
|
||||||
# entries
|
|
||||||
for _ in range(10):
|
|
||||||
child.sendline('next')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# continue to next error
|
|
||||||
child.sendline('c')
|
|
||||||
|
|
||||||
# first name_error failure
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert "Attaching to pdb in crashed actor: ('name_error'" in before
|
|
||||||
assert "NameError" in before
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# continue again
|
|
||||||
child.sendline('c')
|
|
||||||
|
|
||||||
# 2nd name_error failure
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
# TODO: will we ever get the race where this crash will show up?
|
|
||||||
# blocklist strat now prevents this crash
|
|
||||||
# assert_before(child, [
|
|
||||||
# "Attaching to pdb in crashed actor: ('name_error_1'",
|
|
||||||
# "NameError",
|
|
||||||
# ])
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# breakpoint loop should re-engage
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert "Attaching pdb to actor: ('breakpoint_forever'" in before
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# wait for spawn error to show up
|
|
||||||
spawn_err = "Attaching to pdb in crashed actor: ('spawn_error'"
|
|
||||||
start = time.time()
|
|
||||||
while (
|
|
||||||
spawn_err not in before
|
|
||||||
and (time.time() - start) < 3 # timeout eventually
|
|
||||||
):
|
|
||||||
child.sendline('c')
|
|
||||||
time.sleep(0.1)
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
before = str(child.before.decode())
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# 2nd depth nursery should trigger
|
|
||||||
# (XXX: this below if guard is technically a hack that makes the
|
|
||||||
# nested case seem to work locally on linux but ideally in the long
|
|
||||||
# run this can be dropped.)
|
|
||||||
if not ctlc:
|
|
||||||
assert_before(child, [
|
|
||||||
spawn_err,
|
|
||||||
"RemoteActorError: ('name_error_1'",
|
|
||||||
])
|
|
||||||
|
|
||||||
# now run some "continues" to show re-entries
|
|
||||||
for _ in range(5):
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
# quit the loop and expect parent to attach
|
|
||||||
child.sendline('q')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
before = str(child.before.decode())
|
|
||||||
|
|
||||||
assert_before(child, [
|
|
||||||
# debugger attaches to root
|
|
||||||
"Attaching to pdb in crashed actor: ('root'",
|
|
||||||
|
|
||||||
# expect a multierror with exceptions for each sub-actor
|
|
||||||
"RemoteActorError: ('breakpoint_forever'",
|
|
||||||
"RemoteActorError: ('name_error'",
|
|
||||||
"RemoteActorError: ('spawn_error'",
|
|
||||||
"RemoteActorError: ('name_error_1'",
|
|
||||||
'bdb.BdbQuit',
|
|
||||||
])
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# process should exit
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect(pexpect.EOF)
|
|
||||||
|
|
||||||
# repeat of previous multierror for final output
|
|
||||||
assert_before(child, [
|
|
||||||
"RemoteActorError: ('breakpoint_forever'",
|
|
||||||
"RemoteActorError: ('name_error'",
|
|
||||||
"RemoteActorError: ('spawn_error'",
|
|
||||||
"RemoteActorError: ('name_error_1'",
|
|
||||||
'bdb.BdbQuit',
|
|
||||||
])
|
|
||||||
|
|
||||||
|
|
||||||
def test_multi_daemon_subactors(
|
|
||||||
spawn,
|
|
||||||
loglevel: str,
|
|
||||||
ctlc: bool
|
|
||||||
):
|
|
||||||
'''
|
|
||||||
Multiple daemon subactors, both erroring and breakpointing within a
|
|
||||||
stream.
|
|
||||||
|
|
||||||
'''
|
|
||||||
child = spawn('multi_daemon_subactors')
|
|
||||||
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
# there can be a race for which subactor will acquire
|
|
||||||
# the root's tty lock first so anticipate either crash
|
|
||||||
# message on the first entry.
|
|
||||||
|
|
||||||
bp_forever_msg = "Attaching pdb to actor: ('bp_forever'"
|
|
||||||
name_error_msg = "NameError: name 'doggypants' is not defined"
|
|
||||||
|
|
||||||
before = str(child.before.decode())
|
|
||||||
if bp_forever_msg in before:
|
|
||||||
next_msg = name_error_msg
|
|
||||||
|
|
||||||
elif name_error_msg in before:
|
|
||||||
next_msg = bp_forever_msg
|
|
||||||
|
|
||||||
else:
|
|
||||||
raise ValueError("Neither log msg was found !?")
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# NOTE: previously since we did not have clobber prevention
|
|
||||||
# in the root actor this final resume could result in the debugger
|
|
||||||
# tearing down since both child actors would be cancelled and it was
|
|
||||||
# unlikely that `bp_forever` would re-acquire the tty lock again.
|
|
||||||
# Now, we should have a final resumption in the root plus a possible
|
|
||||||
# second entry by `bp_forever`.
|
|
||||||
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
assert_before(child, [next_msg])
|
|
||||||
|
|
||||||
# XXX: hooray the root clobbering the child here was fixed!
|
|
||||||
# IMO, this demonstrates the true power of SC system design.
|
|
||||||
|
|
||||||
# now the root actor won't clobber the bp_forever child
|
|
||||||
# during it's first access to the debug lock, but will instead
|
|
||||||
# wait for the lock to release, by the edge triggered
|
|
||||||
# ``_debug.Lock.no_remote_has_tty`` event before sending cancel messages
|
|
||||||
# (via portals) to its underlings B)
|
|
||||||
|
|
||||||
# at some point here there should have been some warning msg from
|
|
||||||
# the root announcing it avoided a clobber of the child's lock, but
|
|
||||||
# it seems unreliable in testing here to gnab it:
|
|
||||||
# assert "in use by child ('bp_forever'," in before
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# expect another breakpoint actor entry
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
try:
|
|
||||||
assert_before(child, [bp_forever_msg])
|
|
||||||
except AssertionError:
|
|
||||||
assert_before(child, [name_error_msg])
|
|
||||||
|
|
||||||
else:
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# should crash with the 2nd name error (simulates
|
|
||||||
# a retry) and then the root eventually (boxed) errors
|
|
||||||
# after 1 or more further bp actor entries.
|
|
||||||
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
assert_before(child, [name_error_msg])
|
|
||||||
|
|
||||||
# wait for final error in root
|
|
||||||
# where it crashs with boxed error
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
assert_before(
|
|
||||||
child,
|
|
||||||
[bp_forever_msg]
|
|
||||||
)
|
|
||||||
except AssertionError:
|
|
||||||
break
|
|
||||||
|
|
||||||
assert_before(
|
|
||||||
child,
|
|
||||||
[
|
|
||||||
# boxed error raised in root task
|
|
||||||
"Attaching to pdb in crashed actor: ('root'",
|
|
||||||
"_exceptions.RemoteActorError: ('name_error'",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect(pexpect.EOF)
|
|
||||||
|
|
||||||
|
|
||||||
@has_nested_actors
|
|
||||||
def test_multi_subactors_root_errors(
|
|
||||||
spawn,
|
|
||||||
ctlc: bool
|
|
||||||
):
|
|
||||||
'''
|
|
||||||
Multiple subactors, both erroring and breakpointing as well as
|
|
||||||
a nested subactor erroring.
|
|
||||||
|
|
||||||
'''
|
|
||||||
child = spawn('multi_subactor_root_errors')
|
|
||||||
|
|
||||||
# scan for the pdbpp prompt
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
# at most one subactor should attach before the root is cancelled
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert "NameError: name 'doggypants' is not defined" in before
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
# continue again to catch 2nd name error from
|
|
||||||
# actor 'name_error_1' (which is 2nd depth).
|
|
||||||
child.sendline('c')
|
|
||||||
|
|
||||||
# due to block list strat from #337, this will no longer
|
|
||||||
# propagate before the root errors and cancels the spawner sub-tree.
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
# only if the blocking condition doesn't kick in fast enough
|
|
||||||
before = str(child.before.decode())
|
|
||||||
if "Debug lock blocked for ['name_error_1'" not in before:
|
|
||||||
|
|
||||||
assert_before(child, [
|
|
||||||
"Attaching to pdb in crashed actor: ('name_error_1'",
|
|
||||||
"NameError",
|
|
||||||
])
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
# check if the spawner crashed or was blocked from debug
|
|
||||||
# and if this intermediary attached check the boxed error
|
|
||||||
before = str(child.before.decode())
|
|
||||||
if "Attaching to pdb in crashed actor: ('spawn_error'" in before:
|
|
||||||
|
|
||||||
assert_before(child, [
|
|
||||||
# boxed error from spawner's child
|
|
||||||
"RemoteActorError: ('name_error_1'",
|
|
||||||
"NameError",
|
|
||||||
])
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
# expect a root actor crash
|
|
||||||
assert_before(child, [
|
|
||||||
"RemoteActorError: ('name_error'",
|
|
||||||
"NameError",
|
|
||||||
|
|
||||||
# error from root actor and root task that created top level nursery
|
|
||||||
"Attaching to pdb in crashed actor: ('root'",
|
|
||||||
"AssertionError",
|
|
||||||
])
|
|
||||||
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect(pexpect.EOF)
|
|
||||||
|
|
||||||
assert_before(child, [
|
|
||||||
# "Attaching to pdb in crashed actor: ('root'",
|
|
||||||
# boxed error from previous step
|
|
||||||
"RemoteActorError: ('name_error'",
|
|
||||||
"NameError",
|
|
||||||
"AssertionError",
|
|
||||||
'assert 0',
|
|
||||||
])
|
|
||||||
|
|
||||||
|
|
||||||
@has_nested_actors
|
|
||||||
def test_multi_nested_subactors_error_through_nurseries(
|
|
||||||
spawn,
|
|
||||||
|
|
||||||
# TODO: address debugger issue for nested tree:
|
|
||||||
# https://github.com/goodboy/tractor/issues/320
|
|
||||||
# ctlc: bool,
|
|
||||||
):
|
|
||||||
"""Verify deeply nested actors that error trigger debugger entries
|
|
||||||
at each actor nurserly (level) all the way up the tree.
|
|
||||||
|
|
||||||
"""
|
|
||||||
# NOTE: previously, inside this script was a bug where if the
|
|
||||||
# parent errors before a 2-levels-lower actor has released the lock,
|
|
||||||
# the parent tries to cancel it but it's stuck in the debugger?
|
|
||||||
# A test (below) has now been added to explicitly verify this is
|
|
||||||
# fixed.
|
|
||||||
|
|
||||||
child = spawn('multi_nested_subactors_error_up_through_nurseries')
|
|
||||||
|
|
||||||
timed_out_early: bool = False
|
|
||||||
|
|
||||||
for send_char in itertools.cycle(['c', 'q']):
|
|
||||||
try:
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
child.sendline(send_char)
|
|
||||||
time.sleep(0.01)
|
|
||||||
|
|
||||||
except EOF:
|
|
||||||
break
|
|
||||||
|
|
||||||
assert_before(child, [
|
|
||||||
|
|
||||||
# boxed source errors
|
|
||||||
"NameError: name 'doggypants' is not defined",
|
|
||||||
"tractor._exceptions.RemoteActorError: ('name_error'",
|
|
||||||
"bdb.BdbQuit",
|
|
||||||
|
|
||||||
# first level subtrees
|
|
||||||
"tractor._exceptions.RemoteActorError: ('spawner0'",
|
|
||||||
# "tractor._exceptions.RemoteActorError: ('spawner1'",
|
|
||||||
|
|
||||||
# propagation of errors up through nested subtrees
|
|
||||||
"tractor._exceptions.RemoteActorError: ('spawn_until_0'",
|
|
||||||
"tractor._exceptions.RemoteActorError: ('spawn_until_1'",
|
|
||||||
"tractor._exceptions.RemoteActorError: ('spawn_until_2'",
|
|
||||||
])
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.timeout(15)
|
|
||||||
@has_nested_actors
|
|
||||||
def test_root_nursery_cancels_before_child_releases_tty_lock(
|
|
||||||
spawn,
|
|
||||||
start_method,
|
|
||||||
ctlc: bool,
|
|
||||||
):
|
|
||||||
'''
|
|
||||||
Test that when the root sends a cancel message before a nested child
|
|
||||||
has unblocked (which can happen when it has the tty lock and is
|
|
||||||
engaged in pdb) it is indeed cancelled after exiting the debugger.
|
|
||||||
|
|
||||||
'''
|
|
||||||
timed_out_early = False
|
|
||||||
|
|
||||||
child = spawn('root_cancelled_but_child_is_in_tty_lock')
|
|
||||||
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert "NameError: name 'doggypants' is not defined" in before
|
|
||||||
assert "tractor._exceptions.RemoteActorError: ('name_error'" not in before
|
|
||||||
time.sleep(0.5)
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
child.sendline('c')
|
|
||||||
|
|
||||||
for i in range(4):
|
|
||||||
time.sleep(0.5)
|
|
||||||
try:
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
except (
|
|
||||||
EOF,
|
|
||||||
TIMEOUT,
|
|
||||||
):
|
|
||||||
# races all over..
|
|
||||||
|
|
||||||
print(f"Failed early on {i}?")
|
|
||||||
before = str(child.before.decode())
|
|
||||||
|
|
||||||
timed_out_early = True
|
|
||||||
|
|
||||||
# race conditions on how fast the continue is sent?
|
|
||||||
break
|
|
||||||
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert "NameError: name 'doggypants' is not defined" in before
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
child.sendline('c')
|
|
||||||
time.sleep(0.1)
|
|
||||||
|
|
||||||
for i in range(3):
|
|
||||||
try:
|
|
||||||
child.expect(pexpect.EOF, timeout=0.5)
|
|
||||||
break
|
|
||||||
except TIMEOUT:
|
|
||||||
child.sendline('c')
|
|
||||||
time.sleep(0.1)
|
|
||||||
print('child was able to grab tty lock again?')
|
|
||||||
else:
|
|
||||||
print('giving up on child releasing, sending `quit` cmd')
|
|
||||||
child.sendline('q')
|
|
||||||
expect(child, EOF)
|
|
||||||
|
|
||||||
if not timed_out_early:
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert_before(child, [
|
|
||||||
"tractor._exceptions.RemoteActorError: ('spawner0'",
|
|
||||||
"tractor._exceptions.RemoteActorError: ('name_error'",
|
|
||||||
"NameError: name 'doggypants' is not defined",
|
|
||||||
])
|
|
||||||
|
|
||||||
|
|
||||||
def test_root_cancels_child_context_during_startup(
|
|
||||||
spawn,
|
|
||||||
ctlc: bool,
|
|
||||||
):
|
|
||||||
'''Verify a fast fail in the root doesn't lock up the child reaping
|
|
||||||
and all while using the new context api.
|
|
||||||
|
|
||||||
'''
|
|
||||||
child = spawn('fast_error_in_root_after_spawn')
|
|
||||||
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert "AssertionError" in before
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect(pexpect.EOF)
|
|
||||||
|
|
||||||
|
|
||||||
def test_different_debug_mode_per_actor(
|
|
||||||
spawn,
|
|
||||||
ctlc: bool,
|
|
||||||
):
|
|
||||||
child = spawn('per_actor_debug')
|
|
||||||
child.expect(r"\(Pdb\+\+\)")
|
|
||||||
|
|
||||||
# only one actor should enter the debugger
|
|
||||||
before = str(child.before.decode())
|
|
||||||
assert "Attaching to pdb in crashed actor: ('debugged_boi'" in before
|
|
||||||
assert "RuntimeError" in before
|
|
||||||
|
|
||||||
if ctlc:
|
|
||||||
do_ctlc(child)
|
|
||||||
|
|
||||||
child.sendline('c')
|
|
||||||
child.expect(pexpect.EOF)
|
|
||||||
|
|
||||||
before = str(child.before.decode())
|
|
||||||
|
|
||||||
# NOTE: this debugged actor error currently WON'T show up since the
|
|
||||||
# root will actually cancel and terminate the nursery before the error
|
|
||||||
# msg reported back from the debug mode actor is processed.
|
|
||||||
# assert "tractor._exceptions.RemoteActorError: ('debugged_boi'" in before
|
|
||||||
|
|
||||||
assert "tractor._exceptions.RemoteActorError: ('crash_boi'" in before
|
|
||||||
|
|
||||||
# the crash boi should not have made a debugger request but
|
|
||||||
# instead crashed completely
|
|
||||||
assert "tractor._exceptions.RemoteActorError: ('crash_boi'" in before
|
|
||||||
assert "RuntimeError" in before
|
|
||||||
|
|
@ -7,27 +7,29 @@ import platform
|
||||||
from functools import partial
|
from functools import partial
|
||||||
import itertools
|
import itertools
|
||||||
|
|
||||||
|
import psutil
|
||||||
import pytest
|
import pytest
|
||||||
|
import subprocess
|
||||||
import tractor
|
import tractor
|
||||||
|
from tractor.trionics import collapse_eg
|
||||||
|
from tractor._testing import tractor_test
|
||||||
import trio
|
import trio
|
||||||
|
|
||||||
from conftest import tractor_test
|
|
||||||
|
|
||||||
|
|
||||||
@tractor_test
|
@tractor_test
|
||||||
async def test_reg_then_unreg(arb_addr):
|
async def test_reg_then_unreg(reg_addr):
|
||||||
actor = tractor.current_actor()
|
actor = tractor.current_actor()
|
||||||
assert actor.is_arbiter
|
assert actor.is_arbiter
|
||||||
assert len(actor._registry) == 1 # only self is registered
|
assert len(actor._registry) == 1 # only self is registered
|
||||||
|
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
arbiter_addr=arb_addr,
|
registry_addrs=[reg_addr],
|
||||||
) as n:
|
) as n:
|
||||||
|
|
||||||
portal = await n.start_actor('actor', enable_modules=[__name__])
|
portal = await n.start_actor('actor', enable_modules=[__name__])
|
||||||
uid = portal.channel.uid
|
uid = portal.channel.uid
|
||||||
|
|
||||||
async with tractor.get_arbiter(*arb_addr) as aportal:
|
async with tractor.get_registry(reg_addr) as aportal:
|
||||||
# this local actor should be the arbiter
|
# this local actor should be the arbiter
|
||||||
assert actor is aportal.actor
|
assert actor is aportal.actor
|
||||||
|
|
||||||
|
|
@ -53,15 +55,27 @@ async def hi():
|
||||||
return the_line.format(tractor.current_actor().name)
|
return the_line.format(tractor.current_actor().name)
|
||||||
|
|
||||||
|
|
||||||
async def say_hello(other_actor):
|
async def say_hello(
|
||||||
|
other_actor: str,
|
||||||
|
reg_addr: tuple[str, int],
|
||||||
|
):
|
||||||
await trio.sleep(1) # wait for other actor to spawn
|
await trio.sleep(1) # wait for other actor to spawn
|
||||||
async with tractor.find_actor(other_actor) as portal:
|
async with tractor.find_actor(
|
||||||
|
other_actor,
|
||||||
|
registry_addrs=[reg_addr],
|
||||||
|
) as portal:
|
||||||
assert portal is not None
|
assert portal is not None
|
||||||
return await portal.run(__name__, 'hi')
|
return await portal.run(__name__, 'hi')
|
||||||
|
|
||||||
|
|
||||||
async def say_hello_use_wait(other_actor):
|
async def say_hello_use_wait(
|
||||||
async with tractor.wait_for_actor(other_actor) as portal:
|
other_actor: str,
|
||||||
|
reg_addr: tuple[str, int],
|
||||||
|
):
|
||||||
|
async with tractor.wait_for_actor(
|
||||||
|
other_actor,
|
||||||
|
registry_addr=reg_addr,
|
||||||
|
) as portal:
|
||||||
assert portal is not None
|
assert portal is not None
|
||||||
result = await portal.run(__name__, 'hi')
|
result = await portal.run(__name__, 'hi')
|
||||||
return result
|
return result
|
||||||
|
|
@ -69,21 +83,29 @@ async def say_hello_use_wait(other_actor):
|
||||||
|
|
||||||
@tractor_test
|
@tractor_test
|
||||||
@pytest.mark.parametrize('func', [say_hello, say_hello_use_wait])
|
@pytest.mark.parametrize('func', [say_hello, say_hello_use_wait])
|
||||||
async def test_trynamic_trio(func, start_method, arb_addr):
|
async def test_trynamic_trio(
|
||||||
"""Main tractor entry point, the "master" process (for now
|
func,
|
||||||
acts as the "director").
|
start_method,
|
||||||
"""
|
reg_addr,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Root actor acting as the "director" and running one-shot-task-actors
|
||||||
|
for the directed subs.
|
||||||
|
|
||||||
|
'''
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery() as n:
|
||||||
print("Alright... Action!")
|
print("Alright... Action!")
|
||||||
|
|
||||||
donny = await n.run_in_actor(
|
donny = await n.run_in_actor(
|
||||||
func,
|
func,
|
||||||
other_actor='gretchen',
|
other_actor='gretchen',
|
||||||
|
reg_addr=reg_addr,
|
||||||
name='donny',
|
name='donny',
|
||||||
)
|
)
|
||||||
gretchen = await n.run_in_actor(
|
gretchen = await n.run_in_actor(
|
||||||
func,
|
func,
|
||||||
other_actor='donny',
|
other_actor='donny',
|
||||||
|
reg_addr=reg_addr,
|
||||||
name='gretchen',
|
name='gretchen',
|
||||||
)
|
)
|
||||||
print(await gretchen.result())
|
print(await gretchen.result())
|
||||||
|
|
@ -131,17 +153,27 @@ async def unpack_reg(actor_or_portal):
|
||||||
|
|
||||||
|
|
||||||
async def spawn_and_check_registry(
|
async def spawn_and_check_registry(
|
||||||
arb_addr: tuple,
|
reg_addr: tuple,
|
||||||
use_signal: bool,
|
use_signal: bool,
|
||||||
|
debug_mode: bool = False,
|
||||||
remote_arbiter: bool = False,
|
remote_arbiter: bool = False,
|
||||||
with_streaming: bool = False,
|
with_streaming: bool = False,
|
||||||
|
maybe_daemon: tuple[
|
||||||
|
subprocess.Popen,
|
||||||
|
psutil.Process,
|
||||||
|
]|None = None,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
||||||
|
if maybe_daemon:
|
||||||
|
popen, proc = maybe_daemon
|
||||||
|
# breakpoint()
|
||||||
|
|
||||||
async with tractor.open_root_actor(
|
async with tractor.open_root_actor(
|
||||||
arbiter_addr=arb_addr,
|
registry_addrs=[reg_addr],
|
||||||
|
debug_mode=debug_mode,
|
||||||
):
|
):
|
||||||
async with tractor.get_arbiter(*arb_addr) as portal:
|
async with tractor.get_registry(reg_addr) as portal:
|
||||||
# runtime needs to be up to call this
|
# runtime needs to be up to call this
|
||||||
actor = tractor.current_actor()
|
actor = tractor.current_actor()
|
||||||
|
|
||||||
|
|
@ -157,28 +189,30 @@ async def spawn_and_check_registry(
|
||||||
extra = 2 # local root actor + remote arbiter
|
extra = 2 # local root actor + remote arbiter
|
||||||
|
|
||||||
# ensure current actor is registered
|
# ensure current actor is registered
|
||||||
registry = await get_reg()
|
registry: dict = await get_reg()
|
||||||
assert actor.uid in registry
|
assert actor.uid in registry
|
||||||
|
|
||||||
try:
|
try:
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery() as an:
|
||||||
async with trio.open_nursery() as trion:
|
async with (
|
||||||
|
collapse_eg(),
|
||||||
|
trio.open_nursery() as trion,
|
||||||
|
):
|
||||||
portals = {}
|
portals = {}
|
||||||
for i in range(3):
|
for i in range(3):
|
||||||
name = f'a{i}'
|
name = f'a{i}'
|
||||||
if with_streaming:
|
if with_streaming:
|
||||||
portals[name] = await n.start_actor(
|
portals[name] = await an.start_actor(
|
||||||
name=name, enable_modules=[__name__])
|
name=name, enable_modules=[__name__])
|
||||||
|
|
||||||
else: # no streaming
|
else: # no streaming
|
||||||
portals[name] = await n.run_in_actor(
|
portals[name] = await an.run_in_actor(
|
||||||
trio.sleep_forever, name=name)
|
trio.sleep_forever, name=name)
|
||||||
|
|
||||||
# wait on last actor to come up
|
# wait on last actor to come up
|
||||||
async with tractor.wait_for_actor(name):
|
async with tractor.wait_for_actor(name):
|
||||||
registry = await get_reg()
|
registry = await get_reg()
|
||||||
for uid in n._children:
|
for uid in an._children:
|
||||||
assert uid in registry
|
assert uid in registry
|
||||||
|
|
||||||
assert len(portals) + extra == len(registry)
|
assert len(portals) + extra == len(registry)
|
||||||
|
|
@ -211,20 +245,24 @@ async def spawn_and_check_registry(
|
||||||
@pytest.mark.parametrize('use_signal', [False, True])
|
@pytest.mark.parametrize('use_signal', [False, True])
|
||||||
@pytest.mark.parametrize('with_streaming', [False, True])
|
@pytest.mark.parametrize('with_streaming', [False, True])
|
||||||
def test_subactors_unregister_on_cancel(
|
def test_subactors_unregister_on_cancel(
|
||||||
|
debug_mode: bool,
|
||||||
start_method,
|
start_method,
|
||||||
use_signal,
|
use_signal,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
with_streaming,
|
with_streaming,
|
||||||
):
|
):
|
||||||
"""Verify that cancelling a nursery results in all subactors
|
'''
|
||||||
|
Verify that cancelling a nursery results in all subactors
|
||||||
deregistering themselves with the arbiter.
|
deregistering themselves with the arbiter.
|
||||||
"""
|
|
||||||
|
'''
|
||||||
with pytest.raises(KeyboardInterrupt):
|
with pytest.raises(KeyboardInterrupt):
|
||||||
trio.run(
|
trio.run(
|
||||||
partial(
|
partial(
|
||||||
spawn_and_check_registry,
|
spawn_and_check_registry,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
use_signal,
|
use_signal,
|
||||||
|
debug_mode=debug_mode,
|
||||||
remote_arbiter=False,
|
remote_arbiter=False,
|
||||||
with_streaming=with_streaming,
|
with_streaming=with_streaming,
|
||||||
),
|
),
|
||||||
|
|
@ -234,10 +272,11 @@ def test_subactors_unregister_on_cancel(
|
||||||
@pytest.mark.parametrize('use_signal', [False, True])
|
@pytest.mark.parametrize('use_signal', [False, True])
|
||||||
@pytest.mark.parametrize('with_streaming', [False, True])
|
@pytest.mark.parametrize('with_streaming', [False, True])
|
||||||
def test_subactors_unregister_on_cancel_remote_daemon(
|
def test_subactors_unregister_on_cancel_remote_daemon(
|
||||||
daemon,
|
daemon: subprocess.Popen,
|
||||||
|
debug_mode: bool,
|
||||||
start_method,
|
start_method,
|
||||||
use_signal,
|
use_signal,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
with_streaming,
|
with_streaming,
|
||||||
):
|
):
|
||||||
"""Verify that cancelling a nursery results in all subactors
|
"""Verify that cancelling a nursery results in all subactors
|
||||||
|
|
@ -248,10 +287,15 @@ def test_subactors_unregister_on_cancel_remote_daemon(
|
||||||
trio.run(
|
trio.run(
|
||||||
partial(
|
partial(
|
||||||
spawn_and_check_registry,
|
spawn_and_check_registry,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
use_signal,
|
use_signal,
|
||||||
|
debug_mode=debug_mode,
|
||||||
remote_arbiter=True,
|
remote_arbiter=True,
|
||||||
with_streaming=with_streaming,
|
with_streaming=with_streaming,
|
||||||
|
maybe_daemon=(
|
||||||
|
daemon,
|
||||||
|
psutil.Process(daemon.pid)
|
||||||
|
),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -262,7 +306,7 @@ async def streamer(agen):
|
||||||
|
|
||||||
|
|
||||||
async def close_chans_before_nursery(
|
async def close_chans_before_nursery(
|
||||||
arb_addr: tuple,
|
reg_addr: tuple,
|
||||||
use_signal: bool,
|
use_signal: bool,
|
||||||
remote_arbiter: bool = False,
|
remote_arbiter: bool = False,
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
@ -275,9 +319,9 @@ async def close_chans_before_nursery(
|
||||||
entries_at_end = 1
|
entries_at_end = 1
|
||||||
|
|
||||||
async with tractor.open_root_actor(
|
async with tractor.open_root_actor(
|
||||||
arbiter_addr=arb_addr,
|
registry_addrs=[reg_addr],
|
||||||
):
|
):
|
||||||
async with tractor.get_arbiter(*arb_addr) as aportal:
|
async with tractor.get_registry(reg_addr) as aportal:
|
||||||
try:
|
try:
|
||||||
get_reg = partial(unpack_reg, aportal)
|
get_reg = partial(unpack_reg, aportal)
|
||||||
|
|
||||||
|
|
@ -295,9 +339,12 @@ async def close_chans_before_nursery(
|
||||||
async with portal2.open_stream_from(
|
async with portal2.open_stream_from(
|
||||||
stream_forever
|
stream_forever
|
||||||
) as agen2:
|
) as agen2:
|
||||||
async with trio.open_nursery() as n:
|
async with (
|
||||||
n.start_soon(streamer, agen1)
|
collapse_eg(),
|
||||||
n.start_soon(cancel, use_signal, .5)
|
trio.open_nursery() as tn,
|
||||||
|
):
|
||||||
|
tn.start_soon(streamer, agen1)
|
||||||
|
tn.start_soon(cancel, use_signal, .5)
|
||||||
try:
|
try:
|
||||||
await streamer(agen2)
|
await streamer(agen2)
|
||||||
finally:
|
finally:
|
||||||
|
|
@ -329,7 +376,7 @@ async def close_chans_before_nursery(
|
||||||
def test_close_channel_explicit(
|
def test_close_channel_explicit(
|
||||||
start_method,
|
start_method,
|
||||||
use_signal,
|
use_signal,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
):
|
):
|
||||||
"""Verify that closing a stream explicitly and killing the actor's
|
"""Verify that closing a stream explicitly and killing the actor's
|
||||||
"root nursery" **before** the containing nursery tears down also
|
"root nursery" **before** the containing nursery tears down also
|
||||||
|
|
@ -339,7 +386,7 @@ def test_close_channel_explicit(
|
||||||
trio.run(
|
trio.run(
|
||||||
partial(
|
partial(
|
||||||
close_chans_before_nursery,
|
close_chans_before_nursery,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
use_signal,
|
use_signal,
|
||||||
remote_arbiter=False,
|
remote_arbiter=False,
|
||||||
),
|
),
|
||||||
|
|
@ -348,10 +395,10 @@ def test_close_channel_explicit(
|
||||||
|
|
||||||
@pytest.mark.parametrize('use_signal', [False, True])
|
@pytest.mark.parametrize('use_signal', [False, True])
|
||||||
def test_close_channel_explicit_remote_arbiter(
|
def test_close_channel_explicit_remote_arbiter(
|
||||||
daemon,
|
daemon: subprocess.Popen,
|
||||||
start_method,
|
start_method,
|
||||||
use_signal,
|
use_signal,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
):
|
):
|
||||||
"""Verify that closing a stream explicitly and killing the actor's
|
"""Verify that closing a stream explicitly and killing the actor's
|
||||||
"root nursery" **before** the containing nursery tears down also
|
"root nursery" **before** the containing nursery tears down also
|
||||||
|
|
@ -361,7 +408,7 @@ def test_close_channel_explicit_remote_arbiter(
|
||||||
trio.run(
|
trio.run(
|
||||||
partial(
|
partial(
|
||||||
close_chans_before_nursery,
|
close_chans_before_nursery,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
use_signal,
|
use_signal,
|
||||||
remote_arbiter=True,
|
remote_arbiter=True,
|
||||||
),
|
),
|
||||||
|
|
|
||||||
|
|
@ -11,18 +11,17 @@ import platform
|
||||||
import shutil
|
import shutil
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
from tractor._testing import (
|
||||||
from conftest import repodir
|
examples_dir,
|
||||||
|
)
|
||||||
|
|
||||||
def examples_dir():
|
|
||||||
"""Return the abspath to the examples directory.
|
|
||||||
"""
|
|
||||||
return os.path.join(repodir(), 'examples')
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def run_example_in_subproc(loglevel, testdir, arb_addr):
|
def run_example_in_subproc(
|
||||||
|
loglevel: str,
|
||||||
|
testdir: pytest.Pytester,
|
||||||
|
reg_addr: tuple[str, int],
|
||||||
|
):
|
||||||
|
|
||||||
@contextmanager
|
@contextmanager
|
||||||
def run(script_code):
|
def run(script_code):
|
||||||
|
|
@ -32,8 +31,8 @@ def run_example_in_subproc(loglevel, testdir, arb_addr):
|
||||||
# on windows we need to create a special __main__.py which will
|
# on windows we need to create a special __main__.py which will
|
||||||
# be executed with ``python -m <modulename>`` on windows..
|
# be executed with ``python -m <modulename>`` on windows..
|
||||||
shutil.copyfile(
|
shutil.copyfile(
|
||||||
os.path.join(examples_dir(), '__main__.py'),
|
examples_dir() / '__main__.py',
|
||||||
os.path.join(str(testdir), '__main__.py')
|
str(testdir / '__main__.py'),
|
||||||
)
|
)
|
||||||
|
|
||||||
# drop the ``if __name__ == '__main__'`` guard onwards from
|
# drop the ``if __name__ == '__main__'`` guard onwards from
|
||||||
|
|
@ -67,6 +66,9 @@ def run_example_in_subproc(loglevel, testdir, arb_addr):
|
||||||
# due to backpressure!!!
|
# due to backpressure!!!
|
||||||
proc = testdir.popen(
|
proc = testdir.popen(
|
||||||
cmdargs,
|
cmdargs,
|
||||||
|
stdin=subprocess.PIPE,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.PIPE,
|
||||||
**kwargs,
|
**kwargs,
|
||||||
)
|
)
|
||||||
assert not proc.returncode
|
assert not proc.returncode
|
||||||
|
|
@ -82,26 +84,37 @@ def run_example_in_subproc(loglevel, testdir, arb_addr):
|
||||||
|
|
||||||
# walk yields: (dirpath, dirnames, filenames)
|
# walk yields: (dirpath, dirnames, filenames)
|
||||||
[
|
[
|
||||||
(p[0], f) for p in os.walk(examples_dir()) for f in p[2]
|
(p[0], f)
|
||||||
|
for p in os.walk(examples_dir())
|
||||||
|
for f in p[2]
|
||||||
|
|
||||||
if '__' not in f
|
if (
|
||||||
and f[0] != '_'
|
'__' not in f
|
||||||
and 'debugging' not in p[0]
|
and f[0] != '_'
|
||||||
and 'integration' not in p[0]
|
and 'debugging' not in p[0]
|
||||||
|
and 'integration' not in p[0]
|
||||||
|
and 'advanced_faults' not in p[0]
|
||||||
|
and 'multihost' not in p[0]
|
||||||
|
and 'trio' not in p[0]
|
||||||
|
)
|
||||||
],
|
],
|
||||||
|
|
||||||
ids=lambda t: t[1],
|
ids=lambda t: t[1],
|
||||||
)
|
)
|
||||||
def test_example(run_example_in_subproc, example_script):
|
def test_example(
|
||||||
"""Load and run scripts from this repo's ``examples/`` dir as a user
|
run_example_in_subproc,
|
||||||
|
example_script,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Load and run scripts from this repo's ``examples/`` dir as a user
|
||||||
would copy and pasing them into their editor.
|
would copy and pasing them into their editor.
|
||||||
|
|
||||||
On windows a little more "finessing" is done to make
|
On windows a little more "finessing" is done to make
|
||||||
``multiprocessing`` play nice: we copy the ``__main__.py`` into the
|
``multiprocessing`` play nice: we copy the ``__main__.py`` into the
|
||||||
test directory and invoke the script as a module with ``python -m
|
test directory and invoke the script as a module with ``python -m
|
||||||
test_example``.
|
test_example``.
|
||||||
"""
|
|
||||||
ex_file = os.path.join(*example_script)
|
'''
|
||||||
|
ex_file: str = os.path.join(*example_script)
|
||||||
|
|
||||||
if 'rpc_bidir_streaming' in ex_file and sys.version_info < (3, 9):
|
if 'rpc_bidir_streaming' in ex_file and sys.version_info < (3, 9):
|
||||||
pytest.skip("2-way streaming example requires py3.9 async with syntax")
|
pytest.skip("2-way streaming example requires py3.9 async with syntax")
|
||||||
|
|
@ -110,10 +123,14 @@ def test_example(run_example_in_subproc, example_script):
|
||||||
code = ex.read()
|
code = ex.read()
|
||||||
|
|
||||||
with run_example_in_subproc(code) as proc:
|
with run_example_in_subproc(code) as proc:
|
||||||
proc.wait()
|
err = None
|
||||||
err, _ = proc.stderr.read(), proc.stdout.read()
|
try:
|
||||||
# print(f'STDERR: {err}')
|
if not proc.poll():
|
||||||
# print(f'STDOUT: {out}')
|
_, err = proc.communicate(timeout=15)
|
||||||
|
|
||||||
|
except subprocess.TimeoutExpired as e:
|
||||||
|
proc.kill()
|
||||||
|
err = e.stderr
|
||||||
|
|
||||||
# if we get some gnarly output let's aggregate and raise
|
# if we get some gnarly output let's aggregate and raise
|
||||||
if err:
|
if err:
|
||||||
|
|
@ -127,7 +144,8 @@ def test_example(run_example_in_subproc, example_script):
|
||||||
# shouldn't eventually once we figure out what's
|
# shouldn't eventually once we figure out what's
|
||||||
# a better way to be explicit about aio side
|
# a better way to be explicit about aio side
|
||||||
# cancels?
|
# cancels?
|
||||||
and 'asyncio.exceptions.CancelledError' not in last_error
|
and
|
||||||
|
'asyncio.exceptions.CancelledError' not in last_error
|
||||||
):
|
):
|
||||||
raise Exception(errmsg)
|
raise Exception(errmsg)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,946 @@
|
||||||
|
'''
|
||||||
|
Low-level functional audits for our
|
||||||
|
"capability based messaging"-spec feats.
|
||||||
|
|
||||||
|
B~)
|
||||||
|
|
||||||
|
'''
|
||||||
|
from contextlib import (
|
||||||
|
contextmanager as cm,
|
||||||
|
# nullcontext,
|
||||||
|
)
|
||||||
|
import importlib
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Type,
|
||||||
|
Union,
|
||||||
|
)
|
||||||
|
|
||||||
|
from msgspec import (
|
||||||
|
# structs,
|
||||||
|
# msgpack,
|
||||||
|
Raw,
|
||||||
|
# Struct,
|
||||||
|
ValidationError,
|
||||||
|
)
|
||||||
|
import pytest
|
||||||
|
import trio
|
||||||
|
|
||||||
|
import tractor
|
||||||
|
from tractor import (
|
||||||
|
Actor,
|
||||||
|
# _state,
|
||||||
|
MsgTypeError,
|
||||||
|
Context,
|
||||||
|
)
|
||||||
|
from tractor.msg import (
|
||||||
|
_codec,
|
||||||
|
_ctxvar_MsgCodec,
|
||||||
|
_exts,
|
||||||
|
|
||||||
|
NamespacePath,
|
||||||
|
MsgCodec,
|
||||||
|
MsgDec,
|
||||||
|
mk_codec,
|
||||||
|
mk_dec,
|
||||||
|
apply_codec,
|
||||||
|
current_codec,
|
||||||
|
)
|
||||||
|
from tractor.msg.types import (
|
||||||
|
log,
|
||||||
|
Started,
|
||||||
|
# _payload_msgs,
|
||||||
|
# PayloadMsg,
|
||||||
|
# mk_msg_spec,
|
||||||
|
)
|
||||||
|
from tractor.msg._ops import (
|
||||||
|
limit_plds,
|
||||||
|
)
|
||||||
|
|
||||||
|
def enc_nsp(obj: Any) -> Any:
|
||||||
|
actor: Actor = tractor.current_actor(
|
||||||
|
err_on_no_runtime=False,
|
||||||
|
)
|
||||||
|
uid: tuple[str, str]|None = None if not actor else actor.uid
|
||||||
|
print(f'{uid} ENC HOOK')
|
||||||
|
|
||||||
|
match obj:
|
||||||
|
# case NamespacePath()|str():
|
||||||
|
case NamespacePath():
|
||||||
|
encoded: str = str(obj)
|
||||||
|
print(
|
||||||
|
f'----- ENCODING `NamespacePath` as `str` ------\n'
|
||||||
|
f'|_obj:{type(obj)!r} = {obj!r}\n'
|
||||||
|
f'|_encoded: str = {encoded!r}\n'
|
||||||
|
)
|
||||||
|
# if type(obj) != NamespacePath:
|
||||||
|
# breakpoint()
|
||||||
|
return encoded
|
||||||
|
case _:
|
||||||
|
logmsg: str = (
|
||||||
|
f'{uid}\n'
|
||||||
|
'FAILED ENCODE\n'
|
||||||
|
f'obj-> `{obj}: {type(obj)}`\n'
|
||||||
|
)
|
||||||
|
raise NotImplementedError(logmsg)
|
||||||
|
|
||||||
|
|
||||||
|
def dec_nsp(
|
||||||
|
obj_type: Type,
|
||||||
|
obj: Any,
|
||||||
|
|
||||||
|
) -> Any:
|
||||||
|
# breakpoint()
|
||||||
|
actor: Actor = tractor.current_actor(
|
||||||
|
err_on_no_runtime=False,
|
||||||
|
)
|
||||||
|
uid: tuple[str, str]|None = None if not actor else actor.uid
|
||||||
|
print(
|
||||||
|
f'{uid}\n'
|
||||||
|
'CUSTOM DECODE\n'
|
||||||
|
f'type-arg-> {obj_type}\n'
|
||||||
|
f'obj-arg-> `{obj}`: {type(obj)}\n'
|
||||||
|
)
|
||||||
|
nsp = None
|
||||||
|
# XXX, never happens right?
|
||||||
|
if obj_type is Raw:
|
||||||
|
breakpoint()
|
||||||
|
|
||||||
|
if (
|
||||||
|
obj_type is NamespacePath
|
||||||
|
and isinstance(obj, str)
|
||||||
|
and ':' in obj
|
||||||
|
):
|
||||||
|
nsp = NamespacePath(obj)
|
||||||
|
# TODO: we could built a generic handler using
|
||||||
|
# JUST matching the obj_type part?
|
||||||
|
# nsp = obj_type(obj)
|
||||||
|
|
||||||
|
if nsp:
|
||||||
|
print(f'Returning NSP instance: {nsp}')
|
||||||
|
return nsp
|
||||||
|
|
||||||
|
logmsg: str = (
|
||||||
|
f'{uid}\n'
|
||||||
|
'FAILED DECODE\n'
|
||||||
|
f'type-> {obj_type}\n'
|
||||||
|
f'obj-arg-> `{obj}`: {type(obj)}\n\n'
|
||||||
|
f'current codec:\n'
|
||||||
|
f'{current_codec()}\n'
|
||||||
|
)
|
||||||
|
# TODO: figure out the ignore subsys for this!
|
||||||
|
# -[ ] option whether to defense-relay backc the msg
|
||||||
|
# inside an `Invalid`/`Ignore`
|
||||||
|
# -[ ] how to make this handling pluggable such that a
|
||||||
|
# `Channel`/`MsgTransport` can intercept and process
|
||||||
|
# back msgs either via exception handling or some other
|
||||||
|
# signal?
|
||||||
|
log.warning(logmsg)
|
||||||
|
# NOTE: this delivers the invalid
|
||||||
|
# value up to `msgspec`'s decoding
|
||||||
|
# machinery for error raising.
|
||||||
|
return obj
|
||||||
|
# raise NotImplementedError(logmsg)
|
||||||
|
|
||||||
|
|
||||||
|
def ex_func(*args):
|
||||||
|
'''
|
||||||
|
A mod level func we can ref and load via our `NamespacePath`
|
||||||
|
python-object pointer `str` subtype.
|
||||||
|
|
||||||
|
'''
|
||||||
|
print(f'ex_func({args})')
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'add_codec_hooks',
|
||||||
|
[
|
||||||
|
True,
|
||||||
|
False,
|
||||||
|
],
|
||||||
|
ids=['use_codec_hooks', 'no_codec_hooks'],
|
||||||
|
)
|
||||||
|
def test_custom_extension_types(
|
||||||
|
debug_mode: bool,
|
||||||
|
add_codec_hooks: bool
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Verify that a `MsgCodec` (used for encoding all outbound IPC msgs
|
||||||
|
and decoding all inbound `PayloadMsg`s) and a paired `MsgDec`
|
||||||
|
(used for decoding the `PayloadMsg.pld: Raw` received within a given
|
||||||
|
task's ipc `Context` scope) can both send and receive "extension types"
|
||||||
|
as supported via custom converter hooks passed to `msgspec`.
|
||||||
|
|
||||||
|
'''
|
||||||
|
nsp_pld_dec: MsgDec = mk_dec(
|
||||||
|
spec=None, # ONLY support the ext type
|
||||||
|
dec_hook=dec_nsp if add_codec_hooks else None,
|
||||||
|
ext_types=[NamespacePath],
|
||||||
|
)
|
||||||
|
nsp_codec: MsgCodec = mk_codec(
|
||||||
|
# ipc_pld_spec=Raw, # default!
|
||||||
|
|
||||||
|
# NOTE XXX: the encode hook MUST be used no matter what since
|
||||||
|
# our `NamespacePath` is not any of a `Any` native type nor
|
||||||
|
# a `msgspec.Struct` subtype - so `msgspec` has no way to know
|
||||||
|
# how to encode it unless we provide the custom hook.
|
||||||
|
#
|
||||||
|
# AGAIN that is, regardless of whether we spec an
|
||||||
|
# `Any`-decoded-pld the enc has no knowledge (by default)
|
||||||
|
# how to enc `NamespacePath` (nsp), so we add a custom
|
||||||
|
# hook to do that ALWAYS.
|
||||||
|
enc_hook=enc_nsp if add_codec_hooks else None,
|
||||||
|
|
||||||
|
# XXX NOTE: pretty sure this is mutex with the `type=` to
|
||||||
|
# `Decoder`? so it won't work in tandem with the
|
||||||
|
# `ipc_pld_spec` passed above?
|
||||||
|
ext_types=[NamespacePath],
|
||||||
|
|
||||||
|
# TODO? is it useful to have the `.pld` decoded *prior* to
|
||||||
|
# the `PldRx`?? like perf or mem related?
|
||||||
|
# ext_dec=nsp_pld_dec,
|
||||||
|
)
|
||||||
|
if add_codec_hooks:
|
||||||
|
assert nsp_codec.dec.dec_hook is None
|
||||||
|
|
||||||
|
# TODO? if we pass `ext_dec` above?
|
||||||
|
# assert nsp_codec.dec.dec_hook is dec_nsp
|
||||||
|
|
||||||
|
assert nsp_codec.enc.enc_hook is enc_nsp
|
||||||
|
|
||||||
|
nsp = NamespacePath.from_ref(ex_func)
|
||||||
|
|
||||||
|
try:
|
||||||
|
nsp_bytes: bytes = nsp_codec.encode(nsp)
|
||||||
|
nsp_rt_sin_msg = nsp_pld_dec.decode(nsp_bytes)
|
||||||
|
nsp_rt_sin_msg.load_ref() is ex_func
|
||||||
|
except TypeError:
|
||||||
|
if not add_codec_hooks:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
msg_bytes: bytes = nsp_codec.encode(
|
||||||
|
Started(
|
||||||
|
cid='cid',
|
||||||
|
pld=nsp,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
# since the ext-type obj should also be set as the msg.pld
|
||||||
|
assert nsp_bytes in msg_bytes
|
||||||
|
started_rt: Started = nsp_codec.decode(msg_bytes)
|
||||||
|
pld: Raw = started_rt.pld
|
||||||
|
assert isinstance(pld, Raw)
|
||||||
|
nsp_rt: NamespacePath = nsp_pld_dec.decode(pld)
|
||||||
|
assert isinstance(nsp_rt, NamespacePath)
|
||||||
|
# in obj comparison terms they should be the same
|
||||||
|
assert nsp_rt == nsp
|
||||||
|
# ensure we've decoded to ext type!
|
||||||
|
assert nsp_rt.load_ref() is ex_func
|
||||||
|
|
||||||
|
except TypeError:
|
||||||
|
if not add_codec_hooks:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def sleep_forever_in_sub(
|
||||||
|
ctx: Context,
|
||||||
|
) -> None:
|
||||||
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
|
||||||
|
def mk_custom_codec(
|
||||||
|
add_hooks: bool,
|
||||||
|
|
||||||
|
) -> tuple[
|
||||||
|
MsgCodec, # encode to send
|
||||||
|
MsgDec, # pld receive-n-decode
|
||||||
|
]:
|
||||||
|
'''
|
||||||
|
Create custom `msgpack` enc/dec-hooks and set a `Decoder`
|
||||||
|
which only loads `pld_spec` (like `NamespacePath`) types.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
# XXX NOTE XXX: despite defining `NamespacePath` as a type
|
||||||
|
# field on our `PayloadMsg.pld`, we still need a enc/dec_hook() pair
|
||||||
|
# to cast to/from that type on the wire. See the docs:
|
||||||
|
# https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types
|
||||||
|
|
||||||
|
# if pld_spec is Any:
|
||||||
|
# pld_spec = Raw
|
||||||
|
|
||||||
|
nsp_codec: MsgCodec = mk_codec(
|
||||||
|
# ipc_pld_spec=Raw, # default!
|
||||||
|
|
||||||
|
# NOTE XXX: the encode hook MUST be used no matter what since
|
||||||
|
# our `NamespacePath` is not any of a `Any` native type nor
|
||||||
|
# a `msgspec.Struct` subtype - so `msgspec` has no way to know
|
||||||
|
# how to encode it unless we provide the custom hook.
|
||||||
|
#
|
||||||
|
# AGAIN that is, regardless of whether we spec an
|
||||||
|
# `Any`-decoded-pld the enc has no knowledge (by default)
|
||||||
|
# how to enc `NamespacePath` (nsp), so we add a custom
|
||||||
|
# hook to do that ALWAYS.
|
||||||
|
enc_hook=enc_nsp if add_hooks else None,
|
||||||
|
|
||||||
|
# XXX NOTE: pretty sure this is mutex with the `type=` to
|
||||||
|
# `Decoder`? so it won't work in tandem with the
|
||||||
|
# `ipc_pld_spec` passed above?
|
||||||
|
ext_types=[NamespacePath],
|
||||||
|
)
|
||||||
|
# dec_hook=dec_nsp if add_hooks else None,
|
||||||
|
return nsp_codec
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'limit_plds_args',
|
||||||
|
[
|
||||||
|
(
|
||||||
|
{'dec_hook': None, 'ext_types': None},
|
||||||
|
None,
|
||||||
|
),
|
||||||
|
(
|
||||||
|
{'dec_hook': dec_nsp, 'ext_types': None},
|
||||||
|
TypeError,
|
||||||
|
),
|
||||||
|
(
|
||||||
|
{'dec_hook': dec_nsp, 'ext_types': [NamespacePath]},
|
||||||
|
None,
|
||||||
|
),
|
||||||
|
(
|
||||||
|
{'dec_hook': dec_nsp, 'ext_types': [NamespacePath|None]},
|
||||||
|
None,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
ids=[
|
||||||
|
'no_hook_no_ext_types',
|
||||||
|
'only_hook',
|
||||||
|
'hook_and_ext_types',
|
||||||
|
'hook_and_ext_types_w_null',
|
||||||
|
]
|
||||||
|
)
|
||||||
|
def test_pld_limiting_usage(
|
||||||
|
limit_plds_args: tuple[dict, Exception|None],
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Verify `dec_hook()` and `ext_types` need to either both be
|
||||||
|
provided or we raise a explanator type-error.
|
||||||
|
|
||||||
|
'''
|
||||||
|
kwargs, maybe_err = limit_plds_args
|
||||||
|
async def main():
|
||||||
|
async with tractor.open_nursery() as an: # just to open runtime
|
||||||
|
|
||||||
|
# XXX SHOULD NEVER WORK outside an ipc ctx scope!
|
||||||
|
try:
|
||||||
|
with limit_plds(**kwargs):
|
||||||
|
pass
|
||||||
|
except RuntimeError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
p: tractor.Portal = await an.start_actor(
|
||||||
|
'sub',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
)
|
||||||
|
async with (
|
||||||
|
p.open_context(
|
||||||
|
sleep_forever_in_sub
|
||||||
|
) as (ctx, first),
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
with limit_plds(**kwargs):
|
||||||
|
pass
|
||||||
|
except maybe_err as exc:
|
||||||
|
assert type(exc) is maybe_err
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def chk_codec_applied(
|
||||||
|
expect_codec: MsgCodec|None,
|
||||||
|
enter_value: MsgCodec|None = None,
|
||||||
|
|
||||||
|
) -> MsgCodec:
|
||||||
|
'''
|
||||||
|
buncha sanity checks ensuring that the IPC channel's
|
||||||
|
context-vars are set to the expected codec and that are
|
||||||
|
ctx-var wrapper APIs match the same.
|
||||||
|
|
||||||
|
'''
|
||||||
|
# TODO: play with tricyle again, bc this is supposed to work
|
||||||
|
# the way we want?
|
||||||
|
#
|
||||||
|
# TreeVar
|
||||||
|
# task: trio.Task = trio.lowlevel.current_task()
|
||||||
|
# curr_codec = _ctxvar_MsgCodec.get_in(task)
|
||||||
|
|
||||||
|
# ContextVar
|
||||||
|
# task_ctx: Context = task.context
|
||||||
|
# assert _ctxvar_MsgCodec in task_ctx
|
||||||
|
# curr_codec: MsgCodec = task.context[_ctxvar_MsgCodec]
|
||||||
|
if expect_codec is None:
|
||||||
|
assert enter_value is None
|
||||||
|
return
|
||||||
|
|
||||||
|
# NOTE: currently we use this!
|
||||||
|
# RunVar
|
||||||
|
curr_codec: MsgCodec = current_codec()
|
||||||
|
last_read_codec = _ctxvar_MsgCodec.get()
|
||||||
|
# assert curr_codec is last_read_codec
|
||||||
|
|
||||||
|
assert (
|
||||||
|
(same_codec := expect_codec) is
|
||||||
|
# returned from `mk_codec()`
|
||||||
|
|
||||||
|
# yielded value from `apply_codec()`
|
||||||
|
|
||||||
|
# read from current task's `contextvars.Context`
|
||||||
|
curr_codec is
|
||||||
|
last_read_codec
|
||||||
|
|
||||||
|
# the default `msgspec` settings
|
||||||
|
is not _codec._def_msgspec_codec
|
||||||
|
is not _codec._def_tractor_codec
|
||||||
|
)
|
||||||
|
|
||||||
|
if enter_value:
|
||||||
|
assert enter_value is same_codec
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def send_back_values(
|
||||||
|
ctx: Context,
|
||||||
|
rent_pld_spec_type_strs: list[str],
|
||||||
|
add_hooks: bool,
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
'''
|
||||||
|
Setup up a custom codec to load instances of `NamespacePath`
|
||||||
|
and ensure we can round trip a func ref with our parent.
|
||||||
|
|
||||||
|
'''
|
||||||
|
uid: tuple = tractor.current_actor().uid
|
||||||
|
|
||||||
|
# init state in sub-actor should be default
|
||||||
|
chk_codec_applied(
|
||||||
|
expect_codec=_codec._def_tractor_codec,
|
||||||
|
)
|
||||||
|
|
||||||
|
# load pld spec from input str
|
||||||
|
rent_pld_spec = _exts.dec_type_union(
|
||||||
|
rent_pld_spec_type_strs,
|
||||||
|
mods=[
|
||||||
|
importlib.import_module(__name__),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
rent_pld_spec_types: set[Type] = _codec.unpack_spec_types(
|
||||||
|
rent_pld_spec,
|
||||||
|
)
|
||||||
|
|
||||||
|
# ONLY add ext-hooks if the rent specified a non-std type!
|
||||||
|
add_hooks: bool = (
|
||||||
|
NamespacePath in rent_pld_spec_types
|
||||||
|
and
|
||||||
|
add_hooks
|
||||||
|
)
|
||||||
|
|
||||||
|
# same as on parent side config.
|
||||||
|
nsp_codec: MsgCodec|None = None
|
||||||
|
if add_hooks:
|
||||||
|
nsp_codec = mk_codec(
|
||||||
|
enc_hook=enc_nsp,
|
||||||
|
ext_types=[NamespacePath],
|
||||||
|
)
|
||||||
|
|
||||||
|
with (
|
||||||
|
maybe_apply_codec(nsp_codec) as codec,
|
||||||
|
limit_plds(
|
||||||
|
rent_pld_spec,
|
||||||
|
dec_hook=dec_nsp if add_hooks else None,
|
||||||
|
ext_types=[NamespacePath] if add_hooks else None,
|
||||||
|
) as pld_dec,
|
||||||
|
):
|
||||||
|
# ?XXX? SHOULD WE NOT be swapping the global codec since it
|
||||||
|
# breaks `Context.started()` roundtripping checks??
|
||||||
|
chk_codec_applied(
|
||||||
|
expect_codec=nsp_codec,
|
||||||
|
enter_value=codec,
|
||||||
|
)
|
||||||
|
|
||||||
|
# ?TODO, mismatch case(s)?
|
||||||
|
#
|
||||||
|
# ensure pld spec matches on both sides
|
||||||
|
ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec
|
||||||
|
assert pld_dec is ctx_pld_dec
|
||||||
|
child_pld_spec: Type = pld_dec.spec
|
||||||
|
child_pld_spec_types: set[Type] = _codec.unpack_spec_types(
|
||||||
|
child_pld_spec,
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
child_pld_spec_types.issuperset(
|
||||||
|
rent_pld_spec_types
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# ?TODO, try loop for each of the types in pld-superset?
|
||||||
|
#
|
||||||
|
# for send_value in [
|
||||||
|
# nsp,
|
||||||
|
# str(nsp),
|
||||||
|
# None,
|
||||||
|
# ]:
|
||||||
|
nsp = NamespacePath.from_ref(ex_func)
|
||||||
|
try:
|
||||||
|
print(
|
||||||
|
f'{uid}: attempting to `.started({nsp})`\n'
|
||||||
|
f'\n'
|
||||||
|
f'rent_pld_spec: {rent_pld_spec}\n'
|
||||||
|
f'child_pld_spec: {child_pld_spec}\n'
|
||||||
|
f'codec: {codec}\n'
|
||||||
|
)
|
||||||
|
# await tractor.pause()
|
||||||
|
await ctx.started(nsp)
|
||||||
|
|
||||||
|
except tractor.MsgTypeError as _mte:
|
||||||
|
mte = _mte
|
||||||
|
|
||||||
|
# false -ve case
|
||||||
|
if add_hooks:
|
||||||
|
raise RuntimeError(
|
||||||
|
f'EXPECTED to `.started()` value given spec ??\n\n'
|
||||||
|
f'child_pld_spec -> {child_pld_spec}\n'
|
||||||
|
f'value = {nsp}: {type(nsp)}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
# true -ve case
|
||||||
|
raise mte
|
||||||
|
|
||||||
|
# TODO: maybe we should add our own wrapper error so as to
|
||||||
|
# be interchange-lib agnostic?
|
||||||
|
# -[ ] the error type is wtv is raised from the hook so we
|
||||||
|
# could also require a type-class of errors for
|
||||||
|
# indicating whether the hook-failure can be handled by
|
||||||
|
# a nasty-dialog-unprot sub-sys?
|
||||||
|
except TypeError as typerr:
|
||||||
|
# false -ve
|
||||||
|
if add_hooks:
|
||||||
|
raise RuntimeError('Should have been able to send `nsp`??')
|
||||||
|
|
||||||
|
# true -ve
|
||||||
|
print('Failed to send `nsp` due to no ext hooks set!')
|
||||||
|
raise typerr
|
||||||
|
|
||||||
|
# now try sending a set of valid and invalid plds to ensure
|
||||||
|
# the pld spec is respected.
|
||||||
|
sent: list[Any] = []
|
||||||
|
async with ctx.open_stream() as ipc:
|
||||||
|
print(
|
||||||
|
f'{uid}: streaming all pld types to rent..'
|
||||||
|
)
|
||||||
|
|
||||||
|
# for send_value, expect_send in iter_send_val_items:
|
||||||
|
for send_value in [
|
||||||
|
nsp,
|
||||||
|
str(nsp),
|
||||||
|
None,
|
||||||
|
]:
|
||||||
|
send_type: Type = type(send_value)
|
||||||
|
print(
|
||||||
|
f'{uid}: SENDING NEXT pld\n'
|
||||||
|
f'send_type: {send_type}\n'
|
||||||
|
f'send_value: {send_value}\n'
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
await ipc.send(send_value)
|
||||||
|
sent.append(send_value)
|
||||||
|
|
||||||
|
except ValidationError as valerr:
|
||||||
|
print(f'{uid} FAILED TO SEND {send_value}!')
|
||||||
|
|
||||||
|
# false -ve
|
||||||
|
if add_hooks:
|
||||||
|
raise RuntimeError(
|
||||||
|
f'EXPECTED to roundtrip value given spec:\n'
|
||||||
|
f'rent_pld_spec -> {rent_pld_spec}\n'
|
||||||
|
f'child_pld_spec -> {child_pld_spec}\n'
|
||||||
|
f'value = {send_value}: {send_type}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
# true -ve
|
||||||
|
raise valerr
|
||||||
|
# continue
|
||||||
|
|
||||||
|
else:
|
||||||
|
print(
|
||||||
|
f'{uid}: finished sending all values\n'
|
||||||
|
'Should be exiting stream block!\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
print(f'{uid}: exited streaming block!')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@cm
|
||||||
|
def maybe_apply_codec(codec: MsgCodec|None) -> MsgCodec|None:
|
||||||
|
if codec is None:
|
||||||
|
yield None
|
||||||
|
return
|
||||||
|
|
||||||
|
with apply_codec(codec) as codec:
|
||||||
|
yield codec
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'pld_spec',
|
||||||
|
[
|
||||||
|
Any,
|
||||||
|
NamespacePath,
|
||||||
|
NamespacePath|None, # the "maybe" spec Bo
|
||||||
|
],
|
||||||
|
ids=[
|
||||||
|
'any_type',
|
||||||
|
'only_nsp_ext',
|
||||||
|
'maybe_nsp_ext',
|
||||||
|
]
|
||||||
|
)
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'add_hooks',
|
||||||
|
[
|
||||||
|
True,
|
||||||
|
False,
|
||||||
|
],
|
||||||
|
ids=[
|
||||||
|
'use_codec_hooks',
|
||||||
|
'no_codec_hooks',
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_ext_types_over_ipc(
|
||||||
|
debug_mode: bool,
|
||||||
|
pld_spec: Union[Type],
|
||||||
|
add_hooks: bool,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Ensure we can support extension types coverted using
|
||||||
|
`enc/dec_hook()`s passed to the `.msg.limit_plds()` API
|
||||||
|
and that sane errors happen when we try do the same without
|
||||||
|
the codec hooks.
|
||||||
|
|
||||||
|
'''
|
||||||
|
pld_types: set[Type] = _codec.unpack_spec_types(pld_spec)
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
|
||||||
|
# sanity check the default pld-spec beforehand
|
||||||
|
chk_codec_applied(
|
||||||
|
expect_codec=_codec._def_tractor_codec,
|
||||||
|
)
|
||||||
|
|
||||||
|
# extension type we want to send as msg payload
|
||||||
|
nsp = NamespacePath.from_ref(ex_func)
|
||||||
|
|
||||||
|
# ^NOTE, 2 cases:
|
||||||
|
# - codec hooks noto added -> decode nsp as `str`
|
||||||
|
# - codec with hooks -> decode nsp as `NamespacePath`
|
||||||
|
nsp_codec: MsgCodec|None = None
|
||||||
|
if (
|
||||||
|
NamespacePath in pld_types
|
||||||
|
and
|
||||||
|
add_hooks
|
||||||
|
):
|
||||||
|
nsp_codec = mk_codec(
|
||||||
|
enc_hook=enc_nsp,
|
||||||
|
ext_types=[NamespacePath],
|
||||||
|
)
|
||||||
|
|
||||||
|
async with tractor.open_nursery(
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
) as an:
|
||||||
|
p: tractor.Portal = await an.start_actor(
|
||||||
|
'sub',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
)
|
||||||
|
with (
|
||||||
|
maybe_apply_codec(nsp_codec) as codec,
|
||||||
|
):
|
||||||
|
chk_codec_applied(
|
||||||
|
expect_codec=nsp_codec,
|
||||||
|
enter_value=codec,
|
||||||
|
)
|
||||||
|
rent_pld_spec_type_strs: list[str] = _exts.enc_type_union(pld_spec)
|
||||||
|
|
||||||
|
# XXX should raise an mte (`MsgTypeError`)
|
||||||
|
# when `add_hooks == False` bc the input
|
||||||
|
# `expect_ipc_send` kwarg has a nsp which can't be
|
||||||
|
# serialized!
|
||||||
|
#
|
||||||
|
# TODO:can we ensure this happens from the
|
||||||
|
# `Return`-side (aka the sub) as well?
|
||||||
|
try:
|
||||||
|
ctx: tractor.Context
|
||||||
|
ipc: tractor.MsgStream
|
||||||
|
async with (
|
||||||
|
|
||||||
|
# XXX should raise an mte (`MsgTypeError`)
|
||||||
|
# when `add_hooks == False`..
|
||||||
|
p.open_context(
|
||||||
|
send_back_values,
|
||||||
|
# expect_debug=debug_mode,
|
||||||
|
rent_pld_spec_type_strs=rent_pld_spec_type_strs,
|
||||||
|
add_hooks=add_hooks,
|
||||||
|
# expect_ipc_send=expect_ipc_send,
|
||||||
|
) as (ctx, first),
|
||||||
|
|
||||||
|
ctx.open_stream() as ipc,
|
||||||
|
):
|
||||||
|
with (
|
||||||
|
limit_plds(
|
||||||
|
pld_spec,
|
||||||
|
dec_hook=dec_nsp if add_hooks else None,
|
||||||
|
ext_types=[NamespacePath] if add_hooks else None,
|
||||||
|
) as pld_dec,
|
||||||
|
):
|
||||||
|
ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec
|
||||||
|
assert pld_dec is ctx_pld_dec
|
||||||
|
|
||||||
|
# if (
|
||||||
|
# not add_hooks
|
||||||
|
# and
|
||||||
|
# NamespacePath in
|
||||||
|
# ):
|
||||||
|
# pytest.fail('ctx should fail to open without custom enc_hook!?')
|
||||||
|
|
||||||
|
await ipc.send(nsp)
|
||||||
|
nsp_rt = await ipc.receive()
|
||||||
|
|
||||||
|
assert nsp_rt == nsp
|
||||||
|
assert nsp_rt.load_ref() is ex_func
|
||||||
|
|
||||||
|
# this test passes bc we can go no further!
|
||||||
|
except MsgTypeError as mte:
|
||||||
|
# if not add_hooks:
|
||||||
|
# # teardown nursery
|
||||||
|
# await p.cancel_actor()
|
||||||
|
# return
|
||||||
|
|
||||||
|
raise mte
|
||||||
|
|
||||||
|
await p.cancel_actor()
|
||||||
|
|
||||||
|
if (
|
||||||
|
NamespacePath in pld_types
|
||||||
|
and
|
||||||
|
add_hooks
|
||||||
|
):
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
else:
|
||||||
|
with pytest.raises(
|
||||||
|
expected_exception=tractor.RemoteActorError,
|
||||||
|
) as excinfo:
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
exc = excinfo.value
|
||||||
|
# bc `.started(nsp: NamespacePath)` will raise
|
||||||
|
assert exc.boxed_type is TypeError
|
||||||
|
|
||||||
|
|
||||||
|
# def chk_pld_type(
|
||||||
|
# payload_spec: Type[Struct]|Any,
|
||||||
|
# pld: Any,
|
||||||
|
|
||||||
|
# expect_roundtrip: bool|None = None,
|
||||||
|
|
||||||
|
# ) -> bool:
|
||||||
|
|
||||||
|
# pld_val_type: Type = type(pld)
|
||||||
|
|
||||||
|
# # TODO: verify that the overridden subtypes
|
||||||
|
# # DO NOT have modified type-annots from original!
|
||||||
|
# # 'Start', .pld: FuncSpec
|
||||||
|
# # 'StartAck', .pld: IpcCtxSpec
|
||||||
|
# # 'Stop', .pld: UNSEt
|
||||||
|
# # 'Error', .pld: ErrorData
|
||||||
|
|
||||||
|
# codec: MsgCodec = mk_codec(
|
||||||
|
# # NOTE: this ONLY accepts `PayloadMsg.pld` fields of a specified
|
||||||
|
# # type union.
|
||||||
|
# ipc_pld_spec=payload_spec,
|
||||||
|
# )
|
||||||
|
|
||||||
|
# # make a one-off dec to compare with our `MsgCodec` instance
|
||||||
|
# # which does the below `mk_msg_spec()` call internally
|
||||||
|
# ipc_msg_spec: Union[Type[Struct]]
|
||||||
|
# msg_types: list[PayloadMsg[payload_spec]]
|
||||||
|
# (
|
||||||
|
# ipc_msg_spec,
|
||||||
|
# msg_types,
|
||||||
|
# ) = mk_msg_spec(
|
||||||
|
# payload_type_union=payload_spec,
|
||||||
|
# )
|
||||||
|
# _enc = msgpack.Encoder()
|
||||||
|
# _dec = msgpack.Decoder(
|
||||||
|
# type=ipc_msg_spec or Any, # like `PayloadMsg[Any]`
|
||||||
|
# )
|
||||||
|
|
||||||
|
# assert (
|
||||||
|
# payload_spec
|
||||||
|
# ==
|
||||||
|
# codec.pld_spec
|
||||||
|
# )
|
||||||
|
|
||||||
|
# # assert codec.dec == dec
|
||||||
|
# #
|
||||||
|
# # ^-XXX-^ not sure why these aren't "equal" but when cast
|
||||||
|
# # to `str` they seem to match ?? .. kk
|
||||||
|
|
||||||
|
# assert (
|
||||||
|
# str(ipc_msg_spec)
|
||||||
|
# ==
|
||||||
|
# str(codec.msg_spec)
|
||||||
|
# ==
|
||||||
|
# str(_dec.type)
|
||||||
|
# ==
|
||||||
|
# str(codec.dec.type)
|
||||||
|
# )
|
||||||
|
|
||||||
|
# # verify the boxed-type for all variable payload-type msgs.
|
||||||
|
# if not msg_types:
|
||||||
|
# breakpoint()
|
||||||
|
|
||||||
|
# roundtrip: bool|None = None
|
||||||
|
# pld_spec_msg_names: list[str] = [
|
||||||
|
# td.__name__ for td in _payload_msgs
|
||||||
|
# ]
|
||||||
|
# for typedef in msg_types:
|
||||||
|
|
||||||
|
# skip_runtime_msg: bool = typedef.__name__ not in pld_spec_msg_names
|
||||||
|
# if skip_runtime_msg:
|
||||||
|
# continue
|
||||||
|
|
||||||
|
# pld_field = structs.fields(typedef)[1]
|
||||||
|
# assert pld_field.type is payload_spec # TODO-^ does this need to work to get all subtypes to adhere?
|
||||||
|
|
||||||
|
# kwargs: dict[str, Any] = {
|
||||||
|
# 'cid': '666',
|
||||||
|
# 'pld': pld,
|
||||||
|
# }
|
||||||
|
# enc_msg: PayloadMsg = typedef(**kwargs)
|
||||||
|
|
||||||
|
# _wire_bytes: bytes = _enc.encode(enc_msg)
|
||||||
|
# wire_bytes: bytes = codec.enc.encode(enc_msg)
|
||||||
|
# assert _wire_bytes == wire_bytes
|
||||||
|
|
||||||
|
# ve: ValidationError|None = None
|
||||||
|
# try:
|
||||||
|
# dec_msg = codec.dec.decode(wire_bytes)
|
||||||
|
# _dec_msg = _dec.decode(wire_bytes)
|
||||||
|
|
||||||
|
# # decoded msg and thus payload should be exactly same!
|
||||||
|
# assert (roundtrip := (
|
||||||
|
# _dec_msg
|
||||||
|
# ==
|
||||||
|
# dec_msg
|
||||||
|
# ==
|
||||||
|
# enc_msg
|
||||||
|
# ))
|
||||||
|
|
||||||
|
# if (
|
||||||
|
# expect_roundtrip is not None
|
||||||
|
# and expect_roundtrip != roundtrip
|
||||||
|
# ):
|
||||||
|
# breakpoint()
|
||||||
|
|
||||||
|
# assert (
|
||||||
|
# pld
|
||||||
|
# ==
|
||||||
|
# dec_msg.pld
|
||||||
|
# ==
|
||||||
|
# enc_msg.pld
|
||||||
|
# )
|
||||||
|
# # assert (roundtrip := (_dec_msg == enc_msg))
|
||||||
|
|
||||||
|
# except ValidationError as _ve:
|
||||||
|
# ve = _ve
|
||||||
|
# roundtrip: bool = False
|
||||||
|
# if pld_val_type is payload_spec:
|
||||||
|
# raise ValueError(
|
||||||
|
# 'Got `ValidationError` despite type-var match!?\n'
|
||||||
|
# f'pld_val_type: {pld_val_type}\n'
|
||||||
|
# f'payload_type: {payload_spec}\n'
|
||||||
|
# ) from ve
|
||||||
|
|
||||||
|
# else:
|
||||||
|
# # ow we good cuz the pld spec mismatched.
|
||||||
|
# print(
|
||||||
|
# 'Got expected `ValidationError` since,\n'
|
||||||
|
# f'{pld_val_type} is not {payload_spec}\n'
|
||||||
|
# )
|
||||||
|
# else:
|
||||||
|
# if (
|
||||||
|
# payload_spec is not Any
|
||||||
|
# and
|
||||||
|
# pld_val_type is not payload_spec
|
||||||
|
# ):
|
||||||
|
# raise ValueError(
|
||||||
|
# 'DID NOT `ValidationError` despite expected type match!?\n'
|
||||||
|
# f'pld_val_type: {pld_val_type}\n'
|
||||||
|
# f'payload_type: {payload_spec}\n'
|
||||||
|
# )
|
||||||
|
|
||||||
|
# # full code decode should always be attempted!
|
||||||
|
# if roundtrip is None:
|
||||||
|
# breakpoint()
|
||||||
|
|
||||||
|
# return roundtrip
|
||||||
|
|
||||||
|
|
||||||
|
# ?TODO? maybe remove since covered in the newer `test_pldrx_limiting`
|
||||||
|
# via end-2-end testing of all this?
|
||||||
|
# -[ ] IOW do we really NEED this lowlevel unit testing?
|
||||||
|
#
|
||||||
|
# def test_limit_msgspec(
|
||||||
|
# debug_mode: bool,
|
||||||
|
# ):
|
||||||
|
# '''
|
||||||
|
# Internals unit testing to verify that type-limiting an IPC ctx's
|
||||||
|
# msg spec with `Pldrx.limit_plds()` results in various
|
||||||
|
# encapsulated `msgspec` object settings and state.
|
||||||
|
|
||||||
|
# '''
|
||||||
|
# async def main():
|
||||||
|
# async with tractor.open_root_actor(
|
||||||
|
# debug_mode=debug_mode,
|
||||||
|
# ):
|
||||||
|
# # ensure we can round-trip a boxing `PayloadMsg`
|
||||||
|
# assert chk_pld_type(
|
||||||
|
# payload_spec=Any,
|
||||||
|
# pld=None,
|
||||||
|
# expect_roundtrip=True,
|
||||||
|
# )
|
||||||
|
|
||||||
|
# # verify that a mis-typed payload value won't decode
|
||||||
|
# assert not chk_pld_type(
|
||||||
|
# payload_spec=int,
|
||||||
|
# pld='doggy',
|
||||||
|
# )
|
||||||
|
|
||||||
|
# # parametrize the boxed `.pld` type as a custom-struct
|
||||||
|
# # and ensure that parametrization propagates
|
||||||
|
# # to all payload-msg-spec-able subtypes!
|
||||||
|
# class CustomPayload(Struct):
|
||||||
|
# name: str
|
||||||
|
# value: Any
|
||||||
|
|
||||||
|
# assert not chk_pld_type(
|
||||||
|
# payload_spec=CustomPayload,
|
||||||
|
# pld='doggy',
|
||||||
|
# )
|
||||||
|
|
||||||
|
# assert chk_pld_type(
|
||||||
|
# payload_spec=CustomPayload,
|
||||||
|
# pld=CustomPayload(name='doggy', value='urmom')
|
||||||
|
# )
|
||||||
|
|
||||||
|
# # yah, we can `.pause_from_sync()` now!
|
||||||
|
# # breakpoint()
|
||||||
|
|
||||||
|
# trio.run(main)
|
||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
|
@ -9,7 +9,7 @@ import trio
|
||||||
import tractor
|
import tractor
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from conftest import tractor_test
|
from tractor._testing import tractor_test
|
||||||
|
|
||||||
|
|
||||||
def test_must_define_ctx():
|
def test_must_define_ctx():
|
||||||
|
|
@ -38,10 +38,13 @@ async def async_gen_stream(sequence):
|
||||||
assert cs.cancelled_caught
|
assert cs.cancelled_caught
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: deprecated either remove entirely
|
||||||
|
# or re-impl in terms of `MsgStream` one-sides
|
||||||
|
# wrapper, but at least remove `Portal.open_stream_from()`
|
||||||
@tractor.stream
|
@tractor.stream
|
||||||
async def context_stream(
|
async def context_stream(
|
||||||
ctx: tractor.Context,
|
ctx: tractor.Context,
|
||||||
sequence
|
sequence: list[int],
|
||||||
):
|
):
|
||||||
for i in sequence:
|
for i in sequence:
|
||||||
await ctx.send_yield(i)
|
await ctx.send_yield(i)
|
||||||
|
|
@ -55,7 +58,7 @@ async def context_stream(
|
||||||
|
|
||||||
|
|
||||||
async def stream_from_single_subactor(
|
async def stream_from_single_subactor(
|
||||||
arb_addr,
|
reg_addr,
|
||||||
start_method,
|
start_method,
|
||||||
stream_func,
|
stream_func,
|
||||||
):
|
):
|
||||||
|
|
@ -64,7 +67,7 @@ async def stream_from_single_subactor(
|
||||||
# only one per host address, spawns an actor if None
|
# only one per host address, spawns an actor if None
|
||||||
|
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
arbiter_addr=arb_addr,
|
registry_addrs=[reg_addr],
|
||||||
start_method=start_method,
|
start_method=start_method,
|
||||||
) as nursery:
|
) as nursery:
|
||||||
|
|
||||||
|
|
@ -115,13 +118,13 @@ async def stream_from_single_subactor(
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
'stream_func', [async_gen_stream, context_stream]
|
'stream_func', [async_gen_stream, context_stream]
|
||||||
)
|
)
|
||||||
def test_stream_from_single_subactor(arb_addr, start_method, stream_func):
|
def test_stream_from_single_subactor(reg_addr, start_method, stream_func):
|
||||||
"""Verify streaming from a spawned async generator.
|
"""Verify streaming from a spawned async generator.
|
||||||
"""
|
"""
|
||||||
trio.run(
|
trio.run(
|
||||||
partial(
|
partial(
|
||||||
stream_from_single_subactor,
|
stream_from_single_subactor,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
start_method,
|
start_method,
|
||||||
stream_func=stream_func,
|
stream_func=stream_func,
|
||||||
),
|
),
|
||||||
|
|
@ -225,33 +228,51 @@ async def a_quadruple_example():
|
||||||
return result_stream
|
return result_stream
|
||||||
|
|
||||||
|
|
||||||
async def cancel_after(wait, arb_addr):
|
async def cancel_after(wait, reg_addr):
|
||||||
async with tractor.open_root_actor(arbiter_addr=arb_addr):
|
async with tractor.open_root_actor(registry_addrs=[reg_addr]):
|
||||||
with trio.move_on_after(wait):
|
with trio.move_on_after(wait):
|
||||||
return await a_quadruple_example()
|
return await a_quadruple_example()
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope='module')
|
@pytest.fixture(scope='module')
|
||||||
def time_quad_ex(arb_addr, ci_env, spawn_backend):
|
def time_quad_ex(
|
||||||
|
reg_addr: tuple,
|
||||||
|
ci_env: bool,
|
||||||
|
spawn_backend: str,
|
||||||
|
):
|
||||||
if spawn_backend == 'mp':
|
if spawn_backend == 'mp':
|
||||||
"""no idea but the mp *nix runs are flaking out here often...
|
'''
|
||||||
"""
|
no idea but the mp *nix runs are flaking out here often...
|
||||||
|
|
||||||
|
'''
|
||||||
pytest.skip("Test is too flaky on mp in CI")
|
pytest.skip("Test is too flaky on mp in CI")
|
||||||
|
|
||||||
timeout = 7 if platform.system() in ('Windows', 'Darwin') else 4
|
timeout = 7 if platform.system() in ('Windows', 'Darwin') else 4
|
||||||
start = time.time()
|
start = time.time()
|
||||||
results = trio.run(cancel_after, timeout, arb_addr)
|
results = trio.run(cancel_after, timeout, reg_addr)
|
||||||
diff = time.time() - start
|
diff = time.time() - start
|
||||||
assert results
|
assert results
|
||||||
return results, diff
|
return results, diff
|
||||||
|
|
||||||
|
|
||||||
def test_a_quadruple_example(time_quad_ex, ci_env, spawn_backend):
|
def test_a_quadruple_example(
|
||||||
"""This also serves as a kind of "we'd like to be this fast test"."""
|
time_quad_ex: tuple,
|
||||||
|
ci_env: bool,
|
||||||
|
spawn_backend: str,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
This also serves as a kind of "we'd like to be this fast test".
|
||||||
|
|
||||||
|
'''
|
||||||
results, diff = time_quad_ex
|
results, diff = time_quad_ex
|
||||||
assert results
|
assert results
|
||||||
this_fast = 6 if platform.system() in ('Windows', 'Darwin') else 2.666
|
this_fast = (
|
||||||
|
6 if platform.system() in (
|
||||||
|
'Windows',
|
||||||
|
'Darwin',
|
||||||
|
)
|
||||||
|
else 3
|
||||||
|
)
|
||||||
assert diff < this_fast
|
assert diff < this_fast
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -260,14 +281,14 @@ def test_a_quadruple_example(time_quad_ex, ci_env, spawn_backend):
|
||||||
list(map(lambda i: i/10, range(3, 9)))
|
list(map(lambda i: i/10, range(3, 9)))
|
||||||
)
|
)
|
||||||
def test_not_fast_enough_quad(
|
def test_not_fast_enough_quad(
|
||||||
arb_addr, time_quad_ex, cancel_delay, ci_env, spawn_backend
|
reg_addr, time_quad_ex, cancel_delay, ci_env, spawn_backend
|
||||||
):
|
):
|
||||||
"""Verify we can cancel midway through the quad example and all actors
|
"""Verify we can cancel midway through the quad example and all actors
|
||||||
cancel gracefully.
|
cancel gracefully.
|
||||||
"""
|
"""
|
||||||
results, diff = time_quad_ex
|
results, diff = time_quad_ex
|
||||||
delay = max(diff - cancel_delay, 0)
|
delay = max(diff - cancel_delay, 0)
|
||||||
results = trio.run(cancel_after, delay, arb_addr)
|
results = trio.run(cancel_after, delay, reg_addr)
|
||||||
system = platform.system()
|
system = platform.system()
|
||||||
if system in ('Windows', 'Darwin') and results is not None:
|
if system in ('Windows', 'Darwin') and results is not None:
|
||||||
# In CI envoirments it seems later runs are quicker then the first
|
# In CI envoirments it seems later runs are quicker then the first
|
||||||
|
|
@ -280,7 +301,7 @@ def test_not_fast_enough_quad(
|
||||||
|
|
||||||
@tractor_test
|
@tractor_test
|
||||||
async def test_respawn_consumer_task(
|
async def test_respawn_consumer_task(
|
||||||
arb_addr,
|
reg_addr,
|
||||||
spawn_backend,
|
spawn_backend,
|
||||||
loglevel,
|
loglevel,
|
||||||
):
|
):
|
||||||
|
|
@ -7,7 +7,7 @@ import pytest
|
||||||
import trio
|
import trio
|
||||||
import tractor
|
import tractor
|
||||||
|
|
||||||
from conftest import tractor_test
|
from tractor._testing import tractor_test
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.trio
|
@pytest.mark.trio
|
||||||
|
|
@ -24,7 +24,7 @@ async def test_no_runtime():
|
||||||
|
|
||||||
|
|
||||||
@tractor_test
|
@tractor_test
|
||||||
async def test_self_is_registered(arb_addr):
|
async def test_self_is_registered(reg_addr):
|
||||||
"Verify waiting on the arbiter to register itself using the standard api."
|
"Verify waiting on the arbiter to register itself using the standard api."
|
||||||
actor = tractor.current_actor()
|
actor = tractor.current_actor()
|
||||||
assert actor.is_arbiter
|
assert actor.is_arbiter
|
||||||
|
|
@ -34,20 +34,20 @@ async def test_self_is_registered(arb_addr):
|
||||||
|
|
||||||
|
|
||||||
@tractor_test
|
@tractor_test
|
||||||
async def test_self_is_registered_localportal(arb_addr):
|
async def test_self_is_registered_localportal(reg_addr):
|
||||||
"Verify waiting on the arbiter to register itself using a local portal."
|
"Verify waiting on the arbiter to register itself using a local portal."
|
||||||
actor = tractor.current_actor()
|
actor = tractor.current_actor()
|
||||||
assert actor.is_arbiter
|
assert actor.is_arbiter
|
||||||
async with tractor.get_arbiter(*arb_addr) as portal:
|
async with tractor.get_registry(reg_addr) as portal:
|
||||||
assert isinstance(portal, tractor._portal.LocalPortal)
|
assert isinstance(portal, tractor._portal.LocalPortal)
|
||||||
|
|
||||||
with trio.fail_after(0.2):
|
with trio.fail_after(0.2):
|
||||||
sockaddr = await portal.run_from_ns(
|
sockaddr = await portal.run_from_ns(
|
||||||
'self', 'wait_for_actor', name='root')
|
'self', 'wait_for_actor', name='root')
|
||||||
assert sockaddr[0] == arb_addr
|
assert sockaddr[0] == reg_addr
|
||||||
|
|
||||||
|
|
||||||
def test_local_actor_async_func(arb_addr):
|
def test_local_actor_async_func(reg_addr):
|
||||||
"""Verify a simple async function in-process.
|
"""Verify a simple async function in-process.
|
||||||
"""
|
"""
|
||||||
nums = []
|
nums = []
|
||||||
|
|
@ -55,7 +55,7 @@ def test_local_actor_async_func(arb_addr):
|
||||||
async def print_loop():
|
async def print_loop():
|
||||||
|
|
||||||
async with tractor.open_root_actor(
|
async with tractor.open_root_actor(
|
||||||
arbiter_addr=arb_addr,
|
registry_addrs=[reg_addr],
|
||||||
):
|
):
|
||||||
# arbiter is started in-proc if dne
|
# arbiter is started in-proc if dne
|
||||||
assert tractor.current_actor().is_arbiter
|
assert tractor.current_actor().is_arbiter
|
||||||
|
|
|
||||||
|
|
@ -7,8 +7,10 @@ import time
|
||||||
import pytest
|
import pytest
|
||||||
import trio
|
import trio
|
||||||
import tractor
|
import tractor
|
||||||
from conftest import (
|
from tractor._testing import (
|
||||||
tractor_test,
|
tractor_test,
|
||||||
|
)
|
||||||
|
from .conftest import (
|
||||||
sig_prog,
|
sig_prog,
|
||||||
_INT_SIGNAL,
|
_INT_SIGNAL,
|
||||||
_INT_RETURN_CODE,
|
_INT_RETURN_CODE,
|
||||||
|
|
@ -28,9 +30,9 @@ def test_abort_on_sigint(daemon):
|
||||||
|
|
||||||
|
|
||||||
@tractor_test
|
@tractor_test
|
||||||
async def test_cancel_remote_arbiter(daemon, arb_addr):
|
async def test_cancel_remote_arbiter(daemon, reg_addr):
|
||||||
assert not tractor.current_actor().is_arbiter
|
assert not tractor.current_actor().is_arbiter
|
||||||
async with tractor.get_arbiter(*arb_addr) as portal:
|
async with tractor.get_registry(reg_addr) as portal:
|
||||||
await portal.cancel_actor()
|
await portal.cancel_actor()
|
||||||
|
|
||||||
time.sleep(0.1)
|
time.sleep(0.1)
|
||||||
|
|
@ -39,16 +41,16 @@ async def test_cancel_remote_arbiter(daemon, arb_addr):
|
||||||
|
|
||||||
# no arbiter socket should exist
|
# no arbiter socket should exist
|
||||||
with pytest.raises(OSError):
|
with pytest.raises(OSError):
|
||||||
async with tractor.get_arbiter(*arb_addr) as portal:
|
async with tractor.get_registry(reg_addr) as portal:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
def test_register_duplicate_name(daemon, arb_addr):
|
def test_register_duplicate_name(daemon, reg_addr):
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
|
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
arbiter_addr=arb_addr,
|
registry_addrs=[reg_addr],
|
||||||
) as n:
|
) as n:
|
||||||
|
|
||||||
assert not tractor.current_actor().is_arbiter
|
assert not tractor.current_actor().is_arbiter
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,239 @@
|
||||||
|
'''
|
||||||
|
Define the details of inter-actor "out-of-band" (OoB) cancel
|
||||||
|
semantics, that is how cancellation works when a cancel request comes
|
||||||
|
from the different concurrency (primitive's) "layer" then where the
|
||||||
|
eventual `trio.Task` actually raises a signal.
|
||||||
|
|
||||||
|
'''
|
||||||
|
from functools import partial
|
||||||
|
# from contextlib import asynccontextmanager as acm
|
||||||
|
# import itertools
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
from tractor import ( # typing
|
||||||
|
ActorNursery,
|
||||||
|
Portal,
|
||||||
|
Context,
|
||||||
|
# ContextCancelled,
|
||||||
|
# RemoteActorError,
|
||||||
|
)
|
||||||
|
# from tractor._testing import (
|
||||||
|
# tractor_test,
|
||||||
|
# expect_ctxc,
|
||||||
|
# )
|
||||||
|
|
||||||
|
# XXX TODO cases:
|
||||||
|
# - [ ] peer cancelled itself - so other peers should
|
||||||
|
# get errors reflecting that the peer was itself the .canceller?
|
||||||
|
|
||||||
|
# def test_self_cancel():
|
||||||
|
# '''
|
||||||
|
# 2 cases:
|
||||||
|
# - calls `Actor.cancel()` locally in some task
|
||||||
|
# - calls LocalPortal.cancel_actor()` ?
|
||||||
|
#
|
||||||
|
# things to ensure!
|
||||||
|
# -[ ] the ctxc raised in a child should ideally show the tb of the
|
||||||
|
# underlying `Cancelled` checkpoint, i.e.
|
||||||
|
# `raise scope_error from ctxc`?
|
||||||
|
#
|
||||||
|
# -[ ] a self-cancelled context, if not allowed to block on
|
||||||
|
# `ctx.result()` at some point will hang since the `ctx._scope`
|
||||||
|
# is never `.cancel_called`; cases for this include,
|
||||||
|
# - an `open_ctx()` which never starteds before being OoB actor
|
||||||
|
# cancelled.
|
||||||
|
# |_ parent task will be blocked in `.open_context()` for the
|
||||||
|
# `Started` msg, and when the OoB ctxc arrives `ctx._scope`
|
||||||
|
# will never have been signalled..
|
||||||
|
|
||||||
|
# '''
|
||||||
|
# ...
|
||||||
|
|
||||||
|
# TODO, sanity test against the case in `/examples/trio/lockacquire_not_unmasked.py`
|
||||||
|
# but with the `Lock.acquire()` from a `@context` to ensure the
|
||||||
|
# implicit ignore-case-non-unmasking.
|
||||||
|
#
|
||||||
|
# @tractor.context
|
||||||
|
# async def acquire_actor_global_lock(
|
||||||
|
# ctx: tractor.Context,
|
||||||
|
# ignore_special_cases: bool,
|
||||||
|
# ):
|
||||||
|
|
||||||
|
# async with maybe_unmask_excs(
|
||||||
|
# ignore_special_cases=ignore_special_cases,
|
||||||
|
# ):
|
||||||
|
# await ctx.started('locked')
|
||||||
|
|
||||||
|
# # block til cancelled
|
||||||
|
# await trio.sleep_forever()
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def sleep_forever(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
# ignore_special_cases: bool,
|
||||||
|
do_started: bool,
|
||||||
|
):
|
||||||
|
|
||||||
|
# async with maybe_unmask_excs(
|
||||||
|
# ignore_special_cases=ignore_special_cases,
|
||||||
|
# ):
|
||||||
|
# await ctx.started('locked')
|
||||||
|
if do_started:
|
||||||
|
await ctx.started()
|
||||||
|
|
||||||
|
# block til cancelled
|
||||||
|
print('sleepin on child-side..')
|
||||||
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'cancel_ctx',
|
||||||
|
[True, False],
|
||||||
|
)
|
||||||
|
def test_cancel_ctx_with_parent_side_entered_in_bg_task(
|
||||||
|
debug_mode: bool,
|
||||||
|
loglevel: str,
|
||||||
|
cancel_ctx: bool,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
The most "basic" out-of-band-task self-cancellation case where
|
||||||
|
`Portal.open_context()` is entered in a bg task and the
|
||||||
|
parent-task (of the containing nursery) calls `Context.cancel()`
|
||||||
|
without the child knowing; the `Context._scope` should be
|
||||||
|
`.cancel_called` when the IPC ctx's child-side relays
|
||||||
|
a `ContextCancelled` with a `.canceller` set to the parent
|
||||||
|
actor('s task).
|
||||||
|
|
||||||
|
'''
|
||||||
|
async def main():
|
||||||
|
with trio.fail_after(
|
||||||
|
2 if not debug_mode else 999,
|
||||||
|
):
|
||||||
|
an: ActorNursery
|
||||||
|
async with (
|
||||||
|
tractor.open_nursery(
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
loglevel='devx',
|
||||||
|
enable_stack_on_sig=True,
|
||||||
|
) as an,
|
||||||
|
trio.open_nursery() as tn,
|
||||||
|
):
|
||||||
|
ptl: Portal = await an.start_actor(
|
||||||
|
'sub',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
)
|
||||||
|
|
||||||
|
async def _open_ctx_async(
|
||||||
|
do_started: bool = True,
|
||||||
|
task_status=trio.TASK_STATUS_IGNORED,
|
||||||
|
):
|
||||||
|
# do we expect to never enter the
|
||||||
|
# `.open_context()` below.
|
||||||
|
if not do_started:
|
||||||
|
task_status.started()
|
||||||
|
|
||||||
|
async with ptl.open_context(
|
||||||
|
sleep_forever,
|
||||||
|
do_started=do_started,
|
||||||
|
) as (ctx, first):
|
||||||
|
task_status.started(ctx)
|
||||||
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
# XXX, this is the key OoB part!
|
||||||
|
#
|
||||||
|
# - start the `.open_context()` in a bg task which
|
||||||
|
# blocks inside the embedded scope-body,
|
||||||
|
#
|
||||||
|
# - when we call `Context.cancel()` it **is
|
||||||
|
# not** from the same task which eventually runs
|
||||||
|
# `.__aexit__()`,
|
||||||
|
#
|
||||||
|
# - since the bg "opener" task will be in
|
||||||
|
# a `trio.sleep_forever()`, it must be interrupted
|
||||||
|
# by the `ContextCancelled` delivered from the
|
||||||
|
# child-side; `Context._scope: CancelScope` MUST
|
||||||
|
# be `.cancel_called`!
|
||||||
|
#
|
||||||
|
print('ASYNC opening IPC context in subtask..')
|
||||||
|
maybe_ctx: Context|None = await tn.start(partial(
|
||||||
|
_open_ctx_async,
|
||||||
|
))
|
||||||
|
|
||||||
|
if (
|
||||||
|
maybe_ctx
|
||||||
|
and
|
||||||
|
cancel_ctx
|
||||||
|
):
|
||||||
|
print('cancelling first IPC ctx!')
|
||||||
|
await maybe_ctx.cancel()
|
||||||
|
|
||||||
|
# XXX, note that despite `maybe_context.cancel()`
|
||||||
|
# being called above, it's the parent (bg) task
|
||||||
|
# which was originally never interrupted in
|
||||||
|
# the `ctx._scope` body due to missing case logic in
|
||||||
|
# `ctx._maybe_cancel_and_set_remote_error()`.
|
||||||
|
#
|
||||||
|
# It didn't matter that the subactor process was
|
||||||
|
# already terminated and reaped, nothing was
|
||||||
|
# cancelling the ctx-parent task's scope!
|
||||||
|
#
|
||||||
|
print('cancelling subactor!')
|
||||||
|
await ptl.cancel_actor()
|
||||||
|
|
||||||
|
if maybe_ctx:
|
||||||
|
try:
|
||||||
|
await maybe_ctx.wait_for_result()
|
||||||
|
except tractor.ContextCancelled as ctxc:
|
||||||
|
assert not cancel_ctx
|
||||||
|
assert (
|
||||||
|
ctxc.canceller
|
||||||
|
==
|
||||||
|
tractor.current_actor().aid.uid
|
||||||
|
)
|
||||||
|
# don't re-raise since it'll trigger
|
||||||
|
# an EG from the above tn.
|
||||||
|
|
||||||
|
if cancel_ctx:
|
||||||
|
# graceful self-cancel
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
else:
|
||||||
|
# ctx parent task should see OoB ctxc due to
|
||||||
|
# `ptl.cancel_actor()`.
|
||||||
|
with pytest.raises(tractor.ContextCancelled) as excinfo:
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
assert 'root' in excinfo.value.canceller[0]
|
||||||
|
|
||||||
|
|
||||||
|
# def test_parent_actor_cancels_subactor_with_gt1_ctxs_open_to_it(
|
||||||
|
# debug_mode: bool,
|
||||||
|
# loglevel: str,
|
||||||
|
# ):
|
||||||
|
# '''
|
||||||
|
# Demos OoB cancellation from the perspective of a ctx opened with
|
||||||
|
# a child subactor where the parent cancels the child at the "actor
|
||||||
|
# layer" using `Portal.cancel_actor()` and thus the
|
||||||
|
# `ContextCancelled.canceller` received by the ctx's parent-side
|
||||||
|
# task will appear to be a "self cancellation" even though that
|
||||||
|
# specific task itself was not cancelled and thus
|
||||||
|
# `Context.cancel_called ==False`.
|
||||||
|
# '''
|
||||||
|
# TODO, do we have an existing implied ctx
|
||||||
|
# cancel test like this?
|
||||||
|
# with trio.move_on_after(0.5):# as cs:
|
||||||
|
# await _open_ctx_async(
|
||||||
|
# do_started=False,
|
||||||
|
# )
|
||||||
|
|
||||||
|
|
||||||
|
# in-line ctx scope should definitely raise
|
||||||
|
# a ctxc with `.canceller = 'root'`
|
||||||
|
# async with ptl.open_context(
|
||||||
|
# sleep_forever,
|
||||||
|
# do_started=True,
|
||||||
|
# ) as pair:
|
||||||
|
|
||||||
|
|
@ -0,0 +1,364 @@
|
||||||
|
'''
|
||||||
|
Audit sub-sys APIs from `.msg._ops`
|
||||||
|
mostly for ensuring correct `contextvars`
|
||||||
|
related settings around IPC contexts.
|
||||||
|
|
||||||
|
'''
|
||||||
|
from contextlib import (
|
||||||
|
asynccontextmanager as acm,
|
||||||
|
)
|
||||||
|
|
||||||
|
from msgspec import (
|
||||||
|
Struct,
|
||||||
|
)
|
||||||
|
import pytest
|
||||||
|
import trio
|
||||||
|
|
||||||
|
import tractor
|
||||||
|
from tractor import (
|
||||||
|
Context,
|
||||||
|
MsgTypeError,
|
||||||
|
current_ipc_ctx,
|
||||||
|
Portal,
|
||||||
|
)
|
||||||
|
from tractor.msg import (
|
||||||
|
_ops as msgops,
|
||||||
|
Return,
|
||||||
|
)
|
||||||
|
from tractor.msg import (
|
||||||
|
_codec,
|
||||||
|
)
|
||||||
|
from tractor.msg.types import (
|
||||||
|
log,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PldMsg(
|
||||||
|
Struct,
|
||||||
|
|
||||||
|
# TODO: with multiple structs in-spec we need to tag them!
|
||||||
|
# -[ ] offer a built-in `PldMsg` type to inherit from which takes
|
||||||
|
# case of these details?
|
||||||
|
#
|
||||||
|
# https://jcristharif.com/msgspec/structs.html#tagged-unions
|
||||||
|
# tag=True,
|
||||||
|
# tag_field='msg_type',
|
||||||
|
):
|
||||||
|
field: str
|
||||||
|
|
||||||
|
|
||||||
|
maybe_msg_spec = PldMsg|None
|
||||||
|
|
||||||
|
|
||||||
|
@acm
|
||||||
|
async def maybe_expect_raises(
|
||||||
|
raises: BaseException|None = None,
|
||||||
|
ensure_in_message: list[str]|None = None,
|
||||||
|
post_mortem: bool = False,
|
||||||
|
timeout: int = 3,
|
||||||
|
) -> None:
|
||||||
|
'''
|
||||||
|
Async wrapper for ensuring errors propagate from the inner scope.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if tractor._state.debug_mode():
|
||||||
|
timeout += 999
|
||||||
|
|
||||||
|
with trio.fail_after(timeout):
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
except BaseException as _inner_err:
|
||||||
|
inner_err = _inner_err
|
||||||
|
# wasn't-expected to error..
|
||||||
|
if raises is None:
|
||||||
|
raise
|
||||||
|
|
||||||
|
else:
|
||||||
|
assert type(inner_err) is raises
|
||||||
|
|
||||||
|
# maybe check for error txt content
|
||||||
|
if ensure_in_message:
|
||||||
|
part: str
|
||||||
|
err_repr: str = repr(inner_err)
|
||||||
|
for part in ensure_in_message:
|
||||||
|
for i, arg in enumerate(inner_err.args):
|
||||||
|
if part in err_repr:
|
||||||
|
break
|
||||||
|
# if part never matches an arg, then we're
|
||||||
|
# missing a match.
|
||||||
|
else:
|
||||||
|
raise ValueError(
|
||||||
|
'Failed to find error message content?\n\n'
|
||||||
|
f'expected: {ensure_in_message!r}\n'
|
||||||
|
f'part: {part!r}\n\n'
|
||||||
|
f'{inner_err.args}'
|
||||||
|
)
|
||||||
|
|
||||||
|
if post_mortem:
|
||||||
|
await tractor.post_mortem()
|
||||||
|
|
||||||
|
else:
|
||||||
|
if raises:
|
||||||
|
raise RuntimeError(
|
||||||
|
f'Expected a {raises.__name__!r} to be raised?'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context(
|
||||||
|
pld_spec=maybe_msg_spec,
|
||||||
|
)
|
||||||
|
async def child(
|
||||||
|
ctx: Context,
|
||||||
|
started_value: int|PldMsg|None,
|
||||||
|
return_value: str|None,
|
||||||
|
validate_pld_spec: bool,
|
||||||
|
raise_on_started_mte: bool = True,
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
'''
|
||||||
|
Call ``Context.started()`` more then once (an error).
|
||||||
|
|
||||||
|
'''
|
||||||
|
expect_started_mte: bool = started_value == 10
|
||||||
|
|
||||||
|
# sanaity check that child RPC context is the current one
|
||||||
|
curr_ctx: Context = current_ipc_ctx()
|
||||||
|
assert ctx is curr_ctx
|
||||||
|
|
||||||
|
rx: msgops.PldRx = ctx._pld_rx
|
||||||
|
curr_pldec: _codec.MsgDec = rx.pld_dec
|
||||||
|
|
||||||
|
ctx_meta: dict = getattr(
|
||||||
|
child,
|
||||||
|
'_tractor_context_meta',
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
if ctx_meta:
|
||||||
|
assert (
|
||||||
|
ctx_meta['pld_spec']
|
||||||
|
is curr_pldec.spec
|
||||||
|
is curr_pldec.pld_spec
|
||||||
|
)
|
||||||
|
|
||||||
|
# 2 cases: hdndle send-side and recv-only validation
|
||||||
|
# - when `raise_on_started_mte == True`, send validate
|
||||||
|
# - else, parent-recv-side only validation
|
||||||
|
mte: MsgTypeError|None = None
|
||||||
|
try:
|
||||||
|
await ctx.started(
|
||||||
|
value=started_value,
|
||||||
|
validate_pld_spec=validate_pld_spec,
|
||||||
|
)
|
||||||
|
|
||||||
|
except MsgTypeError as _mte:
|
||||||
|
mte = _mte
|
||||||
|
log.exception('started()` raised an MTE!\n')
|
||||||
|
if not expect_started_mte:
|
||||||
|
raise RuntimeError(
|
||||||
|
'Child-ctx-task SHOULD NOT HAVE raised an MTE for\n\n'
|
||||||
|
f'{started_value!r}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
boxed_div: str = '------ - ------'
|
||||||
|
assert boxed_div not in mte._message
|
||||||
|
assert boxed_div not in mte.tb_str
|
||||||
|
assert boxed_div not in repr(mte)
|
||||||
|
assert boxed_div not in str(mte)
|
||||||
|
mte_repr: str = repr(mte)
|
||||||
|
for line in mte.message.splitlines():
|
||||||
|
assert line in mte_repr
|
||||||
|
|
||||||
|
# since this is a *local error* there should be no
|
||||||
|
# boxed traceback content!
|
||||||
|
assert not mte.tb_str
|
||||||
|
|
||||||
|
# propagate to parent?
|
||||||
|
if raise_on_started_mte:
|
||||||
|
raise
|
||||||
|
|
||||||
|
# no-send-side-error fallthrough
|
||||||
|
if (
|
||||||
|
validate_pld_spec
|
||||||
|
and
|
||||||
|
expect_started_mte
|
||||||
|
):
|
||||||
|
raise RuntimeError(
|
||||||
|
'Child-ctx-task SHOULD HAVE raised an MTE for\n\n'
|
||||||
|
f'{started_value!r}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
not expect_started_mte
|
||||||
|
or
|
||||||
|
not validate_pld_spec
|
||||||
|
)
|
||||||
|
|
||||||
|
# if wait_for_parent_to_cancel:
|
||||||
|
# ...
|
||||||
|
#
|
||||||
|
# ^-TODO-^ logic for diff validation policies on each side:
|
||||||
|
#
|
||||||
|
# -[ ] ensure that if we don't validate on the send
|
||||||
|
# side, that we are eventually error-cancelled by our
|
||||||
|
# parent due to the bad `Started` payload!
|
||||||
|
# -[ ] the boxed error should be srced from the parent's
|
||||||
|
# runtime NOT ours!
|
||||||
|
# -[ ] we should still error on bad `return_value`s
|
||||||
|
# despite the parent not yet error-cancelling us?
|
||||||
|
# |_ how do we want the parent side to look in that
|
||||||
|
# case?
|
||||||
|
# -[ ] maybe the equiv of "during handling of the
|
||||||
|
# above error another occurred" for the case where
|
||||||
|
# the parent sends a MTE to this child and while
|
||||||
|
# waiting for the child to terminate it gets back
|
||||||
|
# the MTE for this case?
|
||||||
|
#
|
||||||
|
|
||||||
|
# XXX should always fail on recv side since we can't
|
||||||
|
# really do much else beside terminate and relay the
|
||||||
|
# msg-type-error from this RPC task ;)
|
||||||
|
return return_value
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'return_value',
|
||||||
|
[
|
||||||
|
'yo',
|
||||||
|
None,
|
||||||
|
],
|
||||||
|
ids=[
|
||||||
|
'return[invalid-"yo"]',
|
||||||
|
'return[valid-None]',
|
||||||
|
],
|
||||||
|
)
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'started_value',
|
||||||
|
[
|
||||||
|
10,
|
||||||
|
PldMsg(field='yo'),
|
||||||
|
],
|
||||||
|
ids=[
|
||||||
|
'Started[invalid-10]',
|
||||||
|
'Started[valid-PldMsg]',
|
||||||
|
],
|
||||||
|
)
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'pld_check_started_value',
|
||||||
|
[
|
||||||
|
True,
|
||||||
|
False,
|
||||||
|
],
|
||||||
|
ids=[
|
||||||
|
'check-started-pld',
|
||||||
|
'no-started-pld-validate',
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_basic_payload_spec(
|
||||||
|
debug_mode: bool,
|
||||||
|
loglevel: str,
|
||||||
|
return_value: str|None,
|
||||||
|
started_value: int|PldMsg,
|
||||||
|
pld_check_started_value: bool,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Validate the most basic `PldRx` msg-type-spec semantics around
|
||||||
|
a IPC `Context` endpoint start, started-sync, and final return
|
||||||
|
value depending on set payload types and the currently applied
|
||||||
|
pld-spec.
|
||||||
|
|
||||||
|
'''
|
||||||
|
invalid_return: bool = return_value == 'yo'
|
||||||
|
invalid_started: bool = started_value == 10
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
async with tractor.open_nursery(
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
loglevel=loglevel,
|
||||||
|
) as an:
|
||||||
|
p: Portal = await an.start_actor(
|
||||||
|
'child',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
)
|
||||||
|
|
||||||
|
# since not opened yet.
|
||||||
|
assert current_ipc_ctx() is None
|
||||||
|
|
||||||
|
if invalid_started:
|
||||||
|
msg_type_str: str = 'Started'
|
||||||
|
bad_value: int = 10
|
||||||
|
elif invalid_return:
|
||||||
|
msg_type_str: str = 'Return'
|
||||||
|
bad_value: str = 'yo'
|
||||||
|
else:
|
||||||
|
# XXX but should never be used below then..
|
||||||
|
msg_type_str: str = ''
|
||||||
|
bad_value: str = ''
|
||||||
|
|
||||||
|
maybe_mte: MsgTypeError|None = None
|
||||||
|
should_raise: Exception|None = (
|
||||||
|
MsgTypeError if (
|
||||||
|
invalid_return
|
||||||
|
or
|
||||||
|
invalid_started
|
||||||
|
) else None
|
||||||
|
)
|
||||||
|
async with (
|
||||||
|
maybe_expect_raises(
|
||||||
|
raises=should_raise,
|
||||||
|
ensure_in_message=[
|
||||||
|
f"invalid `{msg_type_str}` msg payload",
|
||||||
|
f'{bad_value}',
|
||||||
|
f'has type {type(bad_value)!r}',
|
||||||
|
'not match type-spec',
|
||||||
|
f'`{msg_type_str}.pld: PldMsg|NoneType`',
|
||||||
|
],
|
||||||
|
# only for debug
|
||||||
|
# post_mortem=True,
|
||||||
|
),
|
||||||
|
p.open_context(
|
||||||
|
child,
|
||||||
|
return_value=return_value,
|
||||||
|
started_value=started_value,
|
||||||
|
validate_pld_spec=pld_check_started_value,
|
||||||
|
) as (ctx, first),
|
||||||
|
):
|
||||||
|
# now opened with 'child' sub
|
||||||
|
assert current_ipc_ctx() is ctx
|
||||||
|
|
||||||
|
assert type(first) is PldMsg
|
||||||
|
assert first.field == 'yo'
|
||||||
|
|
||||||
|
try:
|
||||||
|
res: None|PldMsg = await ctx.result(hide_tb=False)
|
||||||
|
assert res is None
|
||||||
|
except MsgTypeError as mte:
|
||||||
|
maybe_mte = mte
|
||||||
|
if not invalid_return:
|
||||||
|
raise
|
||||||
|
|
||||||
|
# expected this invalid `Return.pld` so audit
|
||||||
|
# the error state + meta-data
|
||||||
|
assert mte.expected_msg_type is Return
|
||||||
|
assert mte.cid == ctx.cid
|
||||||
|
mte_repr: str = repr(mte)
|
||||||
|
for line in mte.message.splitlines():
|
||||||
|
assert line in mte_repr
|
||||||
|
|
||||||
|
assert mte.tb_str
|
||||||
|
# await tractor.pause(shield=True)
|
||||||
|
|
||||||
|
# verify expected remote mte deats
|
||||||
|
assert ctx._local_error is None
|
||||||
|
assert (
|
||||||
|
mte is
|
||||||
|
ctx._remote_error is
|
||||||
|
ctx.maybe_error is
|
||||||
|
ctx.outcome
|
||||||
|
)
|
||||||
|
|
||||||
|
if should_raise is None:
|
||||||
|
assert maybe_mte is None
|
||||||
|
|
||||||
|
await p.cancel_actor()
|
||||||
|
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -5,8 +5,7 @@ import pytest
|
||||||
import trio
|
import trio
|
||||||
import tractor
|
import tractor
|
||||||
from tractor.experimental import msgpub
|
from tractor.experimental import msgpub
|
||||||
|
from tractor._testing import tractor_test
|
||||||
from conftest import tractor_test
|
|
||||||
|
|
||||||
|
|
||||||
def test_type_checks():
|
def test_type_checks():
|
||||||
|
|
@ -160,7 +159,7 @@ async def test_required_args(callwith_expecterror):
|
||||||
)
|
)
|
||||||
def test_multi_actor_subs_arbiter_pub(
|
def test_multi_actor_subs_arbiter_pub(
|
||||||
loglevel,
|
loglevel,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
pub_actor,
|
pub_actor,
|
||||||
):
|
):
|
||||||
"""Try out the neato @pub decorator system.
|
"""Try out the neato @pub decorator system.
|
||||||
|
|
@ -170,7 +169,7 @@ def test_multi_actor_subs_arbiter_pub(
|
||||||
async def main():
|
async def main():
|
||||||
|
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
arbiter_addr=arb_addr,
|
registry_addrs=[reg_addr],
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
) as n:
|
) as n:
|
||||||
|
|
||||||
|
|
@ -255,12 +254,12 @@ def test_multi_actor_subs_arbiter_pub(
|
||||||
|
|
||||||
def test_single_subactor_pub_multitask_subs(
|
def test_single_subactor_pub_multitask_subs(
|
||||||
loglevel,
|
loglevel,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
):
|
):
|
||||||
async def main():
|
async def main():
|
||||||
|
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
arbiter_addr=arb_addr,
|
registry_addrs=[reg_addr],
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
) as n:
|
) as n:
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,237 @@
|
||||||
|
'''
|
||||||
|
Special case testing for issues not (dis)covered in the primary
|
||||||
|
`Context` related functional/scenario suites.
|
||||||
|
|
||||||
|
**NOTE: this mod is a WIP** space for handling
|
||||||
|
odd/rare/undiscovered/not-yet-revealed faults which either
|
||||||
|
loudly (ideal case) breakl our supervision protocol
|
||||||
|
or (worst case) result in distributed sys hangs.
|
||||||
|
|
||||||
|
Suites here further try to clarify (if [partially] ill-defined) and
|
||||||
|
verify our edge case semantics for inter-actor-relayed-exceptions
|
||||||
|
including,
|
||||||
|
|
||||||
|
- lowlevel: what remote obj-data is interchanged for IPC and what is
|
||||||
|
native-obj form is expected from unpacking in the the new
|
||||||
|
mem-domain.
|
||||||
|
|
||||||
|
- which kinds of `RemoteActorError` (and its derivs) are expected by which
|
||||||
|
(types of) peers (parent, child, sibling, etc) with what
|
||||||
|
particular meta-data set such as,
|
||||||
|
|
||||||
|
- `.src_uid`: the original (maybe) peer who raised.
|
||||||
|
- `.relay_uid`: the next-hop-peer who sent it.
|
||||||
|
- `.relay_path`: the sequence of peer actor hops.
|
||||||
|
- `.is_inception`: a predicate that denotes multi-hop remote errors.
|
||||||
|
|
||||||
|
- when should `ExceptionGroup`s be relayed from a particular
|
||||||
|
remote endpoint, they should never be caused by implicit `._rpc`
|
||||||
|
nursery machinery!
|
||||||
|
|
||||||
|
- various special `trio` edge cases around its cancellation semantics
|
||||||
|
and how we (currently) leverage `trio.Cancelled` as a signal for
|
||||||
|
whether a `Context` task should raise `ContextCancelled` (ctx).
|
||||||
|
|
||||||
|
'''
|
||||||
|
import pytest
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
from tractor import ( # typing
|
||||||
|
ActorNursery,
|
||||||
|
Portal,
|
||||||
|
Context,
|
||||||
|
ContextCancelled,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def sleep_n_chkpt_in_finally(
|
||||||
|
ctx: Context,
|
||||||
|
sleep_n_raise: bool,
|
||||||
|
|
||||||
|
chld_raise_delay: float,
|
||||||
|
chld_finally_delay: float,
|
||||||
|
|
||||||
|
rent_cancels: bool,
|
||||||
|
rent_ctxc_delay: float,
|
||||||
|
|
||||||
|
expect_exc: str|None = None,
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
'''
|
||||||
|
Sync, open a tn, then wait for cancel, run a chkpt inside
|
||||||
|
the user's `finally:` teardown.
|
||||||
|
|
||||||
|
This covers a footgun case that `trio` core doesn't seem to care about
|
||||||
|
wherein an exc can be masked by a `trio.Cancelled` raised inside a tn emedded
|
||||||
|
`finally:`.
|
||||||
|
|
||||||
|
Also see `test_trioisms::test_acm_embedded_nursery_propagates_enter_err`
|
||||||
|
for the down and gritty details.
|
||||||
|
|
||||||
|
Since a `@context` endpoint fn can also contain code like this,
|
||||||
|
**and** bc we currently have no easy way other then
|
||||||
|
`trio.Cancelled` to signal cancellation on each side of an IPC `Context`,
|
||||||
|
the footgun issue can compound itself as demonstrated in this suite..
|
||||||
|
|
||||||
|
Here are some edge cases codified with our WIP "sclang" syntax
|
||||||
|
(note the parent(rent)/child(chld) naming here is just
|
||||||
|
pragmatism, generally these most of these cases can occurr
|
||||||
|
regardless of the distributed-task's supervision hiearchy),
|
||||||
|
|
||||||
|
- rent c)=> chld.raises-then-taskc-in-finally
|
||||||
|
|_ chld's body raises an `exc: BaseException`.
|
||||||
|
_ in its `finally:` block it runs a chkpoint
|
||||||
|
which raises a taskc (`trio.Cancelled`) which
|
||||||
|
masks `exc` instead raising taskc up to the first tn.
|
||||||
|
_ the embedded/chld tn captures the masking taskc and then
|
||||||
|
raises it up to the ._rpc-ep-tn instead of `exc`.
|
||||||
|
_ the rent thinks the child ctxc-ed instead of errored..
|
||||||
|
|
||||||
|
'''
|
||||||
|
await ctx.started()
|
||||||
|
|
||||||
|
if expect_exc:
|
||||||
|
expect_exc: BaseException = tractor._exceptions.get_err_type(
|
||||||
|
type_name=expect_exc,
|
||||||
|
)
|
||||||
|
|
||||||
|
berr: BaseException|None = None
|
||||||
|
try:
|
||||||
|
if not sleep_n_raise:
|
||||||
|
await trio.sleep_forever()
|
||||||
|
elif sleep_n_raise:
|
||||||
|
|
||||||
|
# XXX this sleep is less then the sleep the parent
|
||||||
|
# does before calling `ctx.cancel()`
|
||||||
|
await trio.sleep(chld_raise_delay)
|
||||||
|
|
||||||
|
# XXX this will be masked by a taskc raised in
|
||||||
|
# the `finally:` if this fn doesn't terminate
|
||||||
|
# before any ctxc-req arrives AND a checkpoint is hit
|
||||||
|
# in that `finally:`.
|
||||||
|
raise RuntimeError('my app krurshed..')
|
||||||
|
|
||||||
|
except BaseException as _berr:
|
||||||
|
berr = _berr
|
||||||
|
|
||||||
|
# TODO: it'd sure be nice to be able to inject our own
|
||||||
|
# `ContextCancelled` here instead of of `trio.Cancelled`
|
||||||
|
# so that our runtime can expect it and this "user code"
|
||||||
|
# would be able to tell the diff between a generic trio
|
||||||
|
# cancel and a tractor runtime-IPC cancel.
|
||||||
|
if expect_exc:
|
||||||
|
if not isinstance(
|
||||||
|
berr,
|
||||||
|
expect_exc,
|
||||||
|
):
|
||||||
|
raise ValueError(
|
||||||
|
f'Unexpected exc type ??\n'
|
||||||
|
f'{berr!r}\n'
|
||||||
|
f'\n'
|
||||||
|
f'Expected a {expect_exc!r}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
raise berr
|
||||||
|
|
||||||
|
# simulate what user code might try even though
|
||||||
|
# it's a known boo-boo..
|
||||||
|
finally:
|
||||||
|
# maybe wait for rent ctxc to arrive
|
||||||
|
with trio.CancelScope(shield=True):
|
||||||
|
await trio.sleep(chld_finally_delay)
|
||||||
|
|
||||||
|
# !!XXX this will raise `trio.Cancelled` which
|
||||||
|
# will mask the RTE from above!!!
|
||||||
|
#
|
||||||
|
# YES, it's the same case as our extant
|
||||||
|
# `test_trioisms::test_acm_embedded_nursery_propagates_enter_err`
|
||||||
|
try:
|
||||||
|
await trio.lowlevel.checkpoint()
|
||||||
|
except trio.Cancelled as taskc:
|
||||||
|
if (scope_err := taskc.__context__):
|
||||||
|
print(
|
||||||
|
f'XXX MASKED REMOTE ERROR XXX\n'
|
||||||
|
f'ENDPOINT exception -> {scope_err!r}\n'
|
||||||
|
f'will be masked by -> {taskc!r}\n'
|
||||||
|
)
|
||||||
|
# await tractor.pause(shield=True)
|
||||||
|
|
||||||
|
raise taskc
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'chld_callspec',
|
||||||
|
[
|
||||||
|
dict(
|
||||||
|
sleep_n_raise=None,
|
||||||
|
chld_raise_delay=0.1,
|
||||||
|
chld_finally_delay=0.1,
|
||||||
|
expect_exc='Cancelled',
|
||||||
|
rent_cancels=True,
|
||||||
|
rent_ctxc_delay=0.1,
|
||||||
|
),
|
||||||
|
dict(
|
||||||
|
sleep_n_raise='RuntimeError',
|
||||||
|
chld_raise_delay=0.1,
|
||||||
|
chld_finally_delay=1,
|
||||||
|
expect_exc='RuntimeError',
|
||||||
|
rent_cancels=False,
|
||||||
|
rent_ctxc_delay=0.1,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
ids=lambda item: f'chld_callspec={item!r}'
|
||||||
|
)
|
||||||
|
def test_unmasked_remote_exc(
|
||||||
|
debug_mode: bool,
|
||||||
|
chld_callspec: dict,
|
||||||
|
tpt_proto: str,
|
||||||
|
):
|
||||||
|
expect_exc_str: str|None = chld_callspec['sleep_n_raise']
|
||||||
|
rent_ctxc_delay: float|None = chld_callspec['rent_ctxc_delay']
|
||||||
|
async def main():
|
||||||
|
an: ActorNursery
|
||||||
|
async with tractor.open_nursery(
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
enable_transports=[tpt_proto],
|
||||||
|
) as an:
|
||||||
|
ptl: Portal = await an.start_actor(
|
||||||
|
'cancellee',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
)
|
||||||
|
ctx: Context
|
||||||
|
async with (
|
||||||
|
ptl.open_context(
|
||||||
|
sleep_n_chkpt_in_finally,
|
||||||
|
**chld_callspec,
|
||||||
|
) as (ctx, sent),
|
||||||
|
):
|
||||||
|
assert not sent
|
||||||
|
await trio.sleep(rent_ctxc_delay)
|
||||||
|
await ctx.cancel()
|
||||||
|
|
||||||
|
# recv error or result from chld
|
||||||
|
ctxc: ContextCancelled = await ctx.wait_for_result()
|
||||||
|
assert (
|
||||||
|
ctxc is ctx.outcome
|
||||||
|
and
|
||||||
|
isinstance(ctxc, ContextCancelled)
|
||||||
|
)
|
||||||
|
|
||||||
|
# always graceful terminate the sub in non-error cases
|
||||||
|
await an.cancel()
|
||||||
|
|
||||||
|
if expect_exc_str:
|
||||||
|
expect_exc: BaseException = tractor._exceptions.get_err_type(
|
||||||
|
type_name=expect_exc_str,
|
||||||
|
)
|
||||||
|
with pytest.raises(
|
||||||
|
expected_exception=tractor.RemoteActorError,
|
||||||
|
) as excinfo:
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
rae = excinfo.value
|
||||||
|
assert expect_exc == rae.boxed_type
|
||||||
|
|
||||||
|
else:
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
'''
|
'''
|
||||||
Async context manager cache api testing: ``trionics.maybe_open_context():``
|
Suites for our `.trionics.maybe_open_context()` multi-task
|
||||||
|
shared-cached `@acm` API.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
from contextlib import asynccontextmanager as acm
|
from contextlib import asynccontextmanager as acm
|
||||||
|
|
@ -9,6 +10,15 @@ from typing import Awaitable
|
||||||
import pytest
|
import pytest
|
||||||
import trio
|
import trio
|
||||||
import tractor
|
import tractor
|
||||||
|
from tractor.trionics import (
|
||||||
|
maybe_open_context,
|
||||||
|
)
|
||||||
|
from tractor.log import (
|
||||||
|
get_console_log,
|
||||||
|
get_logger,
|
||||||
|
)
|
||||||
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
_resource: int = 0
|
_resource: int = 0
|
||||||
|
|
@ -34,7 +44,6 @@ def test_resource_only_entered_once(key_on):
|
||||||
global _resource
|
global _resource
|
||||||
_resource = 0
|
_resource = 0
|
||||||
|
|
||||||
kwargs = {}
|
|
||||||
key = None
|
key = None
|
||||||
if key_on == 'key_value':
|
if key_on == 'key_value':
|
||||||
key = 'some_common_key'
|
key = 'some_common_key'
|
||||||
|
|
@ -53,7 +62,7 @@ def test_resource_only_entered_once(key_on):
|
||||||
# different task names per task will be used
|
# different task names per task will be used
|
||||||
kwargs = {'task_name': name}
|
kwargs = {'task_name': name}
|
||||||
|
|
||||||
async with tractor.trionics.maybe_open_context(
|
async with maybe_open_context(
|
||||||
maybe_increment_counter,
|
maybe_increment_counter,
|
||||||
kwargs=kwargs,
|
kwargs=kwargs,
|
||||||
key=key,
|
key=key,
|
||||||
|
|
@ -73,11 +82,13 @@ def test_resource_only_entered_once(key_on):
|
||||||
with trio.move_on_after(0.5):
|
with trio.move_on_after(0.5):
|
||||||
async with (
|
async with (
|
||||||
tractor.open_root_actor(),
|
tractor.open_root_actor(),
|
||||||
trio.open_nursery() as n,
|
trio.open_nursery() as tn,
|
||||||
):
|
):
|
||||||
|
|
||||||
for i in range(10):
|
for i in range(10):
|
||||||
n.start_soon(enter_cached_mngr, f'task_{i}')
|
tn.start_soon(
|
||||||
|
enter_cached_mngr,
|
||||||
|
f'task_{i}',
|
||||||
|
)
|
||||||
await trio.sleep(0.001)
|
await trio.sleep(0.001)
|
||||||
|
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
|
|
@ -99,27 +110,55 @@ async def streamer(
|
||||||
|
|
||||||
|
|
||||||
@acm
|
@acm
|
||||||
async def open_stream() -> Awaitable[tractor.MsgStream]:
|
async def open_stream() -> Awaitable[
|
||||||
|
tuple[
|
||||||
|
tractor.ActorNursery,
|
||||||
|
tractor.MsgStream,
|
||||||
|
]
|
||||||
|
]:
|
||||||
|
try:
|
||||||
|
async with tractor.open_nursery() as an:
|
||||||
|
portal = await an.start_actor(
|
||||||
|
'streamer',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
async with (
|
||||||
|
portal.open_context(streamer) as (ctx, first),
|
||||||
|
ctx.open_stream() as stream,
|
||||||
|
):
|
||||||
|
print('Entered open_stream() caller')
|
||||||
|
yield an, stream
|
||||||
|
print('Exited open_stream() caller')
|
||||||
|
|
||||||
async with tractor.open_nursery() as tn:
|
finally:
|
||||||
portal = await tn.start_actor('streamer', enable_modules=[__name__])
|
print(
|
||||||
async with (
|
'Cancelling streamer with,\n'
|
||||||
portal.open_context(streamer) as (ctx, first),
|
'=> `Portal.cancel_actor()`'
|
||||||
ctx.open_stream() as stream,
|
)
|
||||||
):
|
await portal.cancel_actor()
|
||||||
yield stream
|
print('Cancelled streamer')
|
||||||
|
|
||||||
await portal.cancel_actor()
|
except Exception as err:
|
||||||
print('CANCELLED STREAMER')
|
print(
|
||||||
|
f'`open_stream()` errored?\n'
|
||||||
|
f'{err!r}\n'
|
||||||
|
)
|
||||||
|
await tractor.pause(shield=True)
|
||||||
|
raise err
|
||||||
|
|
||||||
|
|
||||||
@acm
|
@acm
|
||||||
async def maybe_open_stream(taskname: str):
|
async def maybe_open_stream(taskname: str):
|
||||||
async with tractor.trionics.maybe_open_context(
|
async with maybe_open_context(
|
||||||
# NOTE: all secondary tasks should cache hit on the same key
|
# NOTE: all secondary tasks should cache hit on the same key
|
||||||
acm_func=open_stream,
|
acm_func=open_stream,
|
||||||
) as (cache_hit, stream):
|
) as (
|
||||||
|
cache_hit,
|
||||||
|
(an, stream)
|
||||||
|
):
|
||||||
|
# when the actor + portal + ctx + stream has already been
|
||||||
|
# allocated we want to just bcast to this task.
|
||||||
if cache_hit:
|
if cache_hit:
|
||||||
print(f'{taskname} loaded from cache')
|
print(f'{taskname} loaded from cache')
|
||||||
|
|
||||||
|
|
@ -127,27 +166,77 @@ async def maybe_open_stream(taskname: str):
|
||||||
# if this feed is already allocated by the first
|
# if this feed is already allocated by the first
|
||||||
# task that entereed
|
# task that entereed
|
||||||
async with stream.subscribe() as bstream:
|
async with stream.subscribe() as bstream:
|
||||||
yield bstream
|
yield an, bstream
|
||||||
|
print(
|
||||||
|
f'cached task exited\n'
|
||||||
|
f')>\n'
|
||||||
|
f' |_{taskname}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
# we should always unreg the "cloned" bcrc for this
|
||||||
|
# consumer-task
|
||||||
|
assert id(bstream) not in bstream._state.subs
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# yield the actual stream
|
# yield the actual stream
|
||||||
yield stream
|
try:
|
||||||
|
yield an, stream
|
||||||
|
finally:
|
||||||
|
print(
|
||||||
|
f'NON-cached task exited\n'
|
||||||
|
f')>\n'
|
||||||
|
f' |_{taskname}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
first_bstream = stream._broadcaster
|
||||||
|
bcrx_state = first_bstream._state
|
||||||
|
subs: dict[int, int] = bcrx_state.subs
|
||||||
|
if len(subs) == 1:
|
||||||
|
assert id(first_bstream) in subs
|
||||||
|
# ^^TODO! the bcrx should always de-allocate all subs,
|
||||||
|
# including the implicit first one allocated on entry
|
||||||
|
# by the first subscribing peer task, no?
|
||||||
|
#
|
||||||
|
# -[ ] adjust `MsgStream.subscribe()` to do this mgmt!
|
||||||
|
# |_ allows reverting `MsgStream.receive()` to the
|
||||||
|
# non-bcaster method.
|
||||||
|
# |_ we can decide whether to reset `._broadcaster`?
|
||||||
|
#
|
||||||
|
# await tractor.pause(shield=True)
|
||||||
|
|
||||||
|
|
||||||
def test_open_local_sub_to_stream():
|
def test_open_local_sub_to_stream(
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
'''
|
'''
|
||||||
Verify a single inter-actor stream can can be fanned-out shared to
|
Verify a single inter-actor stream can can be fanned-out shared to
|
||||||
N local tasks using ``trionics.maybe_open_context():``.
|
N local tasks using `trionics.maybe_open_context()`.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
timeout = 3 if platform.system() != "Windows" else 10
|
timeout: float = 3.6
|
||||||
|
if platform.system() == "Windows":
|
||||||
|
timeout: float = 10
|
||||||
|
|
||||||
|
if debug_mode:
|
||||||
|
timeout = 999
|
||||||
|
print(f'IN debug_mode, setting large timeout={timeout!r}..')
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
|
|
||||||
full = list(range(1000))
|
full = list(range(1000))
|
||||||
|
an: tractor.ActorNursery|None = None
|
||||||
|
num_tasks: int = 10
|
||||||
|
|
||||||
async def get_sub_and_pull(taskname: str):
|
async def get_sub_and_pull(taskname: str):
|
||||||
|
|
||||||
|
nonlocal an
|
||||||
|
|
||||||
|
stream: tractor.MsgStream
|
||||||
async with (
|
async with (
|
||||||
maybe_open_stream(taskname) as stream,
|
maybe_open_stream(taskname) as (
|
||||||
|
an,
|
||||||
|
stream,
|
||||||
|
),
|
||||||
):
|
):
|
||||||
if '0' in taskname:
|
if '0' in taskname:
|
||||||
assert isinstance(stream, tractor.MsgStream)
|
assert isinstance(stream, tractor.MsgStream)
|
||||||
|
|
@ -159,24 +248,159 @@ def test_open_local_sub_to_stream():
|
||||||
|
|
||||||
first = await stream.receive()
|
first = await stream.receive()
|
||||||
print(f'{taskname} started with value {first}')
|
print(f'{taskname} started with value {first}')
|
||||||
seq = []
|
seq: list[int] = []
|
||||||
async for msg in stream:
|
async for msg in stream:
|
||||||
seq.append(msg)
|
seq.append(msg)
|
||||||
|
|
||||||
assert set(seq).issubset(set(full))
|
assert set(seq).issubset(set(full))
|
||||||
|
|
||||||
|
# end of @acm block
|
||||||
print(f'{taskname} finished')
|
print(f'{taskname} finished')
|
||||||
|
|
||||||
with trio.fail_after(timeout):
|
root: tractor.Actor
|
||||||
|
with trio.fail_after(timeout) as cs:
|
||||||
# TODO: turns out this isn't multi-task entrant XD
|
# TODO: turns out this isn't multi-task entrant XD
|
||||||
# We probably need an indepotent entry semantic?
|
# We probably need an indepotent entry semantic?
|
||||||
async with tractor.open_root_actor():
|
async with tractor.open_root_actor(
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
# maybe_enable_greenback=True,
|
||||||
|
#
|
||||||
|
# ^TODO? doesn't seem to mk breakpoint() usage work
|
||||||
|
# bc each bg task needs to open a portal??
|
||||||
|
# - [ ] we should consider making this part of
|
||||||
|
# our taskman defaults?
|
||||||
|
# |_see https://github.com/goodboy/tractor/pull/363
|
||||||
|
#
|
||||||
|
) as root:
|
||||||
|
assert root.is_registrar
|
||||||
|
|
||||||
async with (
|
async with (
|
||||||
trio.open_nursery() as nurse,
|
trio.open_nursery() as tn,
|
||||||
):
|
):
|
||||||
for i in range(10):
|
for i in range(num_tasks):
|
||||||
nurse.start_soon(get_sub_and_pull, f'task_{i}')
|
tn.start_soon(
|
||||||
|
get_sub_and_pull,
|
||||||
|
f'task_{i}',
|
||||||
|
)
|
||||||
await trio.sleep(0.001)
|
await trio.sleep(0.001)
|
||||||
|
|
||||||
print('all consumer tasks finished')
|
print('all consumer tasks finished!')
|
||||||
|
|
||||||
|
# ?XXX, ensure actor-nursery is shutdown or we might
|
||||||
|
# hang here due to a minor task deadlock/race-condition?
|
||||||
|
#
|
||||||
|
# - seems that all we need is a checkpoint to ensure
|
||||||
|
# the last suspended task, which is inside
|
||||||
|
# `.maybe_open_context()`, can do the
|
||||||
|
# `Portal.cancel_actor()` call?
|
||||||
|
#
|
||||||
|
# - if that bg task isn't resumed, then this blocks
|
||||||
|
# timeout might hit before that?
|
||||||
|
#
|
||||||
|
if root.ipc_server.has_peers():
|
||||||
|
await trio.lowlevel.checkpoint()
|
||||||
|
|
||||||
|
# alt approach, cancel the entire `an`
|
||||||
|
# await tractor.pause()
|
||||||
|
# await an.cancel()
|
||||||
|
|
||||||
|
# end of runtime scope
|
||||||
|
print('root actor terminated.')
|
||||||
|
|
||||||
|
if cs.cancelled_caught:
|
||||||
|
pytest.fail(
|
||||||
|
'Should NOT time out in `open_root_actor()` ?'
|
||||||
|
)
|
||||||
|
|
||||||
|
print('exiting main.')
|
||||||
|
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@acm
|
||||||
|
async def cancel_outer_cs(
|
||||||
|
cs: trio.CancelScope|None = None,
|
||||||
|
delay: float = 0,
|
||||||
|
):
|
||||||
|
# on first task delay this enough to block
|
||||||
|
# the 2nd task but then cancel it mid sleep
|
||||||
|
# so that the tn.start() inside the key-err handler block
|
||||||
|
# is cancelled and would previously corrupt the
|
||||||
|
# mutext state.
|
||||||
|
log.info(f'task entering sleep({delay})')
|
||||||
|
await trio.sleep(delay)
|
||||||
|
if cs:
|
||||||
|
log.info('task calling cs.cancel()')
|
||||||
|
cs.cancel()
|
||||||
|
trio.lowlevel.checkpoint()
|
||||||
|
yield
|
||||||
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
|
||||||
|
def test_lock_not_corrupted_on_fast_cancel(
|
||||||
|
debug_mode: bool,
|
||||||
|
loglevel: str,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Verify that if the caching-task (the first to enter
|
||||||
|
`maybe_open_context()`) is cancelled mid-cache-miss, the embedded
|
||||||
|
mutex can never be left in a corrupted state.
|
||||||
|
|
||||||
|
That is, the lock is always eventually released ensuring a peer
|
||||||
|
(cache-hitting) task will never,
|
||||||
|
|
||||||
|
- be left to inf-block/hang on the `lock.acquire()`.
|
||||||
|
- try to release the lock when still owned by the caching-task
|
||||||
|
due to it having erronously exited without calling
|
||||||
|
`lock.release()`.
|
||||||
|
|
||||||
|
|
||||||
|
'''
|
||||||
|
delay: float = 1.
|
||||||
|
|
||||||
|
async def use_moc(
|
||||||
|
cs: trio.CancelScope|None,
|
||||||
|
delay: float,
|
||||||
|
):
|
||||||
|
log.info('task entering moc')
|
||||||
|
async with maybe_open_context(
|
||||||
|
cancel_outer_cs,
|
||||||
|
kwargs={
|
||||||
|
'cs': cs,
|
||||||
|
'delay': delay,
|
||||||
|
},
|
||||||
|
) as (cache_hit, _null):
|
||||||
|
if cache_hit:
|
||||||
|
log.info('2nd task entered')
|
||||||
|
else:
|
||||||
|
log.info('1st task entered')
|
||||||
|
|
||||||
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
with trio.fail_after(delay + 2):
|
||||||
|
async with (
|
||||||
|
tractor.open_root_actor(
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
loglevel=loglevel,
|
||||||
|
),
|
||||||
|
trio.open_nursery() as tn,
|
||||||
|
):
|
||||||
|
get_console_log('info')
|
||||||
|
log.info('yo starting')
|
||||||
|
cs = tn.cancel_scope
|
||||||
|
tn.start_soon(
|
||||||
|
use_moc,
|
||||||
|
cs,
|
||||||
|
delay,
|
||||||
|
name='child',
|
||||||
|
)
|
||||||
|
with trio.CancelScope() as rent_cs:
|
||||||
|
await use_moc(
|
||||||
|
cs=rent_cs,
|
||||||
|
delay=delay,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,211 @@
|
||||||
|
import time
|
||||||
|
|
||||||
|
import trio
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
import tractor
|
||||||
|
from tractor.ipc._ringbuf import (
|
||||||
|
open_ringbuf,
|
||||||
|
RBToken,
|
||||||
|
RingBuffSender,
|
||||||
|
RingBuffReceiver
|
||||||
|
)
|
||||||
|
from tractor._testing.samples import (
|
||||||
|
generate_sample_messages,
|
||||||
|
)
|
||||||
|
|
||||||
|
# in case you don't want to melt your cores, uncomment dis!
|
||||||
|
pytestmark = pytest.mark.skip
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def child_read_shm(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
msg_amount: int,
|
||||||
|
token: RBToken,
|
||||||
|
total_bytes: int,
|
||||||
|
) -> None:
|
||||||
|
recvd_bytes = 0
|
||||||
|
await ctx.started()
|
||||||
|
start_ts = time.time()
|
||||||
|
async with RingBuffReceiver(token) as receiver:
|
||||||
|
while recvd_bytes < total_bytes:
|
||||||
|
msg = await receiver.receive_some()
|
||||||
|
recvd_bytes += len(msg)
|
||||||
|
|
||||||
|
# make sure we dont hold any memoryviews
|
||||||
|
# before the ctx manager aclose()
|
||||||
|
msg = None
|
||||||
|
|
||||||
|
end_ts = time.time()
|
||||||
|
elapsed = end_ts - start_ts
|
||||||
|
elapsed_ms = int(elapsed * 1000)
|
||||||
|
|
||||||
|
print(f'\n\telapsed ms: {elapsed_ms}')
|
||||||
|
print(f'\tmsg/sec: {int(msg_amount / elapsed):,}')
|
||||||
|
print(f'\tbytes/sec: {int(recvd_bytes / elapsed):,}')
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def child_write_shm(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
msg_amount: int,
|
||||||
|
rand_min: int,
|
||||||
|
rand_max: int,
|
||||||
|
token: RBToken,
|
||||||
|
) -> None:
|
||||||
|
msgs, total_bytes = generate_sample_messages(
|
||||||
|
msg_amount,
|
||||||
|
rand_min=rand_min,
|
||||||
|
rand_max=rand_max,
|
||||||
|
)
|
||||||
|
await ctx.started(total_bytes)
|
||||||
|
async with RingBuffSender(token) as sender:
|
||||||
|
for msg in msgs:
|
||||||
|
await sender.send_all(msg)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'msg_amount,rand_min,rand_max,buf_size',
|
||||||
|
[
|
||||||
|
# simple case, fixed payloads, large buffer
|
||||||
|
(100_000, 0, 0, 10 * 1024),
|
||||||
|
|
||||||
|
# guaranteed wrap around on every write
|
||||||
|
(100, 10 * 1024, 20 * 1024, 10 * 1024),
|
||||||
|
|
||||||
|
# large payload size, but large buffer
|
||||||
|
(10_000, 256 * 1024, 512 * 1024, 10 * 1024 * 1024)
|
||||||
|
],
|
||||||
|
ids=[
|
||||||
|
'fixed_payloads_large_buffer',
|
||||||
|
'wrap_around_every_write',
|
||||||
|
'large_payloads_large_buffer',
|
||||||
|
]
|
||||||
|
)
|
||||||
|
def test_ringbuf(
|
||||||
|
msg_amount: int,
|
||||||
|
rand_min: int,
|
||||||
|
rand_max: int,
|
||||||
|
buf_size: int
|
||||||
|
):
|
||||||
|
async def main():
|
||||||
|
with open_ringbuf(
|
||||||
|
'test_ringbuf',
|
||||||
|
buf_size=buf_size
|
||||||
|
) as token:
|
||||||
|
proc_kwargs = {
|
||||||
|
'pass_fds': (token.write_eventfd, token.wrap_eventfd)
|
||||||
|
}
|
||||||
|
|
||||||
|
common_kwargs = {
|
||||||
|
'msg_amount': msg_amount,
|
||||||
|
'token': token,
|
||||||
|
}
|
||||||
|
async with tractor.open_nursery() as an:
|
||||||
|
send_p = await an.start_actor(
|
||||||
|
'ring_sender',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
proc_kwargs=proc_kwargs
|
||||||
|
)
|
||||||
|
recv_p = await an.start_actor(
|
||||||
|
'ring_receiver',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
proc_kwargs=proc_kwargs
|
||||||
|
)
|
||||||
|
async with (
|
||||||
|
send_p.open_context(
|
||||||
|
child_write_shm,
|
||||||
|
rand_min=rand_min,
|
||||||
|
rand_max=rand_max,
|
||||||
|
**common_kwargs
|
||||||
|
) as (sctx, total_bytes),
|
||||||
|
recv_p.open_context(
|
||||||
|
child_read_shm,
|
||||||
|
**common_kwargs,
|
||||||
|
total_bytes=total_bytes,
|
||||||
|
) as (sctx, _sent),
|
||||||
|
):
|
||||||
|
await recv_p.result()
|
||||||
|
|
||||||
|
await send_p.cancel_actor()
|
||||||
|
await recv_p.cancel_actor()
|
||||||
|
|
||||||
|
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def child_blocked_receiver(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
token: RBToken
|
||||||
|
):
|
||||||
|
async with RingBuffReceiver(token) as receiver:
|
||||||
|
await ctx.started()
|
||||||
|
await receiver.receive_some()
|
||||||
|
|
||||||
|
|
||||||
|
def test_ring_reader_cancel():
|
||||||
|
async def main():
|
||||||
|
with open_ringbuf('test_ring_cancel_reader') as token:
|
||||||
|
async with (
|
||||||
|
tractor.open_nursery() as an,
|
||||||
|
RingBuffSender(token) as _sender,
|
||||||
|
):
|
||||||
|
recv_p = await an.start_actor(
|
||||||
|
'ring_blocked_receiver',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
proc_kwargs={
|
||||||
|
'pass_fds': (token.write_eventfd, token.wrap_eventfd)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
async with (
|
||||||
|
recv_p.open_context(
|
||||||
|
child_blocked_receiver,
|
||||||
|
token=token
|
||||||
|
) as (sctx, _sent),
|
||||||
|
):
|
||||||
|
await trio.sleep(1)
|
||||||
|
await an.cancel()
|
||||||
|
|
||||||
|
|
||||||
|
with pytest.raises(tractor._exceptions.ContextCancelled):
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def child_blocked_sender(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
token: RBToken
|
||||||
|
):
|
||||||
|
async with RingBuffSender(token) as sender:
|
||||||
|
await ctx.started()
|
||||||
|
await sender.send_all(b'this will wrap')
|
||||||
|
|
||||||
|
|
||||||
|
def test_ring_sender_cancel():
|
||||||
|
async def main():
|
||||||
|
with open_ringbuf(
|
||||||
|
'test_ring_cancel_sender',
|
||||||
|
buf_size=1
|
||||||
|
) as token:
|
||||||
|
async with tractor.open_nursery() as an:
|
||||||
|
recv_p = await an.start_actor(
|
||||||
|
'ring_blocked_sender',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
proc_kwargs={
|
||||||
|
'pass_fds': (token.write_eventfd, token.wrap_eventfd)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
async with (
|
||||||
|
recv_p.open_context(
|
||||||
|
child_blocked_sender,
|
||||||
|
token=token
|
||||||
|
) as (sctx, _sent),
|
||||||
|
):
|
||||||
|
await trio.sleep(1)
|
||||||
|
await an.cancel()
|
||||||
|
|
||||||
|
|
||||||
|
with pytest.raises(tractor._exceptions.ContextCancelled):
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -0,0 +1,240 @@
|
||||||
|
'''
|
||||||
|
Special attention cases for using "infect `asyncio`" mode from a root
|
||||||
|
actor; i.e. not using a std `trio.run()` bootstrap.
|
||||||
|
|
||||||
|
'''
|
||||||
|
import asyncio
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
from tractor import (
|
||||||
|
to_asyncio,
|
||||||
|
)
|
||||||
|
from tests.test_infected_asyncio import (
|
||||||
|
aio_echo_server,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'raise_error_mid_stream',
|
||||||
|
[
|
||||||
|
False,
|
||||||
|
Exception,
|
||||||
|
KeyboardInterrupt,
|
||||||
|
],
|
||||||
|
ids='raise_error={}'.format,
|
||||||
|
)
|
||||||
|
def test_infected_root_actor(
|
||||||
|
raise_error_mid_stream: bool|Exception,
|
||||||
|
|
||||||
|
# conftest wide
|
||||||
|
loglevel: str,
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Verify you can run the `tractor` runtime with `Actor.is_infected_aio() == True`
|
||||||
|
in the root actor.
|
||||||
|
|
||||||
|
'''
|
||||||
|
async def _trio_main():
|
||||||
|
with trio.fail_after(2 if not debug_mode else 999):
|
||||||
|
first: str
|
||||||
|
chan: to_asyncio.LinkedTaskChannel
|
||||||
|
async with (
|
||||||
|
tractor.open_root_actor(
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
loglevel=loglevel,
|
||||||
|
),
|
||||||
|
to_asyncio.open_channel_from(
|
||||||
|
aio_echo_server,
|
||||||
|
) as (first, chan),
|
||||||
|
):
|
||||||
|
assert first == 'start'
|
||||||
|
|
||||||
|
for i in range(1000):
|
||||||
|
await chan.send(i)
|
||||||
|
out = await chan.receive()
|
||||||
|
assert out == i
|
||||||
|
print(f'asyncio echoing {i}')
|
||||||
|
|
||||||
|
if (
|
||||||
|
raise_error_mid_stream
|
||||||
|
and
|
||||||
|
i == 500
|
||||||
|
):
|
||||||
|
raise raise_error_mid_stream
|
||||||
|
|
||||||
|
if out is None:
|
||||||
|
try:
|
||||||
|
out = await chan.receive()
|
||||||
|
except trio.EndOfChannel:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
raise RuntimeError(
|
||||||
|
'aio channel never stopped?'
|
||||||
|
)
|
||||||
|
|
||||||
|
if raise_error_mid_stream:
|
||||||
|
with pytest.raises(raise_error_mid_stream):
|
||||||
|
tractor.to_asyncio.run_as_asyncio_guest(
|
||||||
|
trio_main=_trio_main,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
tractor.to_asyncio.run_as_asyncio_guest(
|
||||||
|
trio_main=_trio_main,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
async def sync_and_err(
|
||||||
|
# just signature placeholders for compat with
|
||||||
|
# ``to_asyncio.open_channel_from()``
|
||||||
|
to_trio: trio.MemorySendChannel,
|
||||||
|
from_trio: asyncio.Queue,
|
||||||
|
ev: asyncio.Event,
|
||||||
|
|
||||||
|
):
|
||||||
|
if to_trio:
|
||||||
|
to_trio.send_nowait('start')
|
||||||
|
|
||||||
|
await ev.wait()
|
||||||
|
raise RuntimeError('asyncio-side')
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'aio_err_trigger',
|
||||||
|
[
|
||||||
|
'before_start_point',
|
||||||
|
'after_trio_task_starts',
|
||||||
|
'after_start_point',
|
||||||
|
],
|
||||||
|
ids='aio_err_triggered={}'.format
|
||||||
|
)
|
||||||
|
def test_trio_prestarted_task_bubbles(
|
||||||
|
aio_err_trigger: str,
|
||||||
|
|
||||||
|
# conftest wide
|
||||||
|
loglevel: str,
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
|
async def pre_started_err(
|
||||||
|
raise_err: bool = False,
|
||||||
|
pre_sleep: float|None = None,
|
||||||
|
aio_trigger: asyncio.Event|None = None,
|
||||||
|
task_status=trio.TASK_STATUS_IGNORED,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Maybe pre-started error then sleep.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if pre_sleep is not None:
|
||||||
|
print(f'Sleeping from trio for {pre_sleep!r}s !')
|
||||||
|
await trio.sleep(pre_sleep)
|
||||||
|
|
||||||
|
# signal aio-task to raise JUST AFTER this task
|
||||||
|
# starts but has not yet `.started()`
|
||||||
|
if aio_trigger:
|
||||||
|
print('Signalling aio-task to raise from `trio`!!')
|
||||||
|
aio_trigger.set()
|
||||||
|
|
||||||
|
if raise_err:
|
||||||
|
print('Raising from trio!')
|
||||||
|
raise TypeError('trio-side')
|
||||||
|
|
||||||
|
task_status.started()
|
||||||
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
async def _trio_main():
|
||||||
|
with trio.fail_after(2 if not debug_mode else 999):
|
||||||
|
first: str
|
||||||
|
chan: to_asyncio.LinkedTaskChannel
|
||||||
|
aio_ev = asyncio.Event()
|
||||||
|
|
||||||
|
async with (
|
||||||
|
tractor.open_root_actor(
|
||||||
|
debug_mode=False,
|
||||||
|
loglevel=loglevel,
|
||||||
|
),
|
||||||
|
):
|
||||||
|
# TODO, tests for this with 3.13 egs?
|
||||||
|
# from tractor.devx import open_crash_handler
|
||||||
|
# with open_crash_handler():
|
||||||
|
async with (
|
||||||
|
# where we'll start a sub-task that errors BEFORE
|
||||||
|
# calling `.started()` such that the error should
|
||||||
|
# bubble before the guest run terminates!
|
||||||
|
trio.open_nursery() as tn,
|
||||||
|
|
||||||
|
# THEN start an infect task which should error just
|
||||||
|
# after the trio-side's task does.
|
||||||
|
to_asyncio.open_channel_from(
|
||||||
|
partial(
|
||||||
|
sync_and_err,
|
||||||
|
ev=aio_ev,
|
||||||
|
)
|
||||||
|
) as (first, chan),
|
||||||
|
):
|
||||||
|
|
||||||
|
for i in range(5):
|
||||||
|
pre_sleep: float|None = None
|
||||||
|
last_iter: bool = (i == 4)
|
||||||
|
|
||||||
|
# TODO, missing cases?
|
||||||
|
# -[ ] error as well on
|
||||||
|
# 'after_start_point' case as well for
|
||||||
|
# another case?
|
||||||
|
raise_err: bool = False
|
||||||
|
|
||||||
|
if last_iter:
|
||||||
|
raise_err: bool = True
|
||||||
|
|
||||||
|
# trigger aio task to error on next loop
|
||||||
|
# tick/checkpoint
|
||||||
|
if aio_err_trigger == 'before_start_point':
|
||||||
|
aio_ev.set()
|
||||||
|
|
||||||
|
pre_sleep: float = 0
|
||||||
|
|
||||||
|
await tn.start(
|
||||||
|
pre_started_err,
|
||||||
|
raise_err,
|
||||||
|
pre_sleep,
|
||||||
|
(aio_ev if (
|
||||||
|
aio_err_trigger == 'after_trio_task_starts'
|
||||||
|
and
|
||||||
|
last_iter
|
||||||
|
) else None
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
aio_err_trigger == 'after_start_point'
|
||||||
|
and
|
||||||
|
last_iter
|
||||||
|
):
|
||||||
|
aio_ev.set()
|
||||||
|
|
||||||
|
# ensure the trio-task's error bubbled despite the aio-side
|
||||||
|
# having (maybe) errored first.
|
||||||
|
if aio_err_trigger in (
|
||||||
|
'after_trio_task_starts',
|
||||||
|
'after_start_point',
|
||||||
|
):
|
||||||
|
patt: str = 'trio-side'
|
||||||
|
expect_exc = TypeError
|
||||||
|
|
||||||
|
# when aio errors BEFORE (last) trio task is scheduled, we should
|
||||||
|
# never see anythinb but the aio-side.
|
||||||
|
else:
|
||||||
|
patt: str = 'asyncio-side'
|
||||||
|
expect_exc = RuntimeError
|
||||||
|
|
||||||
|
with pytest.raises(expect_exc) as excinfo:
|
||||||
|
tractor.to_asyncio.run_as_asyncio_guest(
|
||||||
|
trio_main=_trio_main,
|
||||||
|
)
|
||||||
|
|
||||||
|
caught_exc = excinfo.value
|
||||||
|
assert patt in caught_exc.args
|
||||||
|
|
@ -0,0 +1,108 @@
|
||||||
|
'''
|
||||||
|
Runtime boot/init sanity.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import trio
|
||||||
|
|
||||||
|
import tractor
|
||||||
|
from tractor._exceptions import RuntimeFailure
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def open_new_root_in_sub(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
) -> None:
|
||||||
|
|
||||||
|
async with tractor.open_root_actor():
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'open_root_in',
|
||||||
|
['root', 'sub'],
|
||||||
|
ids='open_2nd_root_in={}'.format,
|
||||||
|
)
|
||||||
|
def test_only_one_root_actor(
|
||||||
|
open_root_in: str,
|
||||||
|
reg_addr: tuple,
|
||||||
|
debug_mode: bool
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Verify we specially fail whenever more then one root actor
|
||||||
|
is attempted to be opened within an already opened tree.
|
||||||
|
|
||||||
|
'''
|
||||||
|
async def main():
|
||||||
|
async with tractor.open_nursery() as an:
|
||||||
|
|
||||||
|
if open_root_in == 'root':
|
||||||
|
async with tractor.open_root_actor(
|
||||||
|
registry_addrs=[reg_addr],
|
||||||
|
):
|
||||||
|
pass
|
||||||
|
|
||||||
|
ptl: tractor.Portal = await an.start_actor(
|
||||||
|
name='bad_rooty_boi',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
)
|
||||||
|
|
||||||
|
async with ptl.open_context(
|
||||||
|
open_new_root_in_sub,
|
||||||
|
) as (ctx, first):
|
||||||
|
pass
|
||||||
|
|
||||||
|
if open_root_in == 'root':
|
||||||
|
with pytest.raises(
|
||||||
|
RuntimeFailure
|
||||||
|
) as excinfo:
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
else:
|
||||||
|
with pytest.raises(
|
||||||
|
tractor.RemoteActorError,
|
||||||
|
) as excinfo:
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
assert excinfo.value.boxed_type is RuntimeFailure
|
||||||
|
|
||||||
|
|
||||||
|
def test_implicit_root_via_first_nursery(
|
||||||
|
reg_addr: tuple,
|
||||||
|
debug_mode: bool
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
The first `ActorNursery` open should implicitly call
|
||||||
|
`_root.open_root_actor()`.
|
||||||
|
|
||||||
|
'''
|
||||||
|
async def main():
|
||||||
|
async with tractor.open_nursery() as an:
|
||||||
|
assert an._implicit_runtime_started
|
||||||
|
assert tractor.current_actor().aid.name == 'root'
|
||||||
|
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
|
||||||
|
def test_runtime_vars_unset(
|
||||||
|
reg_addr: tuple,
|
||||||
|
debug_mode: bool
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Ensure any `._state._runtime_vars` are restored to default values
|
||||||
|
after the root actor-runtime exits!
|
||||||
|
|
||||||
|
'''
|
||||||
|
assert not tractor._state._runtime_vars['_debug_mode']
|
||||||
|
async def main():
|
||||||
|
assert not tractor._state._runtime_vars['_debug_mode']
|
||||||
|
async with tractor.open_nursery(
|
||||||
|
debug_mode=True,
|
||||||
|
):
|
||||||
|
assert tractor._state._runtime_vars['_debug_mode']
|
||||||
|
|
||||||
|
# after runtime closure, should be reverted!
|
||||||
|
assert not tractor._state._runtime_vars['_debug_mode']
|
||||||
|
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
"""
|
'''
|
||||||
RPC related
|
RPC (or maybe better labelled as "RTS: remote task scheduling"?)
|
||||||
"""
|
related API and error checks.
|
||||||
|
|
||||||
|
'''
|
||||||
import itertools
|
import itertools
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
@ -13,9 +15,19 @@ async def sleep_back_actor(
|
||||||
func_name,
|
func_name,
|
||||||
func_defined,
|
func_defined,
|
||||||
exposed_mods,
|
exposed_mods,
|
||||||
|
*,
|
||||||
|
reg_addr: tuple,
|
||||||
):
|
):
|
||||||
if actor_name:
|
if actor_name:
|
||||||
async with tractor.find_actor(actor_name) as portal:
|
async with tractor.find_actor(
|
||||||
|
actor_name,
|
||||||
|
# NOTE: must be set manually since
|
||||||
|
# the subactor doesn't have the reg_addr
|
||||||
|
# fixture code run in it!
|
||||||
|
# TODO: maybe we should just set this once in the
|
||||||
|
# _state mod and derive to all children?
|
||||||
|
registry_addrs=[reg_addr],
|
||||||
|
) as portal:
|
||||||
try:
|
try:
|
||||||
await portal.run(__name__, func_name)
|
await portal.run(__name__, func_name)
|
||||||
except tractor.RemoteActorError as err:
|
except tractor.RemoteActorError as err:
|
||||||
|
|
@ -24,7 +36,7 @@ async def sleep_back_actor(
|
||||||
if not exposed_mods:
|
if not exposed_mods:
|
||||||
expect = tractor.ModuleNotExposed
|
expect = tractor.ModuleNotExposed
|
||||||
|
|
||||||
assert err.type is expect
|
assert err.boxed_type is expect
|
||||||
raise
|
raise
|
||||||
else:
|
else:
|
||||||
await trio.sleep(float('inf'))
|
await trio.sleep(float('inf'))
|
||||||
|
|
@ -42,14 +54,25 @@ async def short_sleep():
|
||||||
(['tmp_mod'], 'import doggy', ModuleNotFoundError),
|
(['tmp_mod'], 'import doggy', ModuleNotFoundError),
|
||||||
(['tmp_mod'], '4doggy', SyntaxError),
|
(['tmp_mod'], '4doggy', SyntaxError),
|
||||||
],
|
],
|
||||||
ids=['no_mods', 'this_mod', 'this_mod_bad_func', 'fail_to_import',
|
ids=[
|
||||||
'fail_on_syntax'],
|
'no_mods',
|
||||||
|
'this_mod',
|
||||||
|
'this_mod_bad_func',
|
||||||
|
'fail_to_import',
|
||||||
|
'fail_on_syntax',
|
||||||
|
],
|
||||||
)
|
)
|
||||||
def test_rpc_errors(arb_addr, to_call, testdir):
|
def test_rpc_errors(
|
||||||
"""Test errors when making various RPC requests to an actor
|
reg_addr,
|
||||||
|
to_call,
|
||||||
|
testdir,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Test errors when making various RPC requests to an actor
|
||||||
that either doesn't have the requested module exposed or doesn't define
|
that either doesn't have the requested module exposed or doesn't define
|
||||||
the named function.
|
the named function.
|
||||||
"""
|
|
||||||
|
'''
|
||||||
exposed_mods, funcname, inside_err = to_call
|
exposed_mods, funcname, inside_err = to_call
|
||||||
subactor_exposed_mods = []
|
subactor_exposed_mods = []
|
||||||
func_defined = globals().get(funcname, False)
|
func_defined = globals().get(funcname, False)
|
||||||
|
|
@ -77,8 +100,13 @@ def test_rpc_errors(arb_addr, to_call, testdir):
|
||||||
|
|
||||||
# spawn a subactor which calls us back
|
# spawn a subactor which calls us back
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
arbiter_addr=arb_addr,
|
registry_addrs=[reg_addr],
|
||||||
enable_modules=exposed_mods.copy(),
|
enable_modules=exposed_mods.copy(),
|
||||||
|
|
||||||
|
# NOTE: will halt test in REPL if uncommented, so only
|
||||||
|
# do that if actually debugging subactor but keep it
|
||||||
|
# disabled for the test.
|
||||||
|
# debug_mode=True,
|
||||||
) as n:
|
) as n:
|
||||||
|
|
||||||
actor = tractor.current_actor()
|
actor = tractor.current_actor()
|
||||||
|
|
@ -95,6 +123,7 @@ def test_rpc_errors(arb_addr, to_call, testdir):
|
||||||
exposed_mods=exposed_mods,
|
exposed_mods=exposed_mods,
|
||||||
func_defined=True if func_defined else False,
|
func_defined=True if func_defined else False,
|
||||||
enable_modules=subactor_exposed_mods,
|
enable_modules=subactor_exposed_mods,
|
||||||
|
reg_addr=reg_addr,
|
||||||
)
|
)
|
||||||
|
|
||||||
def run():
|
def run():
|
||||||
|
|
@ -105,18 +134,20 @@ def test_rpc_errors(arb_addr, to_call, testdir):
|
||||||
run()
|
run()
|
||||||
else:
|
else:
|
||||||
# underlying errors aren't propagated upwards (yet)
|
# underlying errors aren't propagated upwards (yet)
|
||||||
with pytest.raises(remote_err) as err:
|
with pytest.raises(
|
||||||
|
expected_exception=(remote_err, ExceptionGroup),
|
||||||
|
) as err:
|
||||||
run()
|
run()
|
||||||
|
|
||||||
# get raw instance from pytest wrapper
|
# get raw instance from pytest wrapper
|
||||||
value = err.value
|
value = err.value
|
||||||
|
|
||||||
# might get multiple `trio.Cancelled`s as well inside an inception
|
# might get multiple `trio.Cancelled`s as well inside an inception
|
||||||
if isinstance(value, trio.MultiError):
|
if isinstance(value, ExceptionGroup):
|
||||||
value = next(itertools.dropwhile(
|
value = next(itertools.dropwhile(
|
||||||
lambda exc: not isinstance(exc, tractor.RemoteActorError),
|
lambda exc: not isinstance(exc, tractor.RemoteActorError),
|
||||||
value.exceptions
|
value.exceptions
|
||||||
))
|
))
|
||||||
|
|
||||||
if getattr(value, 'type', None):
|
if getattr(value, 'type', None):
|
||||||
assert value.type is inside_err
|
assert value.boxed_type is inside_err
|
||||||
|
|
|
||||||
|
|
@ -8,7 +8,7 @@ import pytest
|
||||||
import trio
|
import trio
|
||||||
import tractor
|
import tractor
|
||||||
|
|
||||||
from conftest import tractor_test
|
from tractor._testing import tractor_test
|
||||||
|
|
||||||
|
|
||||||
_file_path: str = ''
|
_file_path: str = ''
|
||||||
|
|
@ -64,7 +64,8 @@ async def test_lifetime_stack_wipes_tmpfile(
|
||||||
|
|
||||||
except (
|
except (
|
||||||
tractor.RemoteActorError,
|
tractor.RemoteActorError,
|
||||||
tractor.BaseExceptionGroup,
|
# tractor.BaseExceptionGroup,
|
||||||
|
BaseExceptionGroup,
|
||||||
):
|
):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,167 @@
|
||||||
|
"""
|
||||||
|
Shared mem primitives and APIs.
|
||||||
|
|
||||||
|
"""
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
# import numpy
|
||||||
|
import pytest
|
||||||
|
import trio
|
||||||
|
import tractor
|
||||||
|
from tractor.ipc._shm import (
|
||||||
|
open_shm_list,
|
||||||
|
attach_shm_list,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def child_attach_shml_alot(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
shm_key: str,
|
||||||
|
) -> None:
|
||||||
|
|
||||||
|
await ctx.started(shm_key)
|
||||||
|
|
||||||
|
# now try to attach a boatload of times in a loop..
|
||||||
|
for _ in range(1000):
|
||||||
|
shml = attach_shm_list(
|
||||||
|
key=shm_key,
|
||||||
|
readonly=False,
|
||||||
|
)
|
||||||
|
assert shml.shm.name == shm_key
|
||||||
|
await trio.sleep(0.001)
|
||||||
|
|
||||||
|
|
||||||
|
def test_child_attaches_alot():
|
||||||
|
async def main():
|
||||||
|
async with tractor.open_nursery() as an:
|
||||||
|
|
||||||
|
# allocate writeable list in parent
|
||||||
|
key = f'shml_{uuid.uuid4()}'
|
||||||
|
shml = open_shm_list(
|
||||||
|
key=key,
|
||||||
|
)
|
||||||
|
|
||||||
|
portal = await an.start_actor(
|
||||||
|
'shm_attacher',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
)
|
||||||
|
|
||||||
|
async with (
|
||||||
|
portal.open_context(
|
||||||
|
child_attach_shml_alot,
|
||||||
|
shm_key=shml.key,
|
||||||
|
) as (ctx, start_val),
|
||||||
|
):
|
||||||
|
assert start_val == key
|
||||||
|
await ctx.result()
|
||||||
|
|
||||||
|
await portal.cancel_actor()
|
||||||
|
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
|
||||||
|
@tractor.context
|
||||||
|
async def child_read_shm_list(
|
||||||
|
ctx: tractor.Context,
|
||||||
|
shm_key: str,
|
||||||
|
use_str: bool,
|
||||||
|
frame_size: int,
|
||||||
|
) -> None:
|
||||||
|
|
||||||
|
# attach in child
|
||||||
|
shml = attach_shm_list(
|
||||||
|
key=shm_key,
|
||||||
|
# dtype=str if use_str else float,
|
||||||
|
)
|
||||||
|
await ctx.started(shml.key)
|
||||||
|
|
||||||
|
async with ctx.open_stream() as stream:
|
||||||
|
async for i in stream:
|
||||||
|
print(f'(child): reading shm list index: {i}')
|
||||||
|
|
||||||
|
if use_str:
|
||||||
|
expect = str(float(i))
|
||||||
|
else:
|
||||||
|
expect = float(i)
|
||||||
|
|
||||||
|
if frame_size == 1:
|
||||||
|
val = shml[i]
|
||||||
|
assert expect == val
|
||||||
|
print(f'(child): reading value: {val}')
|
||||||
|
else:
|
||||||
|
frame = shml[i - frame_size:i]
|
||||||
|
print(f'(child): reading frame: {frame}')
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'use_str',
|
||||||
|
[False, True],
|
||||||
|
ids=lambda i: f'use_str_values={i}',
|
||||||
|
)
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'frame_size',
|
||||||
|
[1, 2**6, 2**10],
|
||||||
|
ids=lambda i: f'frame_size={i}',
|
||||||
|
)
|
||||||
|
def test_parent_writer_child_reader(
|
||||||
|
use_str: bool,
|
||||||
|
frame_size: int,
|
||||||
|
):
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
async with tractor.open_nursery(
|
||||||
|
# debug_mode=True,
|
||||||
|
) as an:
|
||||||
|
|
||||||
|
portal = await an.start_actor(
|
||||||
|
'shm_reader',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
debug_mode=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# allocate writeable list in parent
|
||||||
|
key = 'shm_list'
|
||||||
|
seq_size = int(2 * 2 ** 10)
|
||||||
|
shml = open_shm_list(
|
||||||
|
key=key,
|
||||||
|
size=seq_size,
|
||||||
|
dtype=str if use_str else float,
|
||||||
|
readonly=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
async with (
|
||||||
|
portal.open_context(
|
||||||
|
child_read_shm_list,
|
||||||
|
shm_key=key,
|
||||||
|
use_str=use_str,
|
||||||
|
frame_size=frame_size,
|
||||||
|
) as (ctx, sent),
|
||||||
|
|
||||||
|
ctx.open_stream() as stream,
|
||||||
|
):
|
||||||
|
|
||||||
|
assert sent == key
|
||||||
|
|
||||||
|
for i in range(seq_size):
|
||||||
|
|
||||||
|
val = float(i)
|
||||||
|
if use_str:
|
||||||
|
val = str(val)
|
||||||
|
|
||||||
|
# print(f'(parent): writing {val}')
|
||||||
|
shml[i] = val
|
||||||
|
|
||||||
|
# only on frame fills do we
|
||||||
|
# signal to the child that a frame's
|
||||||
|
# worth is ready.
|
||||||
|
if (i % frame_size) == 0:
|
||||||
|
print(f'(parent): signalling frame full on {val}')
|
||||||
|
await stream.send(i)
|
||||||
|
else:
|
||||||
|
print(f'(parent): signalling final frame on {val}')
|
||||||
|
await stream.send(i)
|
||||||
|
|
||||||
|
await portal.cancel_actor()
|
||||||
|
|
||||||
|
trio.run(main)
|
||||||
|
|
@ -2,83 +2,110 @@
|
||||||
Spawning basics
|
Spawning basics
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from typing import Optional
|
from functools import partial
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
)
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import trio
|
import trio
|
||||||
import tractor
|
import tractor
|
||||||
|
|
||||||
from conftest import tractor_test
|
from tractor._testing import tractor_test
|
||||||
|
|
||||||
data_to_pass_down = {'doggy': 10, 'kitty': 4}
|
data_to_pass_down = {
|
||||||
|
'doggy': 10,
|
||||||
|
'kitty': 4,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
async def spawn(
|
async def spawn(
|
||||||
is_arbiter: bool,
|
should_be_root: bool,
|
||||||
data: dict,
|
data: dict,
|
||||||
arb_addr: tuple[str, int],
|
reg_addr: tuple[str, int],
|
||||||
|
|
||||||
|
debug_mode: bool = False,
|
||||||
):
|
):
|
||||||
namespaces = [__name__]
|
|
||||||
|
|
||||||
await trio.sleep(0.1)
|
await trio.sleep(0.1)
|
||||||
|
actor = tractor.current_actor(err_on_no_runtime=False)
|
||||||
|
|
||||||
async with tractor.open_root_actor(
|
if should_be_root:
|
||||||
arbiter_addr=arb_addr,
|
assert actor is None # no runtime yet
|
||||||
):
|
async with (
|
||||||
|
tractor.open_root_actor(
|
||||||
|
arbiter_addr=reg_addr,
|
||||||
|
),
|
||||||
|
tractor.open_nursery() as an,
|
||||||
|
):
|
||||||
|
# now runtime exists
|
||||||
|
actor: tractor.Actor = tractor.current_actor()
|
||||||
|
assert actor.is_arbiter == should_be_root
|
||||||
|
|
||||||
actor = tractor.current_actor()
|
# spawns subproc here
|
||||||
assert actor.is_arbiter == is_arbiter
|
portal: tractor.Portal = await an.run_in_actor(
|
||||||
data = data_to_pass_down
|
fn=spawn,
|
||||||
|
|
||||||
if actor.is_arbiter:
|
# spawning args
|
||||||
|
name='sub-actor',
|
||||||
|
enable_modules=[__name__],
|
||||||
|
|
||||||
async with tractor.open_nursery(
|
# passed to a subactor-recursive RPC invoke
|
||||||
) as nursery:
|
# of this same `spawn()` fn.
|
||||||
|
should_be_root=False,
|
||||||
|
data=data_to_pass_down,
|
||||||
|
reg_addr=reg_addr,
|
||||||
|
)
|
||||||
|
|
||||||
# forks here
|
assert len(an._children) == 1
|
||||||
portal = await nursery.run_in_actor(
|
assert (
|
||||||
spawn,
|
portal.channel.uid
|
||||||
is_arbiter=False,
|
in
|
||||||
name='sub-actor',
|
tractor.current_actor().ipc_server._peers
|
||||||
data=data,
|
)
|
||||||
arb_addr=arb_addr,
|
|
||||||
enable_modules=namespaces,
|
|
||||||
)
|
|
||||||
|
|
||||||
assert len(nursery._children) == 1
|
# get result from child subactor
|
||||||
assert portal.channel.uid in tractor.current_actor()._peers
|
result = await portal.result()
|
||||||
# be sure we can still get the result
|
assert result == 10
|
||||||
result = await portal.result()
|
return result
|
||||||
assert result == 10
|
else:
|
||||||
return result
|
assert actor.is_arbiter == should_be_root
|
||||||
else:
|
return 10
|
||||||
return 10
|
|
||||||
|
|
||||||
|
|
||||||
def test_local_arbiter_subactor_global_state(arb_addr):
|
def test_run_in_actor_same_func_in_child(
|
||||||
|
reg_addr: tuple,
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
result = trio.run(
|
result = trio.run(
|
||||||
spawn,
|
partial(
|
||||||
True,
|
spawn,
|
||||||
data_to_pass_down,
|
should_be_root=True,
|
||||||
arb_addr,
|
data=data_to_pass_down,
|
||||||
|
reg_addr=reg_addr,
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
)
|
||||||
)
|
)
|
||||||
assert result == 10
|
assert result == 10
|
||||||
|
|
||||||
|
|
||||||
async def movie_theatre_question():
|
async def movie_theatre_question():
|
||||||
"""A question asked in a dark theatre, in a tangent
|
'''
|
||||||
|
A question asked in a dark theatre, in a tangent
|
||||||
(errr, I mean different) process.
|
(errr, I mean different) process.
|
||||||
"""
|
|
||||||
|
'''
|
||||||
return 'have you ever seen a portal?'
|
return 'have you ever seen a portal?'
|
||||||
|
|
||||||
|
|
||||||
@tractor_test
|
@tractor_test
|
||||||
async def test_movie_theatre_convo(start_method):
|
async def test_movie_theatre_convo(start_method):
|
||||||
"""The main ``tractor`` routine.
|
'''
|
||||||
"""
|
The main ``tractor`` routine.
|
||||||
async with tractor.open_nursery() as n:
|
|
||||||
|
|
||||||
portal = await n.start_actor(
|
'''
|
||||||
|
async with tractor.open_nursery(debug_mode=True) as an:
|
||||||
|
|
||||||
|
portal = await an.start_actor(
|
||||||
'frank',
|
'frank',
|
||||||
# enable the actor to run funcs from this current module
|
# enable the actor to run funcs from this current module
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
|
|
@ -94,7 +121,9 @@ async def test_movie_theatre_convo(start_method):
|
||||||
await portal.cancel_actor()
|
await portal.cancel_actor()
|
||||||
|
|
||||||
|
|
||||||
async def cellar_door(return_value: Optional[str]):
|
async def cellar_door(
|
||||||
|
return_value: str|None,
|
||||||
|
):
|
||||||
return return_value
|
return return_value
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -104,17 +133,19 @@ async def cellar_door(return_value: Optional[str]):
|
||||||
)
|
)
|
||||||
@tractor_test
|
@tractor_test
|
||||||
async def test_most_beautiful_word(
|
async def test_most_beautiful_word(
|
||||||
start_method,
|
start_method: str,
|
||||||
return_value
|
return_value: Any,
|
||||||
|
debug_mode: bool,
|
||||||
):
|
):
|
||||||
'''
|
'''
|
||||||
The main ``tractor`` routine.
|
The main ``tractor`` routine.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
with trio.fail_after(1):
|
with trio.fail_after(1):
|
||||||
async with tractor.open_nursery() as n:
|
async with tractor.open_nursery(
|
||||||
|
debug_mode=debug_mode,
|
||||||
portal = await n.run_in_actor(
|
) as an:
|
||||||
|
portal = await an.run_in_actor(
|
||||||
cellar_door,
|
cellar_door,
|
||||||
return_value=return_value,
|
return_value=return_value,
|
||||||
name='some_linguist',
|
name='some_linguist',
|
||||||
|
|
@ -140,7 +171,7 @@ async def check_loglevel(level):
|
||||||
def test_loglevel_propagated_to_subactor(
|
def test_loglevel_propagated_to_subactor(
|
||||||
start_method,
|
start_method,
|
||||||
capfd,
|
capfd,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
):
|
):
|
||||||
if start_method == 'mp_forkserver':
|
if start_method == 'mp_forkserver':
|
||||||
pytest.skip(
|
pytest.skip(
|
||||||
|
|
@ -152,7 +183,7 @@ def test_loglevel_propagated_to_subactor(
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
name='arbiter',
|
name='arbiter',
|
||||||
start_method=start_method,
|
start_method=start_method,
|
||||||
arbiter_addr=arb_addr,
|
arbiter_addr=reg_addr,
|
||||||
|
|
||||||
) as tn:
|
) as tn:
|
||||||
await tn.run_in_actor(
|
await tn.run_in_actor(
|
||||||
|
|
|
||||||
|
|
@ -2,7 +2,9 @@
|
||||||
Broadcast channels for fan-out to local tasks.
|
Broadcast channels for fan-out to local tasks.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from contextlib import asynccontextmanager
|
from contextlib import (
|
||||||
|
asynccontextmanager as acm,
|
||||||
|
)
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from itertools import cycle
|
from itertools import cycle
|
||||||
import time
|
import time
|
||||||
|
|
@ -12,7 +14,11 @@ import pytest
|
||||||
import trio
|
import trio
|
||||||
from trio.lowlevel import current_task
|
from trio.lowlevel import current_task
|
||||||
import tractor
|
import tractor
|
||||||
from tractor.trionics import broadcast_receiver, Lagged
|
from tractor.trionics import (
|
||||||
|
broadcast_receiver,
|
||||||
|
Lagged,
|
||||||
|
collapse_eg,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@tractor.context
|
@tractor.context
|
||||||
|
|
@ -37,7 +43,7 @@ async def echo_sequences(
|
||||||
|
|
||||||
async def ensure_sequence(
|
async def ensure_sequence(
|
||||||
|
|
||||||
stream: tractor.ReceiveMsgStream,
|
stream: tractor.MsgStream,
|
||||||
sequence: list,
|
sequence: list,
|
||||||
delay: Optional[float] = None,
|
delay: Optional[float] = None,
|
||||||
|
|
||||||
|
|
@ -59,21 +65,21 @@ async def ensure_sequence(
|
||||||
break
|
break
|
||||||
|
|
||||||
|
|
||||||
@asynccontextmanager
|
@acm
|
||||||
async def open_sequence_streamer(
|
async def open_sequence_streamer(
|
||||||
|
|
||||||
sequence: list[int],
|
sequence: list[int],
|
||||||
arb_addr: tuple[str, int],
|
reg_addr: tuple[str, int],
|
||||||
start_method: str,
|
start_method: str,
|
||||||
|
|
||||||
) -> tractor.MsgStream:
|
) -> tractor.MsgStream:
|
||||||
|
|
||||||
async with tractor.open_nursery(
|
async with tractor.open_nursery(
|
||||||
arbiter_addr=arb_addr,
|
arbiter_addr=reg_addr,
|
||||||
start_method=start_method,
|
start_method=start_method,
|
||||||
) as tn:
|
) as an:
|
||||||
|
|
||||||
portal = await tn.start_actor(
|
portal = await an.start_actor(
|
||||||
'sequence_echoer',
|
'sequence_echoer',
|
||||||
enable_modules=[__name__],
|
enable_modules=[__name__],
|
||||||
)
|
)
|
||||||
|
|
@ -83,14 +89,14 @@ async def open_sequence_streamer(
|
||||||
) as (ctx, first):
|
) as (ctx, first):
|
||||||
|
|
||||||
assert first is None
|
assert first is None
|
||||||
async with ctx.open_stream(backpressure=True) as stream:
|
async with ctx.open_stream(allow_overruns=True) as stream:
|
||||||
yield stream
|
yield stream
|
||||||
|
|
||||||
await portal.cancel_actor()
|
await portal.cancel_actor()
|
||||||
|
|
||||||
|
|
||||||
def test_stream_fan_out_to_local_subscriptions(
|
def test_stream_fan_out_to_local_subscriptions(
|
||||||
arb_addr,
|
reg_addr,
|
||||||
start_method,
|
start_method,
|
||||||
):
|
):
|
||||||
|
|
||||||
|
|
@ -100,7 +106,7 @@ def test_stream_fan_out_to_local_subscriptions(
|
||||||
|
|
||||||
async with open_sequence_streamer(
|
async with open_sequence_streamer(
|
||||||
sequence,
|
sequence,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
start_method,
|
start_method,
|
||||||
) as stream:
|
) as stream:
|
||||||
|
|
||||||
|
|
@ -135,7 +141,7 @@ def test_stream_fan_out_to_local_subscriptions(
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
def test_consumer_and_parent_maybe_lag(
|
def test_consumer_and_parent_maybe_lag(
|
||||||
arb_addr,
|
reg_addr,
|
||||||
start_method,
|
start_method,
|
||||||
task_delays,
|
task_delays,
|
||||||
):
|
):
|
||||||
|
|
@ -147,14 +153,17 @@ def test_consumer_and_parent_maybe_lag(
|
||||||
|
|
||||||
async with open_sequence_streamer(
|
async with open_sequence_streamer(
|
||||||
sequence,
|
sequence,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
start_method,
|
start_method,
|
||||||
) as stream:
|
) as stream:
|
||||||
|
|
||||||
try:
|
try:
|
||||||
async with trio.open_nursery() as n:
|
async with (
|
||||||
|
collapse_eg(),
|
||||||
|
trio.open_nursery() as tn,
|
||||||
|
):
|
||||||
|
|
||||||
n.start_soon(
|
tn.start_soon(
|
||||||
ensure_sequence,
|
ensure_sequence,
|
||||||
stream,
|
stream,
|
||||||
sequence.copy(),
|
sequence.copy(),
|
||||||
|
|
@ -208,10 +217,11 @@ def test_consumer_and_parent_maybe_lag(
|
||||||
|
|
||||||
|
|
||||||
def test_faster_task_to_recv_is_cancelled_by_slower(
|
def test_faster_task_to_recv_is_cancelled_by_slower(
|
||||||
arb_addr,
|
reg_addr,
|
||||||
start_method,
|
start_method,
|
||||||
):
|
):
|
||||||
'''Ensure that if a faster task consuming from a stream is cancelled
|
'''
|
||||||
|
Ensure that if a faster task consuming from a stream is cancelled
|
||||||
the slower task can continue to receive all expected values.
|
the slower task can continue to receive all expected values.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
|
@ -221,13 +231,13 @@ def test_faster_task_to_recv_is_cancelled_by_slower(
|
||||||
|
|
||||||
async with open_sequence_streamer(
|
async with open_sequence_streamer(
|
||||||
sequence,
|
sequence,
|
||||||
arb_addr,
|
reg_addr,
|
||||||
start_method,
|
start_method,
|
||||||
|
|
||||||
) as stream:
|
) as stream:
|
||||||
|
|
||||||
async with trio.open_nursery() as n:
|
async with trio.open_nursery() as tn:
|
||||||
n.start_soon(
|
tn.start_soon(
|
||||||
ensure_sequence,
|
ensure_sequence,
|
||||||
stream,
|
stream,
|
||||||
sequence.copy(),
|
sequence.copy(),
|
||||||
|
|
@ -249,7 +259,7 @@ def test_faster_task_to_recv_is_cancelled_by_slower(
|
||||||
continue
|
continue
|
||||||
|
|
||||||
print('cancelling faster subtask')
|
print('cancelling faster subtask')
|
||||||
n.cancel_scope.cancel()
|
tn.cancel_scope.cancel()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
value = await stream.receive()
|
value = await stream.receive()
|
||||||
|
|
@ -267,7 +277,7 @@ def test_faster_task_to_recv_is_cancelled_by_slower(
|
||||||
# the faster subtask was cancelled
|
# the faster subtask was cancelled
|
||||||
break
|
break
|
||||||
|
|
||||||
# await tractor.breakpoint()
|
# await tractor.pause()
|
||||||
# await stream.receive()
|
# await stream.receive()
|
||||||
print(f'final value: {value}')
|
print(f'final value: {value}')
|
||||||
|
|
||||||
|
|
@ -298,7 +308,7 @@ def test_subscribe_errors_after_close():
|
||||||
|
|
||||||
|
|
||||||
def test_ensure_slow_consumers_lag_out(
|
def test_ensure_slow_consumers_lag_out(
|
||||||
arb_addr,
|
reg_addr,
|
||||||
start_method,
|
start_method,
|
||||||
):
|
):
|
||||||
'''This is a pure local task test; no tractor
|
'''This is a pure local task test; no tractor
|
||||||
|
|
@ -367,13 +377,13 @@ def test_ensure_slow_consumers_lag_out(
|
||||||
f'on {lags}:{value}')
|
f'on {lags}:{value}')
|
||||||
return
|
return
|
||||||
|
|
||||||
async with trio.open_nursery() as nursery:
|
async with trio.open_nursery() as tn:
|
||||||
|
|
||||||
for i in range(1, num_laggers):
|
for i in range(1, num_laggers):
|
||||||
|
|
||||||
task_name = f'sub_{i}'
|
task_name = f'sub_{i}'
|
||||||
laggers[task_name] = 0
|
laggers[task_name] = 0
|
||||||
nursery.start_soon(
|
tn.start_soon(
|
||||||
partial(
|
partial(
|
||||||
sub_and_print,
|
sub_and_print,
|
||||||
delay=i*0.001,
|
delay=i*0.001,
|
||||||
|
|
@ -409,8 +419,8 @@ def test_ensure_slow_consumers_lag_out(
|
||||||
seq = brx._state.subs[brx.key]
|
seq = brx._state.subs[brx.key]
|
||||||
assert seq == len(brx._state.queue) - 1
|
assert seq == len(brx._state.queue) - 1
|
||||||
|
|
||||||
# all backpressured entries in the underlying
|
# all no_overruns entries in the underlying
|
||||||
# channel should have been copied into the caster
|
# channel should have been copied into the bcaster
|
||||||
# queue trailing-window
|
# queue trailing-window
|
||||||
async for i in rx:
|
async for i in rx:
|
||||||
print(f'bped: {i}')
|
print(f'bped: {i}')
|
||||||
|
|
@ -460,3 +470,52 @@ def test_first_recver_is_cancelled():
|
||||||
assert value == 1
|
assert value == 1
|
||||||
|
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
|
|
||||||
|
|
||||||
|
def test_no_raise_on_lag():
|
||||||
|
'''
|
||||||
|
Run a simple 2-task broadcast where one task is slow but configured
|
||||||
|
so that it does not raise `Lagged` on overruns using
|
||||||
|
`raise_on_lasg=False` and verify that the task does not raise.
|
||||||
|
|
||||||
|
'''
|
||||||
|
size = 100
|
||||||
|
tx, rx = trio.open_memory_channel(size)
|
||||||
|
brx = broadcast_receiver(rx, size)
|
||||||
|
|
||||||
|
async def slow():
|
||||||
|
async with brx.subscribe(
|
||||||
|
raise_on_lag=False,
|
||||||
|
) as br:
|
||||||
|
async for msg in br:
|
||||||
|
print(f'slow task got: {msg}')
|
||||||
|
await trio.sleep(0.1)
|
||||||
|
|
||||||
|
async def fast():
|
||||||
|
async with brx.subscribe() as br:
|
||||||
|
async for msg in br:
|
||||||
|
print(f'fast task got: {msg}')
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
async with (
|
||||||
|
tractor.open_root_actor(
|
||||||
|
# NOTE: so we see the warning msg emitted by the bcaster
|
||||||
|
# internals when the no raise flag is set.
|
||||||
|
loglevel='warning',
|
||||||
|
),
|
||||||
|
collapse_eg(),
|
||||||
|
trio.open_nursery() as n,
|
||||||
|
):
|
||||||
|
n.start_soon(slow)
|
||||||
|
n.start_soon(fast)
|
||||||
|
|
||||||
|
for i in range(1000):
|
||||||
|
await tx.send(i)
|
||||||
|
|
||||||
|
# simulate user nailing ctl-c after realizing
|
||||||
|
# there's a lag in the slow task.
|
||||||
|
await trio.sleep(1)
|
||||||
|
raise KeyboardInterrupt
|
||||||
|
|
||||||
|
with pytest.raises(KeyboardInterrupt):
|
||||||
|
trio.run(main)
|
||||||
|
|
|
||||||
|
|
@ -3,9 +3,21 @@ Reminders for oddities in `trio` that we need to stay aware of and/or
|
||||||
want to see changed.
|
want to see changed.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
from contextlib import (
|
||||||
|
asynccontextmanager as acm,
|
||||||
|
)
|
||||||
|
from types import ModuleType
|
||||||
|
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
from _pytest import pathlib
|
||||||
|
from tractor.trionics import collapse_eg
|
||||||
import trio
|
import trio
|
||||||
from trio_typing import TaskStatus
|
from trio import TaskStatus
|
||||||
|
from tractor._testing import (
|
||||||
|
examples_dir,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
|
|
@ -60,6 +72,7 @@ def test_stashed_child_nursery(use_start_soon):
|
||||||
async def main():
|
async def main():
|
||||||
|
|
||||||
async with (
|
async with (
|
||||||
|
collapse_eg(),
|
||||||
trio.open_nursery() as pn,
|
trio.open_nursery() as pn,
|
||||||
):
|
):
|
||||||
cn = await pn.start(mk_child_nursery)
|
cn = await pn.start(mk_child_nursery)
|
||||||
|
|
@ -80,3 +93,210 @@ def test_stashed_child_nursery(use_start_soon):
|
||||||
|
|
||||||
with pytest.raises(NameError):
|
with pytest.raises(NameError):
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
('unmask_from_canc', 'canc_from_finally'),
|
||||||
|
[
|
||||||
|
(True, False),
|
||||||
|
(True, True),
|
||||||
|
pytest.param(False, True,
|
||||||
|
marks=pytest.mark.xfail(reason="never raises!")
|
||||||
|
),
|
||||||
|
],
|
||||||
|
# TODO, ask ronny how to impl this .. XD
|
||||||
|
# ids='unmask_from_canc={0}, canc_from_finally={1}',#.format,
|
||||||
|
)
|
||||||
|
def test_acm_embedded_nursery_propagates_enter_err(
|
||||||
|
canc_from_finally: bool,
|
||||||
|
unmask_from_canc: bool,
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Demo how a masking `trio.Cancelled` could be handled by unmasking
|
||||||
|
from the `.__context__` field when a user (by accident) re-raises
|
||||||
|
from a `finally:`.
|
||||||
|
|
||||||
|
'''
|
||||||
|
import tractor
|
||||||
|
|
||||||
|
@acm
|
||||||
|
async def wraps_tn_that_always_cancels():
|
||||||
|
async with (
|
||||||
|
trio.open_nursery() as tn,
|
||||||
|
tractor.trionics.maybe_raise_from_masking_exc(
|
||||||
|
unmask_from=(
|
||||||
|
(trio.Cancelled,) if unmask_from_canc
|
||||||
|
else ()
|
||||||
|
),
|
||||||
|
)
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
yield tn
|
||||||
|
finally:
|
||||||
|
if canc_from_finally:
|
||||||
|
tn.cancel_scope.cancel()
|
||||||
|
await trio.lowlevel.checkpoint()
|
||||||
|
|
||||||
|
async def _main():
|
||||||
|
with tractor.devx.maybe_open_crash_handler(
|
||||||
|
pdb=debug_mode,
|
||||||
|
) as bxerr:
|
||||||
|
assert not bxerr.value
|
||||||
|
|
||||||
|
async with (
|
||||||
|
wraps_tn_that_always_cancels() as tn,
|
||||||
|
):
|
||||||
|
assert not tn.cancel_scope.cancel_called
|
||||||
|
assert 0
|
||||||
|
|
||||||
|
if debug_mode:
|
||||||
|
assert (
|
||||||
|
(err := bxerr.value)
|
||||||
|
and
|
||||||
|
type(err) is AssertionError
|
||||||
|
)
|
||||||
|
|
||||||
|
with pytest.raises(ExceptionGroup) as excinfo:
|
||||||
|
trio.run(_main)
|
||||||
|
|
||||||
|
eg: ExceptionGroup = excinfo.value
|
||||||
|
assert_eg, rest_eg = eg.split(AssertionError)
|
||||||
|
|
||||||
|
assert len(assert_eg.exceptions) == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_gatherctxs_with_memchan_breaks_multicancelled(
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Demo how a using an `async with sndchan` inside
|
||||||
|
a `.trionics.gather_contexts()` task will break a strict-eg-tn's
|
||||||
|
multi-cancelled absorption..
|
||||||
|
|
||||||
|
'''
|
||||||
|
from tractor import (
|
||||||
|
trionics,
|
||||||
|
)
|
||||||
|
|
||||||
|
@acm
|
||||||
|
async def open_memchan() -> trio.abc.ReceiveChannel:
|
||||||
|
|
||||||
|
task: trio.Task = trio.lowlevel.current_task()
|
||||||
|
print(
|
||||||
|
f'Opening {task!r}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
# 1 to force eager sending
|
||||||
|
send, recv = trio.open_memory_channel(16)
|
||||||
|
|
||||||
|
try:
|
||||||
|
async with send:
|
||||||
|
yield recv
|
||||||
|
finally:
|
||||||
|
print(
|
||||||
|
f'Closed {task!r}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
async with (
|
||||||
|
# XXX should ensure ONLY the KBI
|
||||||
|
# is relayed upward
|
||||||
|
collapse_eg(),
|
||||||
|
trio.open_nursery(), # as tn,
|
||||||
|
|
||||||
|
trionics.gather_contexts([
|
||||||
|
open_memchan(),
|
||||||
|
open_memchan(),
|
||||||
|
]) as recv_chans,
|
||||||
|
):
|
||||||
|
assert len(recv_chans) == 2
|
||||||
|
|
||||||
|
await trio.sleep(1)
|
||||||
|
raise KeyboardInterrupt
|
||||||
|
# tn.cancel_scope.cancel()
|
||||||
|
|
||||||
|
with pytest.raises(KeyboardInterrupt):
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'raise_unmasked', [
|
||||||
|
True,
|
||||||
|
pytest.param(
|
||||||
|
False,
|
||||||
|
marks=pytest.mark.xfail(
|
||||||
|
reason="see examples/trio/send_chan_aclose_masks.py"
|
||||||
|
)
|
||||||
|
),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'child_errors_mid_stream',
|
||||||
|
[True, False],
|
||||||
|
)
|
||||||
|
def test_unmask_aclose_as_checkpoint_on_aexit(
|
||||||
|
raise_unmasked: bool,
|
||||||
|
child_errors_mid_stream: bool,
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Verify that our unmasker util works over the common case where
|
||||||
|
a mem-chan's `.aclose()` is included in an `@acm` stack
|
||||||
|
and it being currently a checkpoint, can `trio.Cancelled`-mask an embedded
|
||||||
|
exception from user code resulting in a silent failure which
|
||||||
|
appears like graceful cancellation.
|
||||||
|
|
||||||
|
This test suite is mostly implemented as an example script so it
|
||||||
|
could more easily be shared with `trio`-core peeps as `tractor`-less
|
||||||
|
minimum reproducing example.
|
||||||
|
|
||||||
|
'''
|
||||||
|
mod: ModuleType = pathlib.import_path(
|
||||||
|
examples_dir()
|
||||||
|
/ 'trio'
|
||||||
|
/ 'send_chan_aclose_masks_beg.py',
|
||||||
|
root=examples_dir(),
|
||||||
|
consider_namespace_packages=False,
|
||||||
|
)
|
||||||
|
with pytest.raises(RuntimeError):
|
||||||
|
trio.run(partial(
|
||||||
|
mod.main,
|
||||||
|
raise_unmasked=raise_unmasked,
|
||||||
|
child_errors_mid_stream=child_errors_mid_stream,
|
||||||
|
))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'ignore_special_cases', [
|
||||||
|
True,
|
||||||
|
pytest.param(
|
||||||
|
False,
|
||||||
|
marks=pytest.mark.xfail(
|
||||||
|
reason="see examples/trio/lockacquire_not_umasked.py"
|
||||||
|
)
|
||||||
|
),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
def test_cancelled_lockacquire_in_ipctx_not_unmasked(
|
||||||
|
ignore_special_cases: bool,
|
||||||
|
loglevel: str,
|
||||||
|
debug_mode: bool,
|
||||||
|
):
|
||||||
|
mod: ModuleType = pathlib.import_path(
|
||||||
|
examples_dir()
|
||||||
|
/ 'trio'
|
||||||
|
/ 'lockacquire_not_unmasked.py',
|
||||||
|
root=examples_dir(),
|
||||||
|
consider_namespace_packages=False,
|
||||||
|
)
|
||||||
|
async def _main():
|
||||||
|
with trio.fail_after(2):
|
||||||
|
await mod.main(
|
||||||
|
ignore_special_cases=ignore_special_cases,
|
||||||
|
loglevel=loglevel,
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
)
|
||||||
|
|
||||||
|
trio.run(_main)
|
||||||
|
|
|
||||||
|
|
@ -15,71 +15,56 @@
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
tractor: structured concurrent "actors".
|
tractor: structured concurrent ``trio``-"actors".
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from exceptiongroup import BaseExceptionGroup
|
|
||||||
|
|
||||||
from ._clustering import open_actor_cluster
|
from ._clustering import (
|
||||||
from ._ipc import Channel
|
open_actor_cluster as open_actor_cluster,
|
||||||
|
)
|
||||||
|
from ._context import (
|
||||||
|
Context as Context, # the type
|
||||||
|
context as context, # a func-decorator
|
||||||
|
)
|
||||||
from ._streaming import (
|
from ._streaming import (
|
||||||
Context,
|
MsgStream as MsgStream,
|
||||||
ReceiveMsgStream,
|
stream as stream,
|
||||||
MsgStream,
|
|
||||||
stream,
|
|
||||||
context,
|
|
||||||
)
|
)
|
||||||
from ._discovery import (
|
from ._discovery import (
|
||||||
get_arbiter,
|
get_registry as get_registry,
|
||||||
find_actor,
|
find_actor as find_actor,
|
||||||
wait_for_actor,
|
wait_for_actor as wait_for_actor,
|
||||||
query_actor,
|
query_actor as query_actor,
|
||||||
|
)
|
||||||
|
from ._supervise import (
|
||||||
|
open_nursery as open_nursery,
|
||||||
|
ActorNursery as ActorNursery,
|
||||||
)
|
)
|
||||||
from ._supervise import open_nursery
|
|
||||||
from ._state import (
|
from ._state import (
|
||||||
current_actor,
|
current_actor as current_actor,
|
||||||
is_root_process,
|
is_root_process as is_root_process,
|
||||||
|
current_ipc_ctx as current_ipc_ctx,
|
||||||
|
debug_mode as debug_mode
|
||||||
)
|
)
|
||||||
from ._exceptions import (
|
from ._exceptions import (
|
||||||
RemoteActorError,
|
ContextCancelled as ContextCancelled,
|
||||||
ModuleNotExposed,
|
ModuleNotExposed as ModuleNotExposed,
|
||||||
ContextCancelled,
|
MsgTypeError as MsgTypeError,
|
||||||
|
RemoteActorError as RemoteActorError,
|
||||||
|
TransportClosed as TransportClosed,
|
||||||
)
|
)
|
||||||
from ._debug import breakpoint, post_mortem
|
from .devx import (
|
||||||
from . import msg
|
breakpoint as breakpoint,
|
||||||
|
pause as pause,
|
||||||
|
pause_from_sync as pause_from_sync,
|
||||||
|
post_mortem as post_mortem,
|
||||||
|
)
|
||||||
|
from . import msg as msg
|
||||||
from ._root import (
|
from ._root import (
|
||||||
run_daemon,
|
run_daemon as run_daemon,
|
||||||
open_root_actor,
|
open_root_actor as open_root_actor,
|
||||||
)
|
)
|
||||||
from ._portal import Portal
|
from .ipc import Channel as Channel
|
||||||
from ._runtime import Actor
|
from ._portal import Portal as Portal
|
||||||
|
from ._runtime import Actor as Actor
|
||||||
|
# from . import hilevel as hilevel
|
||||||
__all__ = [
|
|
||||||
'Actor',
|
|
||||||
'Channel',
|
|
||||||
'Context',
|
|
||||||
'ContextCancelled',
|
|
||||||
'ModuleNotExposed',
|
|
||||||
'MsgStream',
|
|
||||||
'BaseExceptionGroup',
|
|
||||||
'Portal',
|
|
||||||
'ReceiveMsgStream',
|
|
||||||
'RemoteActorError',
|
|
||||||
'breakpoint',
|
|
||||||
'context',
|
|
||||||
'current_actor',
|
|
||||||
'find_actor',
|
|
||||||
'get_arbiter',
|
|
||||||
'is_root_process',
|
|
||||||
'msg',
|
|
||||||
'open_actor_cluster',
|
|
||||||
'open_nursery',
|
|
||||||
'open_root_actor',
|
|
||||||
'post_mortem',
|
|
||||||
'query_actor',
|
|
||||||
'run_daemon',
|
|
||||||
'stream',
|
|
||||||
'to_asyncio',
|
|
||||||
'wait_for_actor',
|
|
||||||
]
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,282 @@
|
||||||
|
# tractor: structured concurrent "actors".
|
||||||
|
# Copyright 2018-eternity Tyler Goodlet.
|
||||||
|
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
from __future__ import annotations
|
||||||
|
from uuid import uuid4
|
||||||
|
from typing import (
|
||||||
|
Protocol,
|
||||||
|
ClassVar,
|
||||||
|
Type,
|
||||||
|
TYPE_CHECKING,
|
||||||
|
)
|
||||||
|
|
||||||
|
from bidict import bidict
|
||||||
|
from trio import (
|
||||||
|
SocketListener,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .log import get_logger
|
||||||
|
from ._state import (
|
||||||
|
_def_tpt_proto,
|
||||||
|
)
|
||||||
|
from .ipc._tcp import TCPAddress
|
||||||
|
from .ipc._uds import UDSAddress
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from ._runtime import Actor
|
||||||
|
|
||||||
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO, maybe breakout the netns key to a struct?
|
||||||
|
# class NetNs(Struct)[str, int]:
|
||||||
|
# ...
|
||||||
|
|
||||||
|
# TODO, can't we just use a type alias
|
||||||
|
# for this? namely just some `tuple[str, int, str, str]`?
|
||||||
|
#
|
||||||
|
# -[ ] would also just be simpler to keep this as SockAddr[tuple]
|
||||||
|
# or something, implying it's just a simple pair of values which can
|
||||||
|
# presumably be mapped to all transports?
|
||||||
|
# -[ ] `pydoc socket.socket.getsockname()` delivers a 4-tuple for
|
||||||
|
# ipv6 `(hostaddr, port, flowinfo, scope_id)`.. so how should we
|
||||||
|
# handle that?
|
||||||
|
# -[ ] as a further alternative to this wrap()/unwrap() approach we
|
||||||
|
# could just implement `enc/dec_hook()`s for the `Address`-types
|
||||||
|
# and just deal with our internal objs directly and always and
|
||||||
|
# leave it to the codec layer to figure out marshalling?
|
||||||
|
# |_ would mean only one spot to do the `.unwrap()` (which we may
|
||||||
|
# end up needing to call from the hook()s anyway?)
|
||||||
|
# -[x] rename to `UnwrappedAddress[Descriptor]` ??
|
||||||
|
# seems like the right name as per,
|
||||||
|
# https://www.geeksforgeeks.org/introduction-to-address-descriptor/
|
||||||
|
#
|
||||||
|
UnwrappedAddress = (
|
||||||
|
# tcp/udp/uds
|
||||||
|
tuple[
|
||||||
|
str, # host/domain(tcp), filesys-dir(uds)
|
||||||
|
int|str, # port/path(uds)
|
||||||
|
]
|
||||||
|
# ?TODO? should we also include another 2 fields from
|
||||||
|
# our `Aid` msg such that we include the runtime `Actor.uid`
|
||||||
|
# of `.name` and `.uuid`?
|
||||||
|
# - would ensure uniqueness across entire net?
|
||||||
|
# - allows for easier runtime-level filtering of "actors by
|
||||||
|
# service name"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO, maybe rename to `SocketAddress`?
|
||||||
|
class Address(Protocol):
|
||||||
|
proto_key: ClassVar[str]
|
||||||
|
unwrapped_type: ClassVar[UnwrappedAddress]
|
||||||
|
|
||||||
|
# TODO, i feel like an `.is_bound()` is a better thing to
|
||||||
|
# support?
|
||||||
|
# Lke, what use does this have besides a noop and if it's not
|
||||||
|
# valid why aren't we erroring on creation/use?
|
||||||
|
@property
|
||||||
|
def is_valid(self) -> bool:
|
||||||
|
...
|
||||||
|
|
||||||
|
# TODO, maybe `.netns` is a better name?
|
||||||
|
@property
|
||||||
|
def namespace(self) -> tuple[str, int]|None:
|
||||||
|
'''
|
||||||
|
The if-available, OS-specific "network namespace" key.
|
||||||
|
|
||||||
|
'''
|
||||||
|
...
|
||||||
|
|
||||||
|
@property
|
||||||
|
def bindspace(self) -> str:
|
||||||
|
'''
|
||||||
|
Deliver the socket address' "bindable space" from
|
||||||
|
a `socket.socket.bind()` and thus from the perspective of
|
||||||
|
specific transport protocol domain.
|
||||||
|
|
||||||
|
I.e. for most (layer-4) network-socket protocols this is
|
||||||
|
normally the ipv4/6 address, for UDS this is normally
|
||||||
|
a filesystem (sub-directory).
|
||||||
|
|
||||||
|
For (distributed) network protocols this is normally the routing
|
||||||
|
layer's domain/(ip-)address, though it might also include a "network namespace"
|
||||||
|
key different then the default.
|
||||||
|
|
||||||
|
For local-host-only transports this is either an explicit
|
||||||
|
namespace (with types defined by the OS: netns, Cgroup, IPC,
|
||||||
|
pid, etc. on linux) or failing that the sub-directory in the
|
||||||
|
filesys in which socket/shm files are located *under*.
|
||||||
|
|
||||||
|
'''
|
||||||
|
...
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_addr(cls, addr: UnwrappedAddress) -> Address:
|
||||||
|
...
|
||||||
|
|
||||||
|
def unwrap(self) -> UnwrappedAddress:
|
||||||
|
'''
|
||||||
|
Deliver the underying minimum field set in
|
||||||
|
a primitive python data type-structure.
|
||||||
|
'''
|
||||||
|
...
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_random(
|
||||||
|
cls,
|
||||||
|
current_actor: Actor,
|
||||||
|
bindspace: str|None = None,
|
||||||
|
) -> Address:
|
||||||
|
...
|
||||||
|
|
||||||
|
# TODO, this should be something like a `.get_def_registar_addr()`
|
||||||
|
# or similar since,
|
||||||
|
# - it should be a **host singleton** (not root/tree singleton)
|
||||||
|
# - we **only need this value** when one isn't provided to the
|
||||||
|
# runtime at boot and we want to implicitly provide a host-wide
|
||||||
|
# registrar.
|
||||||
|
# - each rooted-actor-tree should likely have its own
|
||||||
|
# micro-registry (likely the root being it), also see
|
||||||
|
@classmethod
|
||||||
|
def get_root(cls) -> Address:
|
||||||
|
...
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
...
|
||||||
|
|
||||||
|
def __eq__(self, other) -> bool:
|
||||||
|
...
|
||||||
|
|
||||||
|
async def open_listener(
|
||||||
|
self,
|
||||||
|
**kwargs,
|
||||||
|
) -> SocketListener:
|
||||||
|
...
|
||||||
|
|
||||||
|
async def close_listener(self):
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
_address_types: bidict[str, Type[Address]] = {
|
||||||
|
'tcp': TCPAddress,
|
||||||
|
'uds': UDSAddress
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# TODO! really these are discovery sys default addrs ONLY useful for
|
||||||
|
# when none is provided to a root actor on first boot.
|
||||||
|
_default_lo_addrs: dict[
|
||||||
|
str,
|
||||||
|
UnwrappedAddress
|
||||||
|
] = {
|
||||||
|
'tcp': TCPAddress.get_root().unwrap(),
|
||||||
|
'uds': UDSAddress.get_root().unwrap(),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_address_cls(name: str) -> Type[Address]:
|
||||||
|
return _address_types[name]
|
||||||
|
|
||||||
|
|
||||||
|
def is_wrapped_addr(addr: any) -> bool:
|
||||||
|
return type(addr) in _address_types.values()
|
||||||
|
|
||||||
|
|
||||||
|
def mk_uuid() -> str:
|
||||||
|
'''
|
||||||
|
Encapsulate creation of a uuid4 as `str` as used
|
||||||
|
for creating `Actor.uid: tuple[str, str]` and/or
|
||||||
|
`.msg.types.Aid`.
|
||||||
|
|
||||||
|
'''
|
||||||
|
return str(uuid4())
|
||||||
|
|
||||||
|
|
||||||
|
def wrap_address(
|
||||||
|
addr: UnwrappedAddress
|
||||||
|
) -> Address:
|
||||||
|
'''
|
||||||
|
Wrap an `UnwrappedAddress` as an `Address`-type based
|
||||||
|
on matching builtin python data-structures which we adhoc
|
||||||
|
use for each.
|
||||||
|
|
||||||
|
XXX NOTE, careful care must be placed to ensure
|
||||||
|
`UnwrappedAddress` cases are **definitely unique** otherwise the
|
||||||
|
wrong transport backend may be loaded and will break many
|
||||||
|
low-level things in our runtime in a not-fun-to-debug way!
|
||||||
|
|
||||||
|
XD
|
||||||
|
|
||||||
|
'''
|
||||||
|
if is_wrapped_addr(addr):
|
||||||
|
return addr
|
||||||
|
|
||||||
|
cls: Type|None = None
|
||||||
|
# if 'sock' in addr[0]:
|
||||||
|
# import pdbp; pdbp.set_trace()
|
||||||
|
match addr:
|
||||||
|
|
||||||
|
# classic network socket-address as tuple/list
|
||||||
|
case (
|
||||||
|
(str(), int())
|
||||||
|
|
|
||||||
|
[str(), int()]
|
||||||
|
):
|
||||||
|
cls = TCPAddress
|
||||||
|
|
||||||
|
case (
|
||||||
|
# (str()|Path(), str()|Path()),
|
||||||
|
# ^TODO? uhh why doesn't this work!?
|
||||||
|
|
||||||
|
(_, filename)
|
||||||
|
) if type(filename) is str:
|
||||||
|
cls = UDSAddress
|
||||||
|
|
||||||
|
# likely an unset UDS or TCP reg address as defaulted in
|
||||||
|
# `_state._runtime_vars['_root_mailbox']`
|
||||||
|
#
|
||||||
|
# TODO? figure out when/if we even need this?
|
||||||
|
case (
|
||||||
|
None
|
||||||
|
|
|
||||||
|
[None, None]
|
||||||
|
):
|
||||||
|
cls: Type[Address] = get_address_cls(_def_tpt_proto)
|
||||||
|
addr: UnwrappedAddress = cls.get_root().unwrap()
|
||||||
|
|
||||||
|
case _:
|
||||||
|
# import pdbp; pdbp.set_trace()
|
||||||
|
raise TypeError(
|
||||||
|
f'Can not wrap unwrapped-address ??\n'
|
||||||
|
f'type(addr): {type(addr)!r}\n'
|
||||||
|
f'addr: {addr!r}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
return cls.from_addr(addr)
|
||||||
|
|
||||||
|
|
||||||
|
def default_lo_addrs(
|
||||||
|
transports: list[str],
|
||||||
|
) -> list[Type[Address]]:
|
||||||
|
'''
|
||||||
|
Return the default, host-singleton, registry address
|
||||||
|
for an input transport key set.
|
||||||
|
|
||||||
|
'''
|
||||||
|
return [
|
||||||
|
_default_lo_addrs[transport]
|
||||||
|
for transport in transports
|
||||||
|
]
|
||||||
|
|
@ -18,8 +18,6 @@
|
||||||
This is the "bootloader" for actors started using the native trio backend.
|
This is the "bootloader" for actors started using the native trio backend.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
import sys
|
|
||||||
import trio
|
|
||||||
import argparse
|
import argparse
|
||||||
|
|
||||||
from ast import literal_eval
|
from ast import literal_eval
|
||||||
|
|
@ -33,13 +31,16 @@ def parse_uid(arg):
|
||||||
return str(name), str(uuid) # ensures str encoding
|
return str(name), str(uuid) # ensures str encoding
|
||||||
|
|
||||||
def parse_ipaddr(arg):
|
def parse_ipaddr(arg):
|
||||||
host, port = literal_eval(arg)
|
try:
|
||||||
return (str(host), int(port))
|
return literal_eval(arg)
|
||||||
|
|
||||||
|
except (ValueError, SyntaxError):
|
||||||
|
# UDS: try to interpret as a straight up str
|
||||||
|
return arg
|
||||||
|
|
||||||
from ._entry import _trio_main
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
__tracebackhide__: bool = True
|
||||||
|
|
||||||
parser = argparse.ArgumentParser()
|
parser = argparse.ArgumentParser()
|
||||||
parser.add_argument("--uid", type=parse_uid)
|
parser.add_argument("--uid", type=parse_uid)
|
||||||
|
|
@ -49,8 +50,8 @@ if __name__ == "__main__":
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
subactor = Actor(
|
subactor = Actor(
|
||||||
args.uid[0],
|
name=args.uid[0],
|
||||||
uid=args.uid[1],
|
uuid=args.uid[1],
|
||||||
loglevel=args.loglevel,
|
loglevel=args.loglevel,
|
||||||
spawn_method="trio"
|
spawn_method="trio"
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -19,10 +19,13 @@ Actor cluster helpers.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
from contextlib import (
|
||||||
from contextlib import asynccontextmanager as acm
|
asynccontextmanager as acm,
|
||||||
|
)
|
||||||
from multiprocessing import cpu_count
|
from multiprocessing import cpu_count
|
||||||
from typing import AsyncGenerator, Optional
|
from typing import (
|
||||||
|
AsyncGenerator,
|
||||||
|
)
|
||||||
|
|
||||||
import trio
|
import trio
|
||||||
import tractor
|
import tractor
|
||||||
|
|
@ -52,10 +55,17 @@ async def open_actor_cluster(
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
'Number of names is {len(names)} but count it {count}')
|
'Number of names is {len(names)} but count it {count}')
|
||||||
|
|
||||||
async with tractor.open_nursery(
|
async with (
|
||||||
**runtime_kwargs,
|
# tractor.trionics.collapse_eg(),
|
||||||
) as an:
|
tractor.open_nursery(
|
||||||
async with trio.open_nursery() as n:
|
**runtime_kwargs,
|
||||||
|
) as an
|
||||||
|
):
|
||||||
|
async with (
|
||||||
|
# tractor.trionics.collapse_eg(),
|
||||||
|
trio.open_nursery() as tn,
|
||||||
|
tractor.trionics.maybe_raise_from_masking_exc()
|
||||||
|
):
|
||||||
uid = tractor.current_actor().uid
|
uid = tractor.current_actor().uid
|
||||||
|
|
||||||
async def _start(name: str) -> None:
|
async def _start(name: str) -> None:
|
||||||
|
|
@ -66,9 +76,8 @@ async def open_actor_cluster(
|
||||||
)
|
)
|
||||||
|
|
||||||
for name in names:
|
for name in names:
|
||||||
n.start_soon(_start, name)
|
tn.start_soon(_start, name)
|
||||||
|
|
||||||
assert len(portals) == count
|
assert len(portals) == count
|
||||||
yield portals
|
yield portals
|
||||||
|
|
||||||
await an.cancel(hard_kill=hard_kill)
|
await an.cancel(hard_kill=hard_kill)
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
Load Diff
|
|
@ -1,923 +0,0 @@
|
||||||
# tractor: structured concurrent "actors".
|
|
||||||
# Copyright 2018-eternity Tyler Goodlet.
|
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Multi-core debugging for da peeps!
|
|
||||||
|
|
||||||
"""
|
|
||||||
from __future__ import annotations
|
|
||||||
import bdb
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import signal
|
|
||||||
from functools import (
|
|
||||||
partial,
|
|
||||||
cached_property,
|
|
||||||
)
|
|
||||||
from contextlib import asynccontextmanager as acm
|
|
||||||
from typing import (
|
|
||||||
Any,
|
|
||||||
Optional,
|
|
||||||
Callable,
|
|
||||||
AsyncIterator,
|
|
||||||
AsyncGenerator,
|
|
||||||
)
|
|
||||||
from types import FrameType
|
|
||||||
|
|
||||||
import tractor
|
|
||||||
import trio
|
|
||||||
from trio_typing import TaskStatus
|
|
||||||
|
|
||||||
from .log import get_logger
|
|
||||||
from ._discovery import get_root
|
|
||||||
from ._state import (
|
|
||||||
is_root_process,
|
|
||||||
debug_mode,
|
|
||||||
)
|
|
||||||
from ._exceptions import (
|
|
||||||
is_multi_cancelled,
|
|
||||||
ContextCancelled,
|
|
||||||
)
|
|
||||||
from ._ipc import Channel
|
|
||||||
|
|
||||||
|
|
||||||
try:
|
|
||||||
# wtf: only exported when installed in dev mode?
|
|
||||||
import pdbpp
|
|
||||||
except ImportError:
|
|
||||||
# pdbpp is installed in regular mode...it monkey patches stuff
|
|
||||||
import pdb
|
|
||||||
xpm = getattr(pdb, 'xpm', None)
|
|
||||||
assert xpm, "pdbpp is not installed?" # type: ignore
|
|
||||||
pdbpp = pdb
|
|
||||||
|
|
||||||
log = get_logger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ['breakpoint', 'post_mortem']
|
|
||||||
|
|
||||||
|
|
||||||
class Lock:
|
|
||||||
'''
|
|
||||||
Actor global debug lock state.
|
|
||||||
|
|
||||||
Mostly to avoid a lot of ``global`` declarations for now XD.
|
|
||||||
|
|
||||||
'''
|
|
||||||
repl: MultiActorPdb | None = None
|
|
||||||
# placeholder for function to set a ``trio.Event`` on debugger exit
|
|
||||||
# pdb_release_hook: Optional[Callable] = None
|
|
||||||
|
|
||||||
_trio_handler: Callable[
|
|
||||||
[int, Optional[FrameType]], Any
|
|
||||||
] | int | None = None
|
|
||||||
|
|
||||||
# actor-wide variable pointing to current task name using debugger
|
|
||||||
local_task_in_debug: str | None = None
|
|
||||||
|
|
||||||
# NOTE: set by the current task waiting on the root tty lock from
|
|
||||||
# the CALLER side of the `lock_tty_for_child()` context entry-call
|
|
||||||
# and must be cancelled if this actor is cancelled via IPC
|
|
||||||
# request-message otherwise deadlocks with the parent actor may
|
|
||||||
# ensure
|
|
||||||
_debugger_request_cs: Optional[trio.CancelScope] = None
|
|
||||||
|
|
||||||
# NOTE: set only in the root actor for the **local** root spawned task
|
|
||||||
# which has acquired the lock (i.e. this is on the callee side of
|
|
||||||
# the `lock_tty_for_child()` context entry).
|
|
||||||
_root_local_task_cs_in_debug: Optional[trio.CancelScope] = None
|
|
||||||
|
|
||||||
# actor tree-wide actor uid that supposedly has the tty lock
|
|
||||||
global_actor_in_debug: Optional[tuple[str, str]] = None
|
|
||||||
|
|
||||||
local_pdb_complete: Optional[trio.Event] = None
|
|
||||||
no_remote_has_tty: Optional[trio.Event] = None
|
|
||||||
|
|
||||||
# lock in root actor preventing multi-access to local tty
|
|
||||||
_debug_lock: trio.StrictFIFOLock = trio.StrictFIFOLock()
|
|
||||||
|
|
||||||
_orig_sigint_handler: Optional[Callable] = None
|
|
||||||
_blocked: set[tuple[str, str]] = set()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def shield_sigint(cls):
|
|
||||||
cls._orig_sigint_handler = signal.signal(
|
|
||||||
signal.SIGINT,
|
|
||||||
shield_sigint_handler,
|
|
||||||
)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def unshield_sigint(cls):
|
|
||||||
# always restore ``trio``'s sigint handler. see notes below in
|
|
||||||
# the pdb factory about the nightmare that is that code swapping
|
|
||||||
# out the handler when the repl activates...
|
|
||||||
signal.signal(signal.SIGINT, cls._trio_handler)
|
|
||||||
cls._orig_sigint_handler = None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def release(cls):
|
|
||||||
try:
|
|
||||||
cls._debug_lock.release()
|
|
||||||
except RuntimeError:
|
|
||||||
# uhhh makes no sense but been seeing the non-owner
|
|
||||||
# release error even though this is definitely the task
|
|
||||||
# that locked?
|
|
||||||
owner = cls._debug_lock.statistics().owner
|
|
||||||
if owner:
|
|
||||||
raise
|
|
||||||
|
|
||||||
# actor-local state, irrelevant for non-root.
|
|
||||||
cls.global_actor_in_debug = None
|
|
||||||
cls.local_task_in_debug = None
|
|
||||||
|
|
||||||
try:
|
|
||||||
# sometimes the ``trio`` might already be terminated in
|
|
||||||
# which case this call will raise.
|
|
||||||
if cls.local_pdb_complete is not None:
|
|
||||||
cls.local_pdb_complete.set()
|
|
||||||
finally:
|
|
||||||
# restore original sigint handler
|
|
||||||
cls.unshield_sigint()
|
|
||||||
cls.repl = None
|
|
||||||
|
|
||||||
|
|
||||||
class TractorConfig(pdbpp.DefaultConfig):
    '''
    Custom ``pdbpp`` goodness.

    '''
    # keep "hidden" frames visible in the repl's tracebacks
    enable_hidden_frames = False

    # use_pygments = True
    # sticky_by_default = True
|
|
||||||
|
|
||||||
|
|
||||||
class MultiActorPdb(pdbpp.Pdb):
    '''
    Add teardown hooks to the regular ``pdbpp.Pdb``.

    '''
    # override the pdbpp config with our coolio one
    DefaultConfig = TractorConfig

    # def preloop(self):
    #     print('IN PRELOOP')
    #     super().preloop()

    # TODO: figure out how to disallow recursive .set_trace() entry
    # since that'll cause deadlock for us.
    def set_continue(self):
        # ensure the tree-wide lock is dropped even if the super call
        # raises.
        try:
            super().set_continue()
        finally:
            Lock.release()

    def set_quit(self):
        try:
            super().set_quit()
        finally:
            Lock.release()

    # XXX NOTE: we only override this because apparently the stdlib pdb
    # bois likes to touch the SIGINT handler as much as i like to touch
    # my d$%&.
    def _cmdloop(self):
        self.cmdloop()

    @cached_property
    def shname(self) -> str | None:
        '''
        Attempt to return the login shell name with a special check for
        the infamous `xonsh` since it seems to have some issues much
        different from std shells when it comes to flushing the prompt?

        '''
        # SUPER HACKY and only really works if `xonsh` is not used
        # before spawning further sub-shells..
        shpath = os.getenv('SHELL', None)
        if not shpath:
            return None

        if (
            os.getenv('XONSH_LOGIN', default=False)
            or 'xonsh' in shpath
        ):
            return 'xonsh'

        return os.path.basename(shpath)
|
|
||||||
|
|
||||||
|
|
||||||
@acm
async def _acquire_debug_lock_from_root_task(
    uid: tuple[str, str]

) -> AsyncIterator[trio.StrictFIFOLock]:
    '''
    Acquire a root-actor local FIFO lock which tracks mutex access of
    the process tree's global debugger breakpoint.

    This lock avoids tty clobbering (by preventing multiple processes
    reading from stdstreams) and ensures multi-actor, sequential access
    to the ``pdb`` repl.

    '''
    task_name = trio.lowlevel.current_task().name

    log.runtime(
        f"Attempting to acquire TTY lock, remote task: {task_name}:{uid}"
    )

    we_acquired = False

    try:
        log.runtime(
            f"entering lock checkpoint, remote task: {task_name}:{uid}"
        )
        # NOTE: if the surrounding cancel scope from the
        # `lock_tty_for_child()` caller is cancelled, this line should
        # unblock and NOT leave us in some kind of
        # a "child-locked-TTY-but-child-is-uncontactable-over-IPC"
        # condition.
        await Lock._debug_lock.acquire()

        # XXX BUGFIX: only mark the lock as ours *after* the acquire
        # completes. Previously the flag was set *before* awaiting
        # ``.acquire()``, so a cancellation raised while waiting left
        # ``we_acquired == True`` and the ``finally:`` below would then
        # release a lock actually held by some *other* task (the
        # ``.locked()`` check alone can't distinguish owners).
        we_acquired = True

        if Lock.no_remote_has_tty is None:
            # mark the tty lock as being in use so that the runtime
            # can try to avoid clobbering any connection from a child
            # that's currently relying on it.
            Lock.no_remote_has_tty = trio.Event()

        Lock.global_actor_in_debug = uid
        log.runtime(f"TTY lock acquired, remote task: {task_name}:{uid}")

        # NOTE: critical section: this yield is unshielded!

        # IF we received a cancel during the shielded lock entry of some
        # next-in-queue requesting task, then the resumption here will
        # result in that ``trio.Cancelled`` being raised to our caller
        # (likely from ``lock_tty_for_child()`` below)! In
        # this case the ``finally:`` below should trigger and the
        # surrounding caller side context should cancel normally
        # relaying back to the caller.

        yield Lock._debug_lock

    finally:
        # only release if *we* actually completed the acquire above.
        if (
            we_acquired
            and Lock._debug_lock.locked()
        ):
            Lock._debug_lock.release()

        # IFF there are no more requesting tasks queued up fire, the
        # "tty-unlocked" event thereby alerting any monitors of the lock that
        # we are now back in the "tty unlocked" state. This is basically
        # and edge triggered signal around an empty queue of sub-actor
        # tasks that may have tried to acquire the lock.
        stats = Lock._debug_lock.statistics()
        if (
            not stats.owner
        ):
            log.runtime(f"No more tasks waiting on tty lock! says {uid}")
            if Lock.no_remote_has_tty is not None:
                Lock.no_remote_has_tty.set()
                Lock.no_remote_has_tty = None

        Lock.global_actor_in_debug = None

        log.runtime(
            f"TTY lock released, remote task: {task_name}:{uid}"
        )
|
|
||||||
|
|
||||||
|
|
||||||
@tractor.context
async def lock_tty_for_child(

    ctx: tractor.Context,
    subactor_uid: tuple[str, str]

) -> str:
    '''
    Lock the TTY in the root process of an actor tree in a new
    inter-actor-context-task such that the ``pdbpp`` debugger console
    can be mutex-allocated to the calling sub-actor for REPL control
    without interference by other processes / threads.

    NOTE: this task must be invoked in the root process of the actor
    tree. It is meant to be invoked as an rpc-task and should be
    highly reliable at releasing the mutex complete!

    '''
    task_name = trio.lowlevel.current_task().name

    # refuse actors that were explicitly blocked from debug access.
    if tuple(subactor_uid) in Lock._blocked:
        log.warning(
            f'Actor {subactor_uid} is blocked from acquiring debug lock\n'
            f"remote task: {task_name}:{subactor_uid}"
        )
        # don't drop into the debugger on the cancellation we are
        # about to request ourselves.
        ctx._enter_debugger_on_cancel = False
        await ctx.cancel(f'Debug lock blocked for {subactor_uid}')
        return 'pdb_lock_blocked'

    # TODO: when we get to true remote debugging
    # this will deliver stdin data?

    log.debug(
        "Attempting to acquire TTY lock\n"
        f"remote task: {task_name}:{subactor_uid}"
    )

    log.debug(f"Actor {subactor_uid} is WAITING on stdin hijack lock")
    Lock.shield_sigint()

    try:
        # shield the lock-holding scope so a stray cancel can't leave
        # the child holding the tty with no way to unwind; the scope is
        # published on `Lock` so an IPC cancel request can cancel it
        # explicitly.
        with (
            trio.CancelScope(shield=True) as debug_lock_cs,
        ):
            Lock._root_local_task_cs_in_debug = debug_lock_cs
            async with _acquire_debug_lock_from_root_task(subactor_uid):

                # indicate to child that we've locked stdio
                await ctx.started('Locked')
                log.debug(
                    f"Actor {subactor_uid} acquired stdin hijack lock"
                )

                # wait for unlock pdb by child
                async with ctx.open_stream() as stream:
                    assert await stream.receive() == 'pdb_unlock'

        return "pdb_unlock_complete"

    finally:
        # always clear the published scope and restore trio's SIGINT
        # handling regardless of how we exited.
        Lock._root_local_task_cs_in_debug = None
        Lock.unshield_sigint()
|
|
||||||
|
|
||||||
|
|
||||||
async def wait_for_parent_stdin_hijack(
    actor_uid: tuple[str, str],
    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED
):
    '''
    Connect to the root actor via a ``Context`` and invoke a task which
    locks a root-local TTY lock: ``lock_tty_for_child()``; this func
    should be called in a new task from a child actor **and never the
    root**.

    This function is used by any sub-actor to acquire mutex access to
    the ``pdb`` REPL and thus the root's TTY for interactive debugging
    (see below inside ``_breakpoint()``). It can be used to ensure that
    an intermediate nursery-owning actor does not clobber its children
    if they are in debug (see below inside
    ``maybe_wait_for_debugger()``).

    '''
    # shield the whole request so *we* control when it's torn down;
    # the scope is published on `Lock` so an IPC cancel can reach it.
    with trio.CancelScope(shield=True) as cs:
        Lock._debugger_request_cs = cs

        try:
            async with get_root() as portal:

                # this syncs to child's ``Context.started()`` call.
                async with portal.open_context(

                    tractor._debug.lock_tty_for_child,
                    subactor_uid=actor_uid,

                ) as (ctx, val):

                    log.debug('locked context')
                    assert val == 'Locked'

                    async with ctx.open_stream() as stream:
                        # unblock local caller

                        try:
                            assert Lock.local_pdb_complete
                            # hand the shielded scope back to whoever
                            # `.start()`-ed us, then park until the
                            # local repl session signals completion.
                            task_status.started(cs)
                            await Lock.local_pdb_complete.wait()

                        finally:
                            # TODO: shielding currently can cause hangs...
                            # with trio.CancelScope(shield=True):
                            await stream.send('pdb_unlock')

                    # sync with callee termination
                    assert await ctx.result() == "pdb_unlock_complete"

                log.debug('exitting child side locking task context')

        except ContextCancelled:
            log.warning('Root actor cancelled debug lock')
            raise

        finally:
            Lock.local_task_in_debug = None
            log.debug('Exiting debugger from child')
|
|
||||||
|
|
||||||
|
|
||||||
def mk_mpdb() -> tuple[MultiActorPdb, Callable]:
    '''
    Construct a ``MultiActorPdb`` REPL instance with SIGINT shielded,
    returning it along with the callable which undoes the shielding.

    '''
    repl = MultiActorPdb()
    # signal.signal = pdbpp.hideframe(signal.signal)

    Lock.shield_sigint()

    # XXX: These are the important flags mentioned in
    # https://github.com/python-trio/trio/issues/1155
    # which resolve the traceback spews to console.
    repl.allow_kbdint = True
    repl.nosigint = True

    return repl, Lock.unshield_sigint
|
|
||||||
|
|
||||||
|
|
||||||
async def _breakpoint(

    debug_func: Callable,

    # TODO:
    # shield: bool = False

) -> None:
    '''
    Breakpoint entry for engaging debugger instance sync-interaction,
    from async code, executing in actor runtime (task).

    Handles three cases: re-entrant calls from the task already in
    debug (noop-checkpoint), a child actor requesting the root's tty
    lock over IPC, and the root actor acquiring its local lock
    directly; finally invokes ``debug_func`` synchronously.

    '''
    __tracebackhide__ = True
    actor = tractor.current_actor()
    pdb, undo_sigint = mk_mpdb()
    task_name = trio.lowlevel.current_task().name

    # TODO: is it possible to debug a trio.Cancelled except block?
    # right now it seems like we can kinda do with by shielding
    # around ``tractor.breakpoint()`` but not if we move the shielded
    # scope here???
    # with trio.CancelScope(shield=shield):
    #     await trio.lowlevel.checkpoint()

    # (re)arm the "repl session done" event if no session is pending.
    if (
        not Lock.local_pdb_complete
        or Lock.local_pdb_complete.is_set()
    ):
        Lock.local_pdb_complete = trio.Event()

    # TODO: need a more robust check for the "root" actor
    if (
        not is_root_process()
        and actor._parent_chan  # a connected child
    ):

        if Lock.local_task_in_debug:

            # Recurrence entry case: this task already has the lock and
            # is likely recurrently entering a breakpoint
            if Lock.local_task_in_debug == task_name:
                # noop on recurrent entry case but we want to trigger
                # a checkpoint to allow other actors error-propagate and
                # potetially avoid infinite re-entries in some subactor.
                await trio.lowlevel.checkpoint()
                return

            # if **this** actor is already in debug mode block here
            # waiting for the control to be released - this allows
            # support for recursive entries to `tractor.breakpoint()`
            log.warning(f"{actor.uid} already has a debug lock, waiting...")

            await Lock.local_pdb_complete.wait()
            await trio.sleep(0.1)

        # mark local actor as "in debug mode" to avoid recurrent
        # entries/requests to the root process
        Lock.local_task_in_debug = task_name

        # this **must** be awaited by the caller and is done using the
        # root nursery so that the debugger can continue to run without
        # being restricted by the scope of a new task nursery.

        # TODO: if we want to debug a trio.Cancelled triggered exception
        # we have to figure out how to avoid having the service nursery
        # cancel on this task start? I *think* this works below:
        # ```python
        #   actor._service_n.cancel_scope.shield = shield
        # ```
        # but not entirely sure if that's a sane way to implement it?
        try:
            with trio.CancelScope(shield=True):
                await actor._service_n.start(
                    wait_for_parent_stdin_hijack,
                    actor.uid,
                )
            Lock.repl = pdb
        except RuntimeError:
            Lock.release()

            if actor._cancel_called:
                # service nursery won't be usable and we
                # don't want to lock up the root either way since
                # we're in (the midst of) cancellation.
                return

            raise

    elif is_root_process():

        # we also wait in the root-parent for any child that
        # may have the tty locked prior
        # TODO: wait, what about multiple root tasks acquiring it though?
        if Lock.global_actor_in_debug == actor.uid:
            # re-entrant root process already has it: noop.
            return

        # XXX: since we need to enter pdb synchronously below,
        # we have to release the lock manually from pdb completion
        # callbacks. Can't think of a nicer way then this atm.
        if Lock._debug_lock.locked():
            log.warning(
                'Root actor attempting to shield-acquire active tty lock'
                f' owned by {Lock.global_actor_in_debug}')

            # must shield here to avoid hitting a ``Cancelled`` and
            # a child getting stuck bc we clobbered the tty
            with trio.CancelScope(shield=True):
                await Lock._debug_lock.acquire()
        else:
            # may be cancelled
            await Lock._debug_lock.acquire()

        Lock.global_actor_in_debug = actor.uid
        Lock.local_task_in_debug = task_name
        Lock.repl = pdb

    try:
        # block here one (at the appropriate frame *up*) where
        # ``breakpoint()`` was awaited and begin handling stdio.
        log.debug("Entering the synchronous world of pdb")
        debug_func(actor, pdb)

    except bdb.BdbQuit:
        Lock.release()
        raise

    # XXX: apparently we can't do this without showing this frame
    # in the backtrace on first entry to the REPL? Seems like an odd
    # behaviour that should have been fixed by now. This is also why
    # we scrapped all the @cm approaches that were tried previously.
    # finally:
    #     __tracebackhide__ = True
    #     # frame = sys._getframe()
    #     # last_f = frame.f_back
    #     # last_f.f_globals['__tracebackhide__'] = True
    #     # signal.signal = pdbpp.hideframe(signal.signal)
|
|
||||||
# # signal.signal = pdbpp.hideframe(signal.signal)
|
|
||||||
|
|
||||||
|
|
||||||
def shield_sigint_handler(
    signum: int,
    frame: 'frame',  # type: ignore # noqa
    # pdb_obj: Optional[MultiActorPdb] = None,
    *args,

) -> None:
    '''
    Specialized, debugger-aware SIGINT handler.

    In childred we always ignore to avoid deadlocks since cancellation
    should always be managed by the parent supervising actor. The root
    is always cancelled on ctrl-c.

    '''
    __tracebackhide__ = True

    uid_in_debug = Lock.global_actor_in_debug

    actor = tractor.current_actor()
    # print(f'{actor.uid} in HANDLER with ')

    def do_cancel():
        # If we haven't tried to cancel the runtime then do that instead
        # of raising a KBI (which may non-gracefully destroy
        # a ``trio.run()``).
        if not actor._cancel_called:
            actor.cancel_soon()

        # If the runtime is already cancelled it likely means the user
        # hit ctrl-c again because teardown didn't full take place in
        # which case we do the "hard" raising of a local KBI.
        else:
            raise KeyboardInterrupt

    any_connected = False

    if uid_in_debug is not None:
        # try to see if the supposed (sub)actor in debug still
        # has an active connection to *this* actor, and if not
        # it's likely they aren't using the TTY lock / debugger
        # and we should propagate SIGINT normally.
        chans = actor._peers.get(tuple(uid_in_debug))
        if chans:
            any_connected = any(chan.connected() for chan in chans)
            if not any_connected:
                log.warning(
                    'A global actor reported to be in debug '
                    'but no connection exists for this child:\n'
                    f'{uid_in_debug}\n'
                    'Allowing SIGINT propagation..'
                )
                return do_cancel()

    # only set in the actor actually running the REPL
    pdb_obj = Lock.repl

    # root actor branch that reports whether or not a child
    # has locked debugger.
    if (
        is_root_process()
        and uid_in_debug is not None

        # XXX: only if there is an existing connection to the
        # (sub-)actor in debug do we ignore SIGINT in this
        # parent! Otherwise we may hang waiting for an actor
        # which has already terminated to unlock.
        and any_connected
    ):
        # we are root and some actor is in debug mode
        # if uid_in_debug is not None:

        if pdb_obj:
            name = uid_in_debug[0]
            if name != 'root':
                log.pdb(
                    f"Ignoring SIGINT, child in debug mode: `{uid_in_debug}`"
                )

            else:
                log.pdb(
                    "Ignoring SIGINT while in debug mode"
                )
    elif (
        is_root_process()
    ):
        if pdb_obj:
            log.pdb(
                "Ignoring SIGINT since debug mode is enabled"
            )

        # a local root task is holding the tty: cancel its (not yet
        # cancelled) shielded scope so the lock gets released.
        if (
            Lock._root_local_task_cs_in_debug
            and not Lock._root_local_task_cs_in_debug.cancel_called
        ):
            Lock._root_local_task_cs_in_debug.cancel()

            # revert back to ``trio`` handler asap!
            Lock.unshield_sigint()

    # child actor that has locked the debugger
    elif not is_root_process():

        chan: Channel = actor._parent_chan
        if not chan or not chan.connected():
            log.warning(
                'A global actor reported to be in debug '
                'but no connection exists for its parent:\n'
                f'{uid_in_debug}\n'
                'Allowing SIGINT propagation..'
            )
            return do_cancel()

        task = Lock.local_task_in_debug
        if (
            task
            and pdb_obj
        ):
            log.pdb(
                f"Ignoring SIGINT while task in debug mode: `{task}`"
            )

        # TODO: how to handle the case of an intermediary-child actor
        # that **is not** marked in debug mode? See oustanding issue:
        # https://github.com/goodboy/tractor/issues/320
        # elif debug_mode():

    else:  # XXX: shouldn't ever get here?
        print("WTFWTFWTF")
        raise KeyboardInterrupt

    # NOTE: currently (at least on ``fancycompleter`` 0.9.2)
    # it looks to be that the last command that was run (eg. ll)
    # will be repeated by default.

    # maybe redraw/print last REPL output to console since
    # we want to alert the user that more input is expect since
    # nothing has been done dur to ignoring sigint.
    if (
        pdb_obj  # only when this actor has a REPL engaged
    ):
        # XXX: yah, mega hack, but how else do we catch this madness XD
        if pdb_obj.shname == 'xonsh':
            pdb_obj.stdout.write(pdb_obj.prompt)

        pdb_obj.stdout.flush()

        # TODO: make this work like sticky mode where if there is output
        # detected as written to the tty we redraw this part underneath
        # and erase the past draw of this same bit above?
        # pdb_obj.sticky = True
        # pdb_obj._print_if_sticky()

        # also see these links for an approach from ``ptk``:
        # https://github.com/goodboy/tractor/issues/130#issuecomment-663752040
        # https://github.com/prompt-toolkit/python-prompt-toolkit/blob/c2c6af8a0308f9e5d7c0e28cb8a02963fe0ce07a/prompt_toolkit/patch_stdout.py

    # XXX: lol, see ``pdbpp`` issue:
    # https://github.com/pdbpp/pdbpp/issues/496
|
|
||||||
|
|
||||||
|
|
||||||
def _set_trace(
    actor: Optional[tractor.Actor] = None,
    pdb: Optional[MultiActorPdb] = None,
):
    '''
    Sync callback which attaches the (multi-actor aware) ``pdb`` repl
    at the caller's frame; invoked via ``_breakpoint()`` or directly
    from the global ``breakpoint()`` built-in (sync-code entry).

    '''
    __tracebackhide__ = True
    actor = actor or tractor.current_actor()

    # start 2 levels up in user code
    frame: Optional[FrameType] = sys._getframe()
    if frame:
        frame = frame.f_back  # type: ignore

    if frame and pdb and actor is not None:
        log.pdb(f"\nAttaching pdb to actor: {actor.uid}\n")
        # no f!#$&* idea, but when we're in async land
        # we need 2x frames up?
        frame = frame.f_back

    else:
        # no repl was passed in: build one on the spot (sync entry).
        pdb, undo_sigint = mk_mpdb()

        # we entered the global ``breakpoint()`` built-in from sync code?
        Lock.local_task_in_debug = 'sync'

    pdb.set_trace(frame=frame)
|
|
||||||
|
|
||||||
|
|
||||||
# async ``breakpoint()``-style entrypoint: engages the multi-actor
# repl machinery in ``_breakpoint()`` with ``_set_trace`` as the sync
# frame-attach callback.
breakpoint = partial(
    _breakpoint,
    _set_trace,
)
|
|
||||||
|
|
||||||
|
|
||||||
def _post_mortem(
    actor: tractor.Actor,
    pdb: MultiActorPdb,

) -> None:
    '''
    Enter the ``pdbpp`` post mortem entrypoint using our custom
    debugger instance.

    '''
    log.pdb(f"\nAttaching to pdb in crashed actor: {actor.uid}\n")

    # TODO: you need ``pdbpp`` master (at least this commit
    # https://github.com/pdbpp/pdbpp/commit/b757794857f98d53e3ebbe70879663d7d843a6c2)
    # to fix this and avoid the hang it causes. See issue:
    # https://github.com/pdbpp/pdbpp/issues/480
    # TODO: help with a 3.10+ major release if/when it arrives.

    # hand ``pdbpp`` a factory returning our pre-built repl instance.
    mk_repl = lambda: pdb  # noqa: E731
    pdbpp.xpm(Pdb=mk_repl)
|
|
||||||
|
|
||||||
|
|
||||||
# crash-handling entrypoint: same engine as ``breakpoint`` but with
# ``_post_mortem`` as the sync callback which attaches the repl to the
# active exception's traceback.
post_mortem = partial(
    _breakpoint,
    _post_mortem,
)
|
|
||||||
|
|
||||||
|
|
||||||
async def _maybe_enter_pm(err):
    '''
    Enter the crash handler repl for ``err`` when debug-mode is
    enabled and the error warrants it; return whether the debugger
    was entered.

    '''
    if not debug_mode():
        return False

    # NOTE: don't enter debug mode recursively after quitting pdb
    # Iow, don't re-enter the repl if the `quit` command was issued
    # by the user.
    if isinstance(err, bdb.BdbQuit):
        return False

    # XXX: if the error is the likely result of runtime-wide
    # cancellation, we don't want to enter the debugger since
    # there's races between when the parent actor has killed all
    # comms and when the child tries to contact said parent to
    # acquire the tty lock.

    # Really we just want to mostly avoid catching KBIs here so there
    # might be a simpler check we can do?
    if is_multi_cancelled(err):
        return False

    log.debug("Actor crashed, entering debug mode")
    try:
        await post_mortem()
    finally:
        Lock.release()
    return True
|
|
||||||
|
|
||||||
|
|
||||||
@acm
async def acquire_debug_lock(
    subactor_uid: tuple[str, str],
) -> AsyncGenerator[None, tuple]:
    '''
    Grab root's debug lock on entry, release on exit.

    This helper is for actors which don't actually need
    to acquire the debugger but want to wait until the
    lock is free in the process-tree root.

    '''
    # no-op outside of debug mode.
    if not debug_mode():
        yield None
        return

    async with trio.open_nursery() as n:
        # spawn the lock-holding request task; it hands back its
        # shielded cancel scope once the root lock is held.
        cs = await n.start(
            wait_for_parent_stdin_hijack,
            subactor_uid,
        )
        yield None
        # drop the lock by cancelling the request task on exit.
        cs.cancel()
|
|
||||||
|
|
||||||
|
|
||||||
async def maybe_wait_for_debugger(
|
|
||||||
poll_steps: int = 2,
|
|
||||||
poll_delay: float = 0.1,
|
|
||||||
child_in_debug: bool = False,
|
|
||||||
|
|
||||||
) -> None:
|
|
||||||
|
|
||||||
if (
|
|
||||||
not debug_mode()
|
|
||||||
and not child_in_debug
|
|
||||||
):
|
|
||||||
return
|
|
||||||
|
|
||||||
if (
|
|
||||||
is_root_process()
|
|
||||||
):
|
|
||||||
# If we error in the root but the debugger is
|
|
||||||
# engaged we don't want to prematurely kill (and
|
|
||||||
# thus clobber access to) the local tty since it
|
|
||||||
# will make the pdb repl unusable.
|
|
||||||
# Instead try to wait for pdb to be released before
|
|
||||||
# tearing down.
|
|
||||||
|
|
||||||
sub_in_debug = None
|
|
||||||
|
|
||||||
for _ in range(poll_steps):
|
|
||||||
|
|
||||||
if Lock.global_actor_in_debug:
|
|
||||||
sub_in_debug = tuple(Lock.global_actor_in_debug)
|
|
||||||
|
|
||||||
log.debug('Root polling for debug')
|
|
||||||
|
|
||||||
with trio.CancelScope(shield=True):
|
|
||||||
await trio.sleep(poll_delay)
|
|
||||||
|
|
||||||
# TODO: could this make things more deterministic? wait
|
|
||||||
# to see if a sub-actor task will be scheduled and grab
|
|
||||||
# the tty lock on the next tick?
|
|
||||||
# XXX: doesn't seem to work
|
|
||||||
# await trio.testing.wait_all_tasks_blocked(cushion=0)
|
|
||||||
|
|
||||||
debug_complete = Lock.no_remote_has_tty
|
|
||||||
if (
|
|
||||||
(debug_complete and
|
|
||||||
not debug_complete.is_set())
|
|
||||||
):
|
|
||||||
log.debug(
|
|
||||||
'Root has errored but pdb is in use by '
|
|
||||||
f'child {sub_in_debug}\n'
|
|
||||||
'Waiting on tty lock to release..')
|
|
||||||
|
|
||||||
await debug_complete.wait()
|
|
||||||
|
|
||||||
await trio.sleep(poll_delay)
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
log.debug(
|
|
||||||
'Root acquired TTY LOCK'
|
|
||||||
)
|
|
||||||
|
|
@ -15,50 +15,79 @@
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Actor discovery API.
|
Discovery (protocols) API for automatic addressing and location
|
||||||
|
management of (service) actors.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
from typing import (
|
from typing import (
|
||||||
Optional,
|
|
||||||
Union,
|
|
||||||
AsyncGenerator,
|
AsyncGenerator,
|
||||||
|
AsyncContextManager,
|
||||||
|
TYPE_CHECKING,
|
||||||
)
|
)
|
||||||
from contextlib import asynccontextmanager as acm
|
from contextlib import asynccontextmanager as acm
|
||||||
|
|
||||||
from ._ipc import _connect_chan, Channel
|
from tractor.log import get_logger
|
||||||
|
from .trionics import (
|
||||||
|
gather_contexts,
|
||||||
|
collapse_eg,
|
||||||
|
)
|
||||||
|
from .ipc import _connect_chan, Channel
|
||||||
|
from ._addr import (
|
||||||
|
UnwrappedAddress,
|
||||||
|
Address,
|
||||||
|
wrap_address
|
||||||
|
)
|
||||||
from ._portal import (
|
from ._portal import (
|
||||||
Portal,
|
Portal,
|
||||||
open_portal,
|
open_portal,
|
||||||
LocalPortal,
|
LocalPortal,
|
||||||
)
|
)
|
||||||
from ._state import current_actor, _runtime_vars
|
from ._state import (
|
||||||
|
current_actor,
|
||||||
|
_runtime_vars,
|
||||||
|
_def_tpt_proto,
|
||||||
|
)
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from ._runtime import Actor
|
||||||
|
|
||||||
|
|
||||||
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@acm
|
@acm
|
||||||
async def get_arbiter(
|
async def get_registry(
|
||||||
|
addr: UnwrappedAddress|None = None,
|
||||||
host: str,
|
) -> AsyncGenerator[
|
||||||
port: int,
|
Portal | LocalPortal | None,
|
||||||
|
None,
|
||||||
) -> AsyncGenerator[Union[Portal, LocalPortal], None]:
|
]:
|
||||||
'''Return a portal instance connected to a local or remote
|
|
||||||
arbiter.
|
|
||||||
'''
|
'''
|
||||||
actor = current_actor()
|
Return a portal instance connected to a local or remote
|
||||||
|
registry-service actor; if a connection already exists re-use it
|
||||||
|
(presumably to call a `.register_actor()` registry runtime RPC
|
||||||
|
ep).
|
||||||
|
|
||||||
if not actor:
|
'''
|
||||||
raise RuntimeError("No actor instance has been defined yet?")
|
actor: Actor = current_actor()
|
||||||
|
if actor.is_registrar:
|
||||||
if actor.is_arbiter:
|
|
||||||
# we're already the arbiter
|
# we're already the arbiter
|
||||||
# (likely a re-entrant call from the arbiter actor)
|
# (likely a re-entrant call from the arbiter actor)
|
||||||
yield LocalPortal(actor, Channel((host, port)))
|
yield LocalPortal(
|
||||||
|
actor,
|
||||||
|
Channel(transport=None)
|
||||||
|
# ^XXX, we DO NOT actually provide nor connect an
|
||||||
|
# underlying transport since this is merely an API shim.
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
async with _connect_chan(host, port) as chan:
|
# TODO: try to look pre-existing connection from
|
||||||
|
# `Server._peers` and use it instead?
|
||||||
async with open_portal(chan) as arb_portal:
|
async with (
|
||||||
|
_connect_chan(addr) as chan,
|
||||||
yield arb_portal
|
open_portal(chan) as regstr_ptl,
|
||||||
|
):
|
||||||
|
yield regstr_ptl
|
||||||
|
|
||||||
|
|
||||||
@acm
|
@acm
|
||||||
|
|
@ -66,51 +95,131 @@ async def get_root(
|
||||||
**kwargs,
|
**kwargs,
|
||||||
) -> AsyncGenerator[Portal, None]:
|
) -> AsyncGenerator[Portal, None]:
|
||||||
|
|
||||||
host, port = _runtime_vars['_root_mailbox']
|
# TODO: rename mailbox to `_root_maddr` when we finally
|
||||||
assert host is not None
|
# add and impl libp2p multi-addrs?
|
||||||
|
addr = _runtime_vars['_root_mailbox']
|
||||||
|
|
||||||
async with _connect_chan(host, port) as chan:
|
async with (
|
||||||
async with open_portal(chan, **kwargs) as portal:
|
_connect_chan(addr) as chan,
|
||||||
yield portal
|
open_portal(chan, **kwargs) as portal,
|
||||||
|
):
|
||||||
|
yield portal
|
||||||
|
|
||||||
|
|
||||||
|
def get_peer_by_name(
|
||||||
|
name: str,
|
||||||
|
# uuid: str|None = None,
|
||||||
|
|
||||||
|
) -> list[Channel]|None: # at least 1
|
||||||
|
'''
|
||||||
|
Scan for an existing connection (set) to a named actor
|
||||||
|
and return any channels from `Server._peers: dict`.
|
||||||
|
|
||||||
|
This is an optimization method over querying the registrar for
|
||||||
|
the same info.
|
||||||
|
|
||||||
|
'''
|
||||||
|
actor: Actor = current_actor()
|
||||||
|
to_scan: dict[tuple, list[Channel]] = actor.ipc_server._peers.copy()
|
||||||
|
|
||||||
|
# TODO: is this ever needed? creates a duplicate channel on actor._peers
|
||||||
|
# when multiple find_actor calls are made to same actor from a single ctx
|
||||||
|
# which causes actor exit to hang waiting forever on
|
||||||
|
# `actor._no_more_peers.wait()` in `_runtime.async_main`
|
||||||
|
|
||||||
|
# pchan: Channel|None = actor._parent_chan
|
||||||
|
# if pchan and pchan.uid not in to_scan:
|
||||||
|
# to_scan[pchan.uid].append(pchan)
|
||||||
|
|
||||||
|
for aid, chans in to_scan.items():
|
||||||
|
_, peer_name = aid
|
||||||
|
if name == peer_name:
|
||||||
|
if not chans:
|
||||||
|
log.warning(
|
||||||
|
'No IPC chans for matching peer {peer_name}\n'
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
return chans
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
@acm
|
@acm
|
||||||
async def query_actor(
|
async def query_actor(
|
||||||
name: str,
|
name: str,
|
||||||
arbiter_sockaddr: Optional[tuple[str, int]] = None,
|
regaddr: UnwrappedAddress|None = None,
|
||||||
|
|
||||||
) -> AsyncGenerator[tuple[str, int], None]:
|
) -> AsyncGenerator[
|
||||||
|
UnwrappedAddress|None,
|
||||||
|
None,
|
||||||
|
]:
|
||||||
'''
|
'''
|
||||||
Simple address lookup for a given actor name.
|
Lookup a transport address (by actor name) via querying a registrar
|
||||||
|
listening @ `regaddr`.
|
||||||
|
|
||||||
Returns the (socket) address or ``None``.
|
Returns the transport protocol (socket) address or `None` if no
|
||||||
|
entry under that name exists.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
actor = current_actor()
|
actor: Actor = current_actor()
|
||||||
async with get_arbiter(
|
if (
|
||||||
*arbiter_sockaddr or actor._arb_addr
|
name == 'registrar'
|
||||||
) as arb_portal:
|
and actor.is_registrar
|
||||||
|
):
|
||||||
|
raise RuntimeError(
|
||||||
|
'The current actor IS the registry!?'
|
||||||
|
)
|
||||||
|
|
||||||
sockaddr = await arb_portal.run_from_ns(
|
maybe_peers: list[Channel]|None = get_peer_by_name(name)
|
||||||
|
if maybe_peers:
|
||||||
|
yield maybe_peers[0].raddr
|
||||||
|
return
|
||||||
|
|
||||||
|
reg_portal: Portal
|
||||||
|
regaddr: Address = wrap_address(regaddr) or actor.reg_addrs[0]
|
||||||
|
async with get_registry(regaddr) as reg_portal:
|
||||||
|
# TODO: return portals to all available actors - for now
|
||||||
|
# just the last one that registered
|
||||||
|
addr: UnwrappedAddress = await reg_portal.run_from_ns(
|
||||||
'self',
|
'self',
|
||||||
'find_actor',
|
'find_actor',
|
||||||
name=name,
|
name=name,
|
||||||
)
|
)
|
||||||
|
yield addr
|
||||||
|
|
||||||
# TODO: return portals to all available actors - for now just
|
|
||||||
# the last one that registered
|
|
||||||
if name == 'arbiter' and actor.is_arbiter:
|
|
||||||
raise RuntimeError("The current actor is the arbiter")
|
|
||||||
|
|
||||||
yield sockaddr if sockaddr else None
|
@acm
|
||||||
|
async def maybe_open_portal(
|
||||||
|
addr: UnwrappedAddress,
|
||||||
|
name: str,
|
||||||
|
):
|
||||||
|
async with query_actor(
|
||||||
|
name=name,
|
||||||
|
regaddr=addr,
|
||||||
|
) as addr:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if addr:
|
||||||
|
async with _connect_chan(addr) as chan:
|
||||||
|
async with open_portal(chan) as portal:
|
||||||
|
yield portal
|
||||||
|
else:
|
||||||
|
yield None
|
||||||
|
|
||||||
|
|
||||||
@acm
|
@acm
|
||||||
async def find_actor(
|
async def find_actor(
|
||||||
name: str,
|
name: str,
|
||||||
arbiter_sockaddr: tuple[str, int] | None = None
|
registry_addrs: list[UnwrappedAddress]|None = None,
|
||||||
|
enable_transports: list[str] = [_def_tpt_proto],
|
||||||
|
|
||||||
) -> AsyncGenerator[Optional[Portal], None]:
|
only_first: bool = True,
|
||||||
|
raise_on_none: bool = False,
|
||||||
|
|
||||||
|
) -> AsyncGenerator[
|
||||||
|
Portal | list[Portal] | None,
|
||||||
|
None,
|
||||||
|
]:
|
||||||
'''
|
'''
|
||||||
Ask the arbiter to find actor(s) by name.
|
Ask the arbiter to find actor(s) by name.
|
||||||
|
|
||||||
|
|
@ -118,40 +227,106 @@ async def find_actor(
|
||||||
known to the arbiter.
|
known to the arbiter.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
async with query_actor(
|
# optimization path, use any pre-existing peer channel
|
||||||
name=name,
|
maybe_peers: list[Channel]|None = get_peer_by_name(name)
|
||||||
arbiter_sockaddr=arbiter_sockaddr,
|
if maybe_peers and only_first:
|
||||||
) as sockaddr:
|
async with open_portal(maybe_peers[0]) as peer_portal:
|
||||||
|
yield peer_portal
|
||||||
|
return
|
||||||
|
|
||||||
if sockaddr:
|
if not registry_addrs:
|
||||||
async with _connect_chan(*sockaddr) as chan:
|
# XXX NOTE: make sure to dynamically read the value on
|
||||||
async with open_portal(chan) as portal:
|
# every call since something may change it globally (eg.
|
||||||
yield portal
|
# like in our discovery test suite)!
|
||||||
else:
|
from ._addr import default_lo_addrs
|
||||||
|
registry_addrs = (
|
||||||
|
_runtime_vars['_registry_addrs']
|
||||||
|
or
|
||||||
|
default_lo_addrs(enable_transports)
|
||||||
|
)
|
||||||
|
|
||||||
|
maybe_portals: list[
|
||||||
|
AsyncContextManager[UnwrappedAddress]
|
||||||
|
] = list(
|
||||||
|
maybe_open_portal(
|
||||||
|
addr=addr,
|
||||||
|
name=name,
|
||||||
|
)
|
||||||
|
for addr in registry_addrs
|
||||||
|
)
|
||||||
|
portals: list[Portal]
|
||||||
|
async with (
|
||||||
|
collapse_eg(),
|
||||||
|
gather_contexts(
|
||||||
|
mngrs=maybe_portals,
|
||||||
|
) as portals,
|
||||||
|
):
|
||||||
|
# log.runtime(
|
||||||
|
# 'Gathered portals:\n'
|
||||||
|
# f'{portals}'
|
||||||
|
# )
|
||||||
|
# NOTE: `gather_contexts()` will return a
|
||||||
|
# `tuple[None, None, ..., None]` if no contact
|
||||||
|
# can be made with any regstrar at any of the
|
||||||
|
# N provided addrs!
|
||||||
|
if not any(portals):
|
||||||
|
if raise_on_none:
|
||||||
|
raise RuntimeError(
|
||||||
|
f'No actor "{name}" found registered @ {registry_addrs}'
|
||||||
|
)
|
||||||
yield None
|
yield None
|
||||||
|
return
|
||||||
|
|
||||||
|
portals: list[Portal] = list(portals)
|
||||||
|
if only_first:
|
||||||
|
yield portals[0]
|
||||||
|
|
||||||
|
else:
|
||||||
|
# TODO: currently this may return multiple portals
|
||||||
|
# given there are multi-homed or multiple registrars..
|
||||||
|
# SO, we probably need de-duplication logic?
|
||||||
|
yield portals
|
||||||
|
|
||||||
|
|
||||||
@acm
|
@acm
|
||||||
async def wait_for_actor(
|
async def wait_for_actor(
|
||||||
name: str,
|
name: str,
|
||||||
arbiter_sockaddr: tuple[str, int] | None = None
|
registry_addr: UnwrappedAddress | None = None,
|
||||||
|
|
||||||
) -> AsyncGenerator[Portal, None]:
|
) -> AsyncGenerator[Portal, None]:
|
||||||
"""Wait on an actor to register with the arbiter.
|
'''
|
||||||
|
Wait on at least one peer actor to register `name` with the
|
||||||
|
registrar, yield a `Portal to the first registree.
|
||||||
|
|
||||||
A portal to the first registered actor is returned.
|
'''
|
||||||
"""
|
actor: Actor = current_actor()
|
||||||
actor = current_actor()
|
|
||||||
|
|
||||||
async with get_arbiter(
|
# optimization path, use any pre-existing peer channel
|
||||||
*arbiter_sockaddr or actor._arb_addr,
|
maybe_peers: list[Channel]|None = get_peer_by_name(name)
|
||||||
) as arb_portal:
|
if maybe_peers:
|
||||||
sockaddrs = await arb_portal.run_from_ns(
|
async with open_portal(maybe_peers[0]) as peer_portal:
|
||||||
|
yield peer_portal
|
||||||
|
return
|
||||||
|
|
||||||
|
regaddr: UnwrappedAddress = (
|
||||||
|
registry_addr
|
||||||
|
or
|
||||||
|
actor.reg_addrs[0]
|
||||||
|
)
|
||||||
|
# TODO: use `.trionics.gather_contexts()` like
|
||||||
|
# above in `find_actor()` as well?
|
||||||
|
reg_portal: Portal
|
||||||
|
async with get_registry(regaddr) as reg_portal:
|
||||||
|
addrs = await reg_portal.run_from_ns(
|
||||||
'self',
|
'self',
|
||||||
'wait_for_actor',
|
'wait_for_actor',
|
||||||
name=name,
|
name=name,
|
||||||
)
|
)
|
||||||
sockaddr = sockaddrs[-1]
|
|
||||||
|
|
||||||
async with _connect_chan(*sockaddr) as chan:
|
# get latest registered addr by default?
|
||||||
|
# TODO: offer multi-portal yields in multi-homed case?
|
||||||
|
addr: UnwrappedAddress = addrs[-1]
|
||||||
|
|
||||||
|
async with _connect_chan(addr) as chan:
|
||||||
async with open_portal(chan) as portal:
|
async with open_portal(chan) as portal:
|
||||||
yield portal
|
yield portal
|
||||||
|
|
|
||||||
|
|
@ -20,6 +20,8 @@ Sub-process entry points.
|
||||||
"""
|
"""
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
from functools import partial
|
from functools import partial
|
||||||
|
import multiprocessing as mp
|
||||||
|
# import os
|
||||||
from typing import (
|
from typing import (
|
||||||
Any,
|
Any,
|
||||||
TYPE_CHECKING,
|
TYPE_CHECKING,
|
||||||
|
|
@ -32,7 +34,13 @@ from .log import (
|
||||||
get_logger,
|
get_logger,
|
||||||
)
|
)
|
||||||
from . import _state
|
from . import _state
|
||||||
|
from .devx import (
|
||||||
|
_frame_stack,
|
||||||
|
pformat,
|
||||||
|
)
|
||||||
|
# from .msg import pretty_struct
|
||||||
from .to_asyncio import run_as_asyncio_guest
|
from .to_asyncio import run_as_asyncio_guest
|
||||||
|
from ._addr import UnwrappedAddress
|
||||||
from ._runtime import (
|
from ._runtime import (
|
||||||
async_main,
|
async_main,
|
||||||
Actor,
|
Actor,
|
||||||
|
|
@ -47,38 +55,40 @@ log = get_logger(__name__)
|
||||||
|
|
||||||
def _mp_main(
|
def _mp_main(
|
||||||
|
|
||||||
actor: Actor, # type: ignore
|
actor: Actor,
|
||||||
accept_addr: tuple[str, int],
|
accept_addrs: list[UnwrappedAddress],
|
||||||
forkserver_info: tuple[Any, Any, Any, Any, Any],
|
forkserver_info: tuple[Any, Any, Any, Any, Any],
|
||||||
start_method: SpawnMethodKey,
|
start_method: SpawnMethodKey,
|
||||||
parent_addr: tuple[str, int] | None = None,
|
parent_addr: UnwrappedAddress | None = None,
|
||||||
infect_asyncio: bool = False,
|
infect_asyncio: bool = False,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
'''
|
'''
|
||||||
The routine called *after fork* which invokes a fresh ``trio.run``
|
The routine called *after fork* which invokes a fresh `trio.run()`
|
||||||
|
|
||||||
'''
|
'''
|
||||||
actor._forkserver_info = forkserver_info
|
actor._forkserver_info = forkserver_info
|
||||||
from ._spawn import try_set_start_method
|
from ._spawn import try_set_start_method
|
||||||
spawn_ctx = try_set_start_method(start_method)
|
spawn_ctx: mp.context.BaseContext = try_set_start_method(start_method)
|
||||||
|
assert spawn_ctx
|
||||||
|
|
||||||
if actor.loglevel is not None:
|
if actor.loglevel is not None:
|
||||||
log.info(
|
log.info(
|
||||||
f"Setting loglevel for {actor.uid} to {actor.loglevel}")
|
f'Setting loglevel for {actor.uid} to {actor.loglevel}'
|
||||||
|
)
|
||||||
get_console_log(actor.loglevel)
|
get_console_log(actor.loglevel)
|
||||||
|
|
||||||
assert spawn_ctx
|
# TODO: use scops headers like for `trio` below!
|
||||||
|
# (well after we libify it maybe..)
|
||||||
log.info(
|
log.info(
|
||||||
f"Started new {spawn_ctx.current_process()} for {actor.uid}")
|
f'Started new {spawn_ctx.current_process()} for {actor.uid}'
|
||||||
|
# f"parent_addr is {parent_addr}"
|
||||||
_state._current_actor = actor
|
)
|
||||||
|
_state._current_actor: Actor = actor
|
||||||
log.debug(f"parent_addr is {parent_addr}")
|
|
||||||
trio_main = partial(
|
trio_main = partial(
|
||||||
async_main,
|
async_main,
|
||||||
actor,
|
actor=actor,
|
||||||
accept_addr,
|
accept_addrs=accept_addrs,
|
||||||
parent_addr=parent_addr
|
parent_addr=parent_addr
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
|
|
@ -91,14 +101,15 @@ def _mp_main(
|
||||||
pass # handle it the same way trio does?
|
pass # handle it the same way trio does?
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
log.info(f"Actor {actor.uid} terminated")
|
log.info(
|
||||||
|
f'`mp`-subactor {actor.uid} exited'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def _trio_main(
|
def _trio_main(
|
||||||
|
actor: Actor,
|
||||||
actor: Actor, # type: ignore
|
|
||||||
*,
|
*,
|
||||||
parent_addr: tuple[str, int] | None = None,
|
parent_addr: UnwrappedAddress|None = None,
|
||||||
infect_asyncio: bool = False,
|
infect_asyncio: bool = False,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
@ -106,33 +117,66 @@ def _trio_main(
|
||||||
Entry point for a `trio_run_in_process` subactor.
|
Entry point for a `trio_run_in_process` subactor.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
log.info(f"Started new trio process for {actor.uid}")
|
_frame_stack.hide_runtime_frames()
|
||||||
|
|
||||||
if actor.loglevel is not None:
|
|
||||||
log.info(
|
|
||||||
f"Setting loglevel for {actor.uid} to {actor.loglevel}")
|
|
||||||
get_console_log(actor.loglevel)
|
|
||||||
|
|
||||||
log.info(
|
|
||||||
f"Started {actor.uid}")
|
|
||||||
|
|
||||||
_state._current_actor = actor
|
_state._current_actor = actor
|
||||||
|
|
||||||
log.debug(f"parent_addr is {parent_addr}")
|
|
||||||
trio_main = partial(
|
trio_main = partial(
|
||||||
async_main,
|
async_main,
|
||||||
actor,
|
actor,
|
||||||
parent_addr=parent_addr
|
parent_addr=parent_addr
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if actor.loglevel is not None:
|
||||||
|
get_console_log(actor.loglevel)
|
||||||
|
log.info(
|
||||||
|
f'Starting `trio` subactor from parent @ '
|
||||||
|
f'{parent_addr}\n'
|
||||||
|
+
|
||||||
|
pformat.nest_from_op(
|
||||||
|
input_op='>(', # see syntax ideas above
|
||||||
|
text=f'{actor}',
|
||||||
|
)
|
||||||
|
)
|
||||||
|
logmeth = log.info
|
||||||
|
exit_status: str = (
|
||||||
|
'Subactor exited\n'
|
||||||
|
+
|
||||||
|
pformat.nest_from_op(
|
||||||
|
input_op=')>', # like a "closed-to-play"-icon from super perspective
|
||||||
|
text=f'{actor}',
|
||||||
|
nest_indent=1,
|
||||||
|
)
|
||||||
|
)
|
||||||
try:
|
try:
|
||||||
if infect_asyncio:
|
if infect_asyncio:
|
||||||
actor._infected_aio = True
|
actor._infected_aio = True
|
||||||
run_as_asyncio_guest(trio_main)
|
run_as_asyncio_guest(trio_main)
|
||||||
else:
|
else:
|
||||||
trio.run(trio_main)
|
trio.run(trio_main)
|
||||||
|
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
log.warning(f"Actor {actor.uid} received KBI")
|
logmeth = log.cancel
|
||||||
|
exit_status: str = (
|
||||||
|
'Actor received KBI (aka an OS-cancel)\n'
|
||||||
|
+
|
||||||
|
pformat.nest_from_op(
|
||||||
|
input_op='c)>', # closed due to cancel (see above)
|
||||||
|
text=f'{actor}',
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except BaseException as err:
|
||||||
|
logmeth = log.error
|
||||||
|
exit_status: str = (
|
||||||
|
'Main actor task exited due to crash?\n'
|
||||||
|
+
|
||||||
|
pformat.nest_from_op(
|
||||||
|
input_op='x)>', # closed by error
|
||||||
|
text=f'{actor}',
|
||||||
|
)
|
||||||
|
)
|
||||||
|
# NOTE since we raise a tb will already be shown on the
|
||||||
|
# console, thus we do NOT use `.exception()` above.
|
||||||
|
raise err
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
log.info(f"Actor {actor.uid} terminated")
|
logmeth(exit_status)
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
Load Diff
480
tractor/_ipc.py
480
tractor/_ipc.py
|
|
@ -1,480 +0,0 @@
|
||||||
# tractor: structured concurrent "actors".
|
|
||||||
# Copyright 2018-eternity Tyler Goodlet.
|
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Inter-process comms abstractions
|
|
||||||
|
|
||||||
"""
|
|
||||||
from __future__ import annotations
|
|
||||||
import platform
|
|
||||||
import struct
|
|
||||||
import typing
|
|
||||||
from collections.abc import (
|
|
||||||
AsyncGenerator,
|
|
||||||
AsyncIterator,
|
|
||||||
)
|
|
||||||
from typing import (
|
|
||||||
Any,
|
|
||||||
runtime_checkable,
|
|
||||||
Optional,
|
|
||||||
Protocol,
|
|
||||||
Type,
|
|
||||||
TypeVar,
|
|
||||||
)
|
|
||||||
|
|
||||||
from tricycle import BufferedReceiveStream
|
|
||||||
import msgspec
|
|
||||||
import trio
|
|
||||||
from async_generator import asynccontextmanager
|
|
||||||
|
|
||||||
from .log import get_logger
|
|
||||||
from ._exceptions import TransportClosed
|
|
||||||
log = get_logger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
_is_windows = platform.system() == 'Windows'
|
|
||||||
log = get_logger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def get_stream_addrs(stream: trio.SocketStream) -> tuple:
|
|
||||||
# should both be IP sockets
|
|
||||||
lsockname = stream.socket.getsockname()
|
|
||||||
rsockname = stream.socket.getpeername()
|
|
||||||
return (
|
|
||||||
tuple(lsockname[:2]),
|
|
||||||
tuple(rsockname[:2]),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
MsgType = TypeVar("MsgType")
|
|
||||||
|
|
||||||
# TODO: consider using a generic def and indexing with our eventual
|
|
||||||
# msg definition/types?
|
|
||||||
# - https://docs.python.org/3/library/typing.html#typing.Protocol
|
|
||||||
# - https://jcristharif.com/msgspec/usage.html#structs
|
|
||||||
|
|
||||||
|
|
||||||
@runtime_checkable
|
|
||||||
class MsgTransport(Protocol[MsgType]):
|
|
||||||
|
|
||||||
stream: trio.SocketStream
|
|
||||||
drained: list[MsgType]
|
|
||||||
|
|
||||||
def __init__(self, stream: trio.SocketStream) -> None:
|
|
||||||
...
|
|
||||||
|
|
||||||
# XXX: should this instead be called `.sendall()`?
|
|
||||||
async def send(self, msg: MsgType) -> None:
|
|
||||||
...
|
|
||||||
|
|
||||||
async def recv(self) -> MsgType:
|
|
||||||
...
|
|
||||||
|
|
||||||
def __aiter__(self) -> MsgType:
|
|
||||||
...
|
|
||||||
|
|
||||||
def connected(self) -> bool:
|
|
||||||
...
|
|
||||||
|
|
||||||
# defining this sync otherwise it causes a mypy error because it
|
|
||||||
# can't figure out it's a generator i guess?..?
|
|
||||||
def drain(self) -> AsyncIterator[dict]:
|
|
||||||
...
|
|
||||||
|
|
||||||
@property
|
|
||||||
def laddr(self) -> tuple[str, int]:
|
|
||||||
...
|
|
||||||
|
|
||||||
@property
|
|
||||||
def raddr(self) -> tuple[str, int]:
|
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
# TODO: not sure why we have to inherit here, but it seems to be an
|
|
||||||
# issue with ``get_msg_transport()`` returning a ``Type[Protocol]``;
|
|
||||||
# probably should make a `mypy` issue?
|
|
||||||
class MsgpackTCPStream(MsgTransport):
|
|
||||||
'''
|
|
||||||
A ``trio.SocketStream`` delivering ``msgpack`` formatted data
|
|
||||||
using the ``msgspec`` codec lib.
|
|
||||||
|
|
||||||
'''
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
stream: trio.SocketStream,
|
|
||||||
prefix_size: int = 4,
|
|
||||||
|
|
||||||
) -> None:
|
|
||||||
|
|
||||||
self.stream = stream
|
|
||||||
assert self.stream.socket
|
|
||||||
|
|
||||||
# should both be IP sockets
|
|
||||||
self._laddr, self._raddr = get_stream_addrs(stream)
|
|
||||||
|
|
||||||
# create read loop instance
|
|
||||||
self._agen = self._iter_packets()
|
|
||||||
self._send_lock = trio.StrictFIFOLock()
|
|
||||||
|
|
||||||
# public i guess?
|
|
||||||
self.drained: list[dict] = []
|
|
||||||
|
|
||||||
self.recv_stream = BufferedReceiveStream(transport_stream=stream)
|
|
||||||
self.prefix_size = prefix_size
|
|
||||||
|
|
||||||
# TODO: struct aware messaging coders
|
|
||||||
self.encode = msgspec.msgpack.Encoder().encode
|
|
||||||
self.decode = msgspec.msgpack.Decoder().decode # dict[str, Any])
|
|
||||||
|
|
||||||
async def _iter_packets(self) -> AsyncGenerator[dict, None]:
|
|
||||||
'''Yield packets from the underlying stream.
|
|
||||||
|
|
||||||
'''
|
|
||||||
import msgspec # noqa
|
|
||||||
decodes_failed: int = 0
|
|
||||||
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
header = await self.recv_stream.receive_exactly(4)
|
|
||||||
|
|
||||||
except (
|
|
||||||
ValueError,
|
|
||||||
ConnectionResetError,
|
|
||||||
|
|
||||||
# not sure entirely why we need this but without it we
|
|
||||||
# seem to be getting racy failures here on
|
|
||||||
# arbiter/registry name subs..
|
|
||||||
trio.BrokenResourceError,
|
|
||||||
):
|
|
||||||
raise TransportClosed(
|
|
||||||
f'transport {self} was already closed prior ro read'
|
|
||||||
)
|
|
||||||
|
|
||||||
if header == b'':
|
|
||||||
raise TransportClosed(
|
|
||||||
f'transport {self} was already closed prior ro read'
|
|
||||||
)
|
|
||||||
|
|
||||||
size, = struct.unpack("<I", header)
|
|
||||||
|
|
||||||
log.transport(f'received header {size}') # type: ignore
|
|
||||||
|
|
||||||
msg_bytes = await self.recv_stream.receive_exactly(size)
|
|
||||||
|
|
||||||
log.transport(f"received {msg_bytes}") # type: ignore
|
|
||||||
try:
|
|
||||||
yield self.decode(msg_bytes)
|
|
||||||
except (
|
|
||||||
msgspec.DecodeError,
|
|
||||||
UnicodeDecodeError,
|
|
||||||
):
|
|
||||||
if decodes_failed < 4:
|
|
||||||
# ignore decoding errors for now and assume they have to
|
|
||||||
# do with a channel drop - hope that receiving from the
|
|
||||||
# channel will raise an expected error and bubble up.
|
|
||||||
try:
|
|
||||||
msg_str: str | bytes = msg_bytes.decode()
|
|
||||||
except UnicodeDecodeError:
|
|
||||||
msg_str = msg_bytes
|
|
||||||
|
|
||||||
log.error(
|
|
||||||
'`msgspec` failed to decode!?\n'
|
|
||||||
'dumping bytes:\n'
|
|
||||||
f'{msg_str!r}'
|
|
||||||
)
|
|
||||||
decodes_failed += 1
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
|
|
||||||
async def send(self, msg: Any) -> None:
|
|
||||||
async with self._send_lock:
|
|
||||||
|
|
||||||
bytes_data: bytes = self.encode(msg)
|
|
||||||
|
|
||||||
# supposedly the fastest says,
|
|
||||||
# https://stackoverflow.com/a/54027962
|
|
||||||
size: bytes = struct.pack("<I", len(bytes_data))
|
|
||||||
|
|
||||||
return await self.stream.send_all(size + bytes_data)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def laddr(self) -> tuple[str, int]:
|
|
||||||
return self._laddr
|
|
||||||
|
|
||||||
@property
|
|
||||||
def raddr(self) -> tuple[str, int]:
|
|
||||||
return self._raddr
|
|
||||||
|
|
||||||
async def recv(self) -> Any:
|
|
||||||
return await self._agen.asend(None)
|
|
||||||
|
|
||||||
async def drain(self) -> AsyncIterator[dict]:
|
|
||||||
'''
|
|
||||||
Drain the stream's remaining messages sent from
|
|
||||||
the far end until the connection is closed by
|
|
||||||
the peer.
|
|
||||||
|
|
||||||
'''
|
|
||||||
try:
|
|
||||||
async for msg in self._iter_packets():
|
|
||||||
self.drained.append(msg)
|
|
||||||
except TransportClosed:
|
|
||||||
for msg in self.drained:
|
|
||||||
yield msg
|
|
||||||
|
|
||||||
def __aiter__(self):
|
|
||||||
return self._agen
|
|
||||||
|
|
||||||
def connected(self) -> bool:
|
|
||||||
return self.stream.socket.fileno() != -1
|
|
||||||
|
|
||||||
|
|
||||||
def get_msg_transport(
|
|
||||||
|
|
||||||
key: tuple[str, str],
|
|
||||||
|
|
||||||
) -> Type[MsgTransport]:
|
|
||||||
|
|
||||||
return {
|
|
||||||
('msgpack', 'tcp'): MsgpackTCPStream,
|
|
||||||
}[key]
|
|
||||||
|
|
||||||
|
|
||||||
class Channel:
|
|
||||||
'''
|
|
||||||
An inter-process channel for communication between (remote) actors.
|
|
||||||
|
|
||||||
Wraps a ``MsgStream``: transport + encoding IPC connection.
|
|
||||||
|
|
||||||
Currently we only support ``trio.SocketStream`` for transport
|
|
||||||
(aka TCP) and the ``msgpack`` interchange format via the ``msgspec``
|
|
||||||
codec libary.
|
|
||||||
|
|
||||||
'''
|
|
||||||
def __init__(
|
|
||||||
|
|
||||||
self,
|
|
||||||
destaddr: Optional[tuple[str, int]],
|
|
||||||
|
|
||||||
msg_transport_type_key: tuple[str, str] = ('msgpack', 'tcp'),
|
|
||||||
|
|
||||||
# TODO: optional reconnection support?
|
|
||||||
# auto_reconnect: bool = False,
|
|
||||||
# on_reconnect: typing.Callable[..., typing.Awaitable] = None,
|
|
||||||
|
|
||||||
) -> None:
|
|
||||||
|
|
||||||
# self._recon_seq = on_reconnect
|
|
||||||
# self._autorecon = auto_reconnect
|
|
||||||
|
|
||||||
self._destaddr = destaddr
|
|
||||||
self._transport_key = msg_transport_type_key
|
|
||||||
|
|
||||||
# Either created in ``.connect()`` or passed in by
|
|
||||||
# user in ``.from_stream()``.
|
|
||||||
self._stream: Optional[trio.SocketStream] = None
|
|
||||||
self.msgstream: Optional[MsgTransport] = None
|
|
||||||
|
|
||||||
# set after handshake - always uid of far end
|
|
||||||
self.uid: Optional[tuple[str, str]] = None
|
|
||||||
|
|
||||||
self._agen = self._aiter_recv()
|
|
||||||
self._exc: Optional[Exception] = None # set if far end actor errors
|
|
||||||
self._closed: bool = False
|
|
||||||
# flag set on ``Portal.cancel_actor()`` indicating
|
|
||||||
# remote (peer) cancellation of the far end actor runtime.
|
|
||||||
self._cancel_called: bool = False # set on ``Portal.cancel_actor()``
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def from_stream(
|
|
||||||
cls,
|
|
||||||
stream: trio.SocketStream,
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) -> Channel:
|
|
||||||
|
|
||||||
src, dst = get_stream_addrs(stream)
|
|
||||||
chan = Channel(destaddr=dst, **kwargs)
|
|
||||||
|
|
||||||
# set immediately here from provided instance
|
|
||||||
chan._stream = stream
|
|
||||||
chan.set_msg_transport(stream)
|
|
||||||
return chan
|
|
||||||
|
|
||||||
def set_msg_transport(
|
|
||||||
self,
|
|
||||||
stream: trio.SocketStream,
|
|
||||||
type_key: Optional[tuple[str, str]] = None,
|
|
||||||
|
|
||||||
) -> MsgTransport:
|
|
||||||
type_key = type_key or self._transport_key
|
|
||||||
self.msgstream = get_msg_transport(type_key)(stream)
|
|
||||||
return self.msgstream
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
if self.msgstream:
|
|
||||||
return repr(
|
|
||||||
self.msgstream.stream.socket._sock).replace( # type: ignore
|
|
||||||
"socket.socket", "Channel")
|
|
||||||
return object.__repr__(self)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def laddr(self) -> Optional[tuple[str, int]]:
|
|
||||||
return self.msgstream.laddr if self.msgstream else None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def raddr(self) -> Optional[tuple[str, int]]:
|
|
||||||
return self.msgstream.raddr if self.msgstream else None
|
|
||||||
|
|
||||||
async def connect(
|
|
||||||
self,
|
|
||||||
destaddr: tuple[Any, ...] | None = None,
|
|
||||||
**kwargs
|
|
||||||
|
|
||||||
) -> MsgTransport:
|
|
||||||
|
|
||||||
if self.connected():
|
|
||||||
raise RuntimeError("channel is already connected?")
|
|
||||||
|
|
||||||
destaddr = destaddr or self._destaddr
|
|
||||||
assert isinstance(destaddr, tuple)
|
|
||||||
|
|
||||||
stream = await trio.open_tcp_stream(
|
|
||||||
*destaddr,
|
|
||||||
**kwargs
|
|
||||||
)
|
|
||||||
msgstream = self.set_msg_transport(stream)
|
|
||||||
|
|
||||||
log.transport(
|
|
||||||
f'Opened channel[{type(msgstream)}]: {self.laddr} -> {self.raddr}'
|
|
||||||
)
|
|
||||||
return msgstream
|
|
||||||
|
|
||||||
async def send(self, item: Any) -> None:
|
|
||||||
|
|
||||||
log.transport(f"send `{item}`") # type: ignore
|
|
||||||
assert self.msgstream
|
|
||||||
|
|
||||||
await self.msgstream.send(item)
|
|
||||||
|
|
||||||
async def recv(self) -> Any:
|
|
||||||
assert self.msgstream
|
|
||||||
return await self.msgstream.recv()
|
|
||||||
|
|
||||||
# try:
|
|
||||||
# return await self.msgstream.recv()
|
|
||||||
# except trio.BrokenResourceError:
|
|
||||||
# if self._autorecon:
|
|
||||||
# await self._reconnect()
|
|
||||||
# return await self.recv()
|
|
||||||
# raise
|
|
||||||
|
|
||||||
async def aclose(self) -> None:
|
|
||||||
|
|
||||||
log.transport(
|
|
||||||
f'Closing channel to {self.uid} '
|
|
||||||
f'{self.laddr} -> {self.raddr}'
|
|
||||||
)
|
|
||||||
assert self.msgstream
|
|
||||||
await self.msgstream.stream.aclose()
|
|
||||||
self._closed = True
|
|
||||||
|
|
||||||
async def __aenter__(self):
|
|
||||||
await self.connect()
|
|
||||||
return self
|
|
||||||
|
|
||||||
async def __aexit__(self, *args):
|
|
||||||
await self.aclose(*args)
|
|
||||||
|
|
||||||
def __aiter__(self):
|
|
||||||
return self._agen
|
|
||||||
|
|
||||||
# async def _reconnect(self) -> None:
|
|
||||||
# """Handle connection failures by polling until a reconnect can be
|
|
||||||
# established.
|
|
||||||
# """
|
|
||||||
# down = False
|
|
||||||
# while True:
|
|
||||||
# try:
|
|
||||||
# with trio.move_on_after(3) as cancel_scope:
|
|
||||||
# await self.connect()
|
|
||||||
# cancelled = cancel_scope.cancelled_caught
|
|
||||||
# if cancelled:
|
|
||||||
# log.transport(
|
|
||||||
# "Reconnect timed out after 3 seconds, retrying...")
|
|
||||||
# continue
|
|
||||||
# else:
|
|
||||||
# log.transport("Stream connection re-established!")
|
|
||||||
|
|
||||||
# # TODO: run any reconnection sequence
|
|
||||||
# # on_recon = self._recon_seq
|
|
||||||
# # if on_recon:
|
|
||||||
# # await on_recon(self)
|
|
||||||
|
|
||||||
# break
|
|
||||||
# except (OSError, ConnectionRefusedError):
|
|
||||||
# if not down:
|
|
||||||
# down = True
|
|
||||||
# log.transport(
|
|
||||||
# f"Connection to {self.raddr} went down, waiting"
|
|
||||||
# " for re-establishment")
|
|
||||||
# await trio.sleep(1)
|
|
||||||
|
|
||||||
async def _aiter_recv(
|
|
||||||
self
|
|
||||||
) -> AsyncGenerator[Any, None]:
|
|
||||||
'''
|
|
||||||
Async iterate items from underlying stream.
|
|
||||||
|
|
||||||
'''
|
|
||||||
assert self.msgstream
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
async for item in self.msgstream:
|
|
||||||
yield item
|
|
||||||
# sent = yield item
|
|
||||||
# if sent is not None:
|
|
||||||
# # optimization, passing None through all the
|
|
||||||
# # time is pointless
|
|
||||||
# await self.msgstream.send(sent)
|
|
||||||
except trio.BrokenResourceError:
|
|
||||||
|
|
||||||
# if not self._autorecon:
|
|
||||||
raise
|
|
||||||
|
|
||||||
await self.aclose()
|
|
||||||
|
|
||||||
# if self._autorecon: # attempt reconnect
|
|
||||||
# await self._reconnect()
|
|
||||||
# continue
|
|
||||||
|
|
||||||
def connected(self) -> bool:
|
|
||||||
return self.msgstream.connected() if self.msgstream else False
|
|
||||||
|
|
||||||
|
|
||||||
@asynccontextmanager
|
|
||||||
async def _connect_chan(
|
|
||||||
host: str, port: int
|
|
||||||
) -> typing.AsyncGenerator[Channel, None]:
|
|
||||||
'''
|
|
||||||
Create and connect a channel with disconnect on context manager
|
|
||||||
teardown.
|
|
||||||
|
|
||||||
'''
|
|
||||||
chan = Channel((host, port))
|
|
||||||
await chan.connect()
|
|
||||||
yield chan
|
|
||||||
await chan.aclose()
|
|
||||||
|
|
@ -0,0 +1,151 @@
|
||||||
|
# tractor: structured concurrent "actors".
|
||||||
|
# Copyright 2018-eternity Tyler Goodlet.
|
||||||
|
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
'''
|
||||||
|
Multiaddress parser and utils according the spec(s) defined by
|
||||||
|
`libp2p` and used in dependent project such as `ipfs`:
|
||||||
|
|
||||||
|
- https://docs.libp2p.io/concepts/fundamentals/addressing/
|
||||||
|
- https://github.com/libp2p/specs/blob/master/addressing/README.md
|
||||||
|
|
||||||
|
'''
|
||||||
|
from typing import Iterator
|
||||||
|
|
||||||
|
from bidict import bidict
|
||||||
|
|
||||||
|
# TODO: see if we can leverage libp2p ecosys projects instead of
|
||||||
|
# rolling our own (parser) impls of the above addressing specs:
|
||||||
|
# - https://github.com/libp2p/py-libp2p
|
||||||
|
# - https://docs.libp2p.io/concepts/nat/circuit-relay/#relay-addresses
|
||||||
|
# prots: bidict[int, str] = bidict({
|
||||||
|
prots: bidict[int, str] = {
|
||||||
|
'ipv4': 3,
|
||||||
|
'ipv6': 3,
|
||||||
|
'wg': 3,
|
||||||
|
|
||||||
|
'tcp': 4,
|
||||||
|
'udp': 4,
|
||||||
|
|
||||||
|
# TODO: support the next-gen shite Bo
|
||||||
|
# 'quic': 4,
|
||||||
|
# 'ssh': 7, # via rsyscall bootstrapping
|
||||||
|
}
|
||||||
|
|
||||||
|
prot_params: dict[str, tuple[str]] = {
|
||||||
|
'ipv4': ('addr',),
|
||||||
|
'ipv6': ('addr',),
|
||||||
|
'wg': ('addr', 'port', 'pubkey'),
|
||||||
|
|
||||||
|
'tcp': ('port',),
|
||||||
|
'udp': ('port',),
|
||||||
|
|
||||||
|
# 'quic': ('port',),
|
||||||
|
# 'ssh': ('port',),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def iter_prot_layers(
|
||||||
|
multiaddr: str,
|
||||||
|
) -> Iterator[
|
||||||
|
tuple[
|
||||||
|
int,
|
||||||
|
list[str]
|
||||||
|
]
|
||||||
|
]:
|
||||||
|
'''
|
||||||
|
Unpack a libp2p style "multiaddress" into multiple "segments"
|
||||||
|
for each "layer" of the protocoll stack (in OSI terms).
|
||||||
|
|
||||||
|
'''
|
||||||
|
tokens: list[str] = multiaddr.split('/')
|
||||||
|
root, tokens = tokens[0], tokens[1:]
|
||||||
|
assert not root # there is a root '/' on LHS
|
||||||
|
itokens = iter(tokens)
|
||||||
|
|
||||||
|
prot: str | None = None
|
||||||
|
params: list[str] = []
|
||||||
|
for token in itokens:
|
||||||
|
# every prot path should start with a known
|
||||||
|
# key-str.
|
||||||
|
if token in prots:
|
||||||
|
if prot is None:
|
||||||
|
prot: str = token
|
||||||
|
else:
|
||||||
|
yield prot, params
|
||||||
|
prot = token
|
||||||
|
|
||||||
|
params = []
|
||||||
|
|
||||||
|
elif token not in prots:
|
||||||
|
params.append(token)
|
||||||
|
|
||||||
|
else:
|
||||||
|
yield prot, params
|
||||||
|
|
||||||
|
|
||||||
|
def parse_maddr(
|
||||||
|
multiaddr: str,
|
||||||
|
) -> dict[str, str | int | dict]:
|
||||||
|
'''
|
||||||
|
Parse a libp2p style "multiaddress" into its distinct protocol
|
||||||
|
segments where each segment is of the form:
|
||||||
|
|
||||||
|
`../<protocol>/<param0>/<param1>/../<paramN>`
|
||||||
|
|
||||||
|
and is loaded into a (order preserving) `layers: dict[str,
|
||||||
|
dict[str, Any]` which holds each protocol-layer-segment of the
|
||||||
|
original `str` path as a separate entry according to its approx
|
||||||
|
OSI "layer number".
|
||||||
|
|
||||||
|
Any `paramN` in the path must be distinctly defined by a str-token in the
|
||||||
|
(module global) `prot_params` table.
|
||||||
|
|
||||||
|
For eg. for wireguard which requires an address, port number and publickey
|
||||||
|
the protocol params are specified as the entry:
|
||||||
|
|
||||||
|
'wg': ('addr', 'port', 'pubkey'),
|
||||||
|
|
||||||
|
and are thus parsed from a maddr in that order:
|
||||||
|
`'/wg/1.1.1.1/51820/<pubkey>'`
|
||||||
|
|
||||||
|
'''
|
||||||
|
layers: dict[str, str | int | dict] = {}
|
||||||
|
for (
|
||||||
|
prot_key,
|
||||||
|
params,
|
||||||
|
) in iter_prot_layers(multiaddr):
|
||||||
|
|
||||||
|
layer: int = prots[prot_key] # OSI layer used for sorting
|
||||||
|
ep: dict[str, int | str] = {'layer': layer}
|
||||||
|
layers[prot_key] = ep
|
||||||
|
|
||||||
|
# TODO; validation and resolving of names:
|
||||||
|
# - each param via a validator provided as part of the
|
||||||
|
# prot_params def? (also see `"port"` case below..)
|
||||||
|
# - do a resolv step that will check addrs against
|
||||||
|
# any loaded network.resolv: dict[str, str]
|
||||||
|
rparams: list = list(reversed(params))
|
||||||
|
for key in prot_params[prot_key]:
|
||||||
|
val: str | int = rparams.pop()
|
||||||
|
|
||||||
|
# TODO: UGHH, dunno what we should do for validation
|
||||||
|
# here, put it in the params spec somehow?
|
||||||
|
if key == 'port':
|
||||||
|
val = int(val)
|
||||||
|
|
||||||
|
ep[key] = val
|
||||||
|
|
||||||
|
return layers
|
||||||
|
|
@ -15,70 +15,73 @@
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
Memory boundary "Portals": an API for structured
|
Memory "portal" contruct.
|
||||||
concurrency linked tasks running in disparate memory domains.
|
|
||||||
|
"Memory portals" are both an API and set of IPC wrapping primitives
|
||||||
|
for managing structured concurrency "cancel-scope linked" tasks
|
||||||
|
running in disparate virtual memory domains - at least in different
|
||||||
|
OS processes, possibly on different (hardware) hosts.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
from contextlib import asynccontextmanager as acm
|
||||||
import importlib
|
import importlib
|
||||||
import inspect
|
import inspect
|
||||||
from typing import (
|
from typing import (
|
||||||
Any, Optional,
|
Any,
|
||||||
Callable, AsyncGenerator,
|
Callable,
|
||||||
Type,
|
AsyncGenerator,
|
||||||
|
TYPE_CHECKING,
|
||||||
)
|
)
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from pprint import pformat
|
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
import trio
|
import trio
|
||||||
from async_generator import asynccontextmanager
|
|
||||||
|
|
||||||
from .trionics import maybe_open_nursery
|
from .trionics import (
|
||||||
from ._state import current_actor
|
maybe_open_nursery,
|
||||||
from ._ipc import Channel
|
collapse_eg,
|
||||||
from .log import get_logger
|
)
|
||||||
from .msg import NamespacePath
|
from ._state import (
|
||||||
from ._exceptions import (
|
current_actor,
|
||||||
unpack_error,
|
)
|
||||||
NoResult,
|
from .ipc import Channel
|
||||||
ContextCancelled,
|
from .log import get_logger
|
||||||
|
from .msg import (
|
||||||
|
# Error,
|
||||||
|
PayloadMsg,
|
||||||
|
NamespacePath,
|
||||||
|
Return,
|
||||||
|
)
|
||||||
|
from ._exceptions import (
|
||||||
|
NoResult,
|
||||||
|
TransportClosed,
|
||||||
|
)
|
||||||
|
from ._context import (
|
||||||
|
Context,
|
||||||
|
open_context_from_portal,
|
||||||
|
)
|
||||||
|
from ._streaming import (
|
||||||
|
MsgStream,
|
||||||
)
|
)
|
||||||
from ._streaming import Context, ReceiveMsgStream
|
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from ._runtime import Actor
|
||||||
|
|
||||||
log = get_logger(__name__)
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def _unwrap_msg(
|
|
||||||
msg: dict[str, Any],
|
|
||||||
channel: Channel
|
|
||||||
|
|
||||||
) -> Any:
|
|
||||||
__tracebackhide__ = True
|
|
||||||
try:
|
|
||||||
return msg['return']
|
|
||||||
except KeyError:
|
|
||||||
# internal error should never get here
|
|
||||||
assert msg.get('cid'), "Received internal error at portal?"
|
|
||||||
raise unpack_error(msg, channel) from None
|
|
||||||
|
|
||||||
|
|
||||||
class MessagingError(Exception):
|
|
||||||
'Some kind of unexpected SC messaging dialog issue'
|
|
||||||
|
|
||||||
|
|
||||||
class Portal:
|
class Portal:
|
||||||
'''
|
'''
|
||||||
A 'portal' to a(n) (remote) ``Actor``.
|
A 'portal' to a memory-domain-separated `Actor`.
|
||||||
|
|
||||||
A portal is "opened" (and eventually closed) by one side of an
|
A portal is "opened" (and eventually closed) by one side of an
|
||||||
inter-actor communication context. The side which opens the portal
|
inter-actor communication context. The side which opens the portal
|
||||||
is equivalent to a "caller" in function parlance and usually is
|
is equivalent to a "caller" in function parlance and usually is
|
||||||
either the called actor's parent (in process tree hierarchy terms)
|
either the called actor's parent (in process tree hierarchy terms)
|
||||||
or a client interested in scheduling work to be done remotely in a
|
or a client interested in scheduling work to be done remotely in a
|
||||||
far process.
|
process which has a separate (virtual) memory domain.
|
||||||
|
|
||||||
The portal api allows the "caller" actor to invoke remote routines
|
The portal api allows the "caller" actor to invoke remote routines
|
||||||
and receive results through an underlying ``tractor.Channel`` as
|
and receive results through an underlying ``tractor.Channel`` as
|
||||||
|
|
@ -88,22 +91,53 @@ class Portal:
|
||||||
like having a "portal" between the seperate actor memory spaces.
|
like having a "portal" between the seperate actor memory spaces.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
# the timeout for a remote cancel request sent to
|
# global timeout for remote cancel requests sent to
|
||||||
# a(n) (peer) actor.
|
# connected (peer) actors.
|
||||||
cancel_timeout = 0.5
|
cancel_timeout: float = 0.5
|
||||||
|
|
||||||
def __init__(self, channel: Channel) -> None:
|
def __init__(
|
||||||
self.channel = channel
|
self,
|
||||||
|
channel: Channel,
|
||||||
|
) -> None:
|
||||||
|
|
||||||
|
self._chan: Channel = channel
|
||||||
# during the portal's lifetime
|
# during the portal's lifetime
|
||||||
self._result_msg: Optional[dict] = None
|
self._final_result_pld: Any|None = None
|
||||||
|
self._final_result_msg: PayloadMsg|None = None
|
||||||
|
|
||||||
# When set to a ``Context`` (when _submit_for_result is called)
|
# When set to a ``Context`` (when _submit_for_result is called)
|
||||||
# it is expected that ``result()`` will be awaited at some
|
# it is expected that ``result()`` will be awaited at some
|
||||||
# point.
|
# point.
|
||||||
self._expect_result: Optional[Context] = None
|
self._expect_result_ctx: Context|None = None
|
||||||
self._streams: set[ReceiveMsgStream] = set()
|
self._streams: set[MsgStream] = set()
|
||||||
self.actor = current_actor()
|
|
||||||
|
|
||||||
|
# TODO, this should be PRIVATE (and never used publicly)! since it's just
|
||||||
|
# a cached ref to the local runtime instead of calling
|
||||||
|
# `current_actor()` everywhere.. XD
|
||||||
|
self.actor: Actor = current_actor()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def chan(self) -> Channel:
|
||||||
|
'''
|
||||||
|
Ref to this ctx's underlying `tractor.ipc.Channel`.
|
||||||
|
|
||||||
|
'''
|
||||||
|
return self._chan
|
||||||
|
|
||||||
|
@property
|
||||||
|
def channel(self) -> Channel:
|
||||||
|
'''
|
||||||
|
Proxy to legacy attr name..
|
||||||
|
|
||||||
|
Consider the shorter `Portal.chan` instead of `.channel` ;)
|
||||||
|
'''
|
||||||
|
log.debug(
|
||||||
|
'Consider the shorter `Portal.chan` instead of `.channel` ;)'
|
||||||
|
)
|
||||||
|
return self.chan
|
||||||
|
|
||||||
|
# TODO: factor this out into a `.highlevel` API-wrapper that uses
|
||||||
|
# a single `.open_context()` call underneath.
|
||||||
async def _submit_for_result(
|
async def _submit_for_result(
|
||||||
self,
|
self,
|
||||||
ns: str,
|
ns: str,
|
||||||
|
|
@ -111,32 +145,34 @@ class Portal:
|
||||||
**kwargs
|
**kwargs
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
||||||
assert self._expect_result is None, \
|
if self._expect_result_ctx is not None:
|
||||||
"A pending main result has already been submitted"
|
raise RuntimeError(
|
||||||
|
'A pending main result has already been submitted'
|
||||||
|
)
|
||||||
|
|
||||||
self._expect_result = await self.actor.start_remote_task(
|
self._expect_result_ctx: Context = await self.actor.start_remote_task(
|
||||||
self.channel,
|
self.channel,
|
||||||
ns,
|
nsf=NamespacePath(f'{ns}:{func}'),
|
||||||
func,
|
kwargs=kwargs,
|
||||||
kwargs
|
portal=self,
|
||||||
)
|
)
|
||||||
|
|
||||||
async def _return_once(
|
# TODO: we should deprecate this API right? since if we remove
|
||||||
|
# `.run_in_actor()` (and instead move it to a `.highlevel`
|
||||||
|
# wrapper api (around a single `.open_context()` call) we don't
|
||||||
|
# really have any notion of a "main" remote task any more?
|
||||||
|
#
|
||||||
|
# @api_frame
|
||||||
|
async def wait_for_result(
|
||||||
self,
|
self,
|
||||||
ctx: Context,
|
hide_tb: bool = True,
|
||||||
|
) -> Any:
|
||||||
) -> dict[str, Any]:
|
|
||||||
|
|
||||||
assert ctx._remote_func_type == 'asyncfunc' # single response
|
|
||||||
msg = await ctx._recv_chan.receive()
|
|
||||||
return msg
|
|
||||||
|
|
||||||
async def result(self) -> Any:
|
|
||||||
'''
|
'''
|
||||||
Return the result(s) from the remote actor's "main" task.
|
Return the final result delivered by a `Return`-msg from the
|
||||||
|
remote peer actor's "main" task's `return` statement.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
# __tracebackhide__ = True
|
__tracebackhide__: bool = hide_tb
|
||||||
# Check for non-rpc errors slapped on the
|
# Check for non-rpc errors slapped on the
|
||||||
# channel for which we always raise
|
# channel for which we always raise
|
||||||
exc = self.channel._exc
|
exc = self.channel._exc
|
||||||
|
|
@ -144,29 +180,66 @@ class Portal:
|
||||||
raise exc
|
raise exc
|
||||||
|
|
||||||
# not expecting a "main" result
|
# not expecting a "main" result
|
||||||
if self._expect_result is None:
|
if self._expect_result_ctx is None:
|
||||||
|
peer_id: str = f'{self.channel.aid.reprol()!r}'
|
||||||
log.warning(
|
log.warning(
|
||||||
f"Portal for {self.channel.uid} not expecting a final"
|
f'Portal to peer {peer_id} will not deliver a final result?\n'
|
||||||
" result?\nresult() should only be called if subactor"
|
f'\n'
|
||||||
" was spawned with `ActorNursery.run_in_actor()`")
|
f'Context.result() can only be called by the parent of '
|
||||||
|
f'a sub-actor when it was spawned with '
|
||||||
|
f'`ActorNursery.run_in_actor()`'
|
||||||
|
f'\n'
|
||||||
|
f'Further this `ActorNursery`-method-API will deprecated in the'
|
||||||
|
f'near fututre!\n'
|
||||||
|
)
|
||||||
return NoResult
|
return NoResult
|
||||||
|
|
||||||
# expecting a "main" result
|
# expecting a "main" result
|
||||||
assert self._expect_result
|
assert self._expect_result_ctx
|
||||||
|
|
||||||
if self._result_msg is None:
|
if self._final_result_msg is None:
|
||||||
self._result_msg = await self._return_once(
|
try:
|
||||||
self._expect_result
|
(
|
||||||
)
|
self._final_result_msg,
|
||||||
|
self._final_result_pld,
|
||||||
|
) = await self._expect_result_ctx._pld_rx.recv_msg(
|
||||||
|
ipc=self._expect_result_ctx,
|
||||||
|
expect_msg=Return,
|
||||||
|
)
|
||||||
|
except BaseException as err:
|
||||||
|
# TODO: wrap this into `@api_frame` optionally with
|
||||||
|
# some kinda filtering mechanism like log levels?
|
||||||
|
__tracebackhide__: bool = False
|
||||||
|
raise err
|
||||||
|
|
||||||
return _unwrap_msg(self._result_msg, self.channel)
|
return self._final_result_pld
|
||||||
|
|
||||||
|
# TODO: factor this out into a `.highlevel` API-wrapper that uses
|
||||||
|
# a single `.open_context()` call underneath.
|
||||||
|
async def result(
|
||||||
|
self,
|
||||||
|
*args,
|
||||||
|
**kwargs,
|
||||||
|
) -> Any|Exception:
|
||||||
|
typname: str = type(self).__name__
|
||||||
|
log.warning(
|
||||||
|
f'`{typname}.result()` is DEPRECATED!\n'
|
||||||
|
f'\n'
|
||||||
|
f'Use `{typname}.wait_for_result()` instead!\n'
|
||||||
|
)
|
||||||
|
return await self.wait_for_result(
|
||||||
|
*args,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
async def _cancel_streams(self):
|
async def _cancel_streams(self):
|
||||||
# terminate all locally running async generator
|
# terminate all locally running async generator
|
||||||
# IPC calls
|
# IPC calls
|
||||||
if self._streams:
|
if self._streams:
|
||||||
log.cancel(
|
peer_id: str = f'{self.channel.aid.reprol()!r}'
|
||||||
f"Cancelling all streams with {self.channel.uid}")
|
report: str = (
|
||||||
|
f'Cancelling all msg-streams with {peer_id}\n'
|
||||||
|
)
|
||||||
for stream in self._streams.copy():
|
for stream in self._streams.copy():
|
||||||
try:
|
try:
|
||||||
await stream.aclose()
|
await stream.aclose()
|
||||||
|
|
@ -175,10 +248,18 @@ class Portal:
|
||||||
# (unless of course at some point down the road we
|
# (unless of course at some point down the road we
|
||||||
# won't expect this to always be the case or need to
|
# won't expect this to always be the case or need to
|
||||||
# detect it for respawning purposes?)
|
# detect it for respawning purposes?)
|
||||||
log.debug(f"{stream} was already closed.")
|
report += (
|
||||||
|
f'->) {stream!r} already closed\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
log.cancel(report)
|
||||||
|
|
||||||
async def aclose(self):
|
async def aclose(self):
|
||||||
log.debug(f"Closing {self}")
|
log.debug(
|
||||||
|
f'Closing portal\n'
|
||||||
|
f'>}}\n'
|
||||||
|
f'|_{self}\n'
|
||||||
|
)
|
||||||
# TODO: once we move to implementing our own `ReceiveChannel`
|
# TODO: once we move to implementing our own `ReceiveChannel`
|
||||||
# (including remote task cancellation inside its `.aclose()`)
|
# (including remote task cancellation inside its `.aclose()`)
|
||||||
# we'll need to .aclose all those channels here
|
# we'll need to .aclose all those channels here
|
||||||
|
|
@ -190,43 +271,97 @@ class Portal:
|
||||||
|
|
||||||
) -> bool:
|
) -> bool:
|
||||||
'''
|
'''
|
||||||
Cancel the actor on the other end of this portal.
|
Cancel the actor runtime (and thus process) on the far
|
||||||
|
end of this portal.
|
||||||
|
|
||||||
|
**NOTE** THIS CANCELS THE ENTIRE RUNTIME AND THE
|
||||||
|
SUBPROCESS, it DOES NOT just cancel the remote task. If you
|
||||||
|
want to have a handle to cancel a remote ``tri.Task`` look
|
||||||
|
at `.open_context()` and the definition of
|
||||||
|
`._context.Context.cancel()` which CAN be used for this
|
||||||
|
purpose.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
if not self.channel.connected():
|
__runtimeframe__: int = 1 # noqa
|
||||||
log.cancel("This channel is already closed can't cancel")
|
|
||||||
|
chan: Channel = self.channel
|
||||||
|
peer_id: str = f'{self.channel.aid.reprol()!r}'
|
||||||
|
if not chan.connected():
|
||||||
|
log.runtime(
|
||||||
|
'Peer {peer_id} is already disconnected\n'
|
||||||
|
'-> skipping cancel request..\n'
|
||||||
|
)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
log.cancel(
|
log.cancel(
|
||||||
f"Sending actor cancel request to {self.channel.uid} on "
|
f'Sending actor-runtime-cancel-req to peer\n'
|
||||||
f"{self.channel}")
|
f'\n'
|
||||||
|
f'c)=> {peer_id}\n'
|
||||||
self.channel._cancel_called = True
|
)
|
||||||
|
|
||||||
|
# XXX the one spot we set it?
|
||||||
|
chan._cancel_called: bool = True
|
||||||
try:
|
try:
|
||||||
# send cancel cmd - might not get response
|
# send cancel cmd - might not get response
|
||||||
# XXX: sure would be nice to make this work with a proper shield
|
# XXX: sure would be nice to make this work with
|
||||||
with trio.move_on_after(timeout or self.cancel_timeout) as cs:
|
# a proper shield
|
||||||
cs.shield = True
|
with trio.move_on_after(
|
||||||
|
timeout
|
||||||
await self.run_from_ns('self', 'cancel')
|
or
|
||||||
|
self.cancel_timeout
|
||||||
|
) as cs:
|
||||||
|
cs.shield: bool = True
|
||||||
|
await self.run_from_ns(
|
||||||
|
'self',
|
||||||
|
'cancel',
|
||||||
|
)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
if cs.cancelled_caught:
|
if cs.cancelled_caught:
|
||||||
log.cancel(f"May have failed to cancel {self.channel.uid}")
|
# may timeout and we never get an ack (obvi racy)
|
||||||
|
# but that doesn't mean it wasn't cancelled.
|
||||||
|
log.debug(
|
||||||
|
f'May have failed to cancel peer?\n'
|
||||||
|
f'\n'
|
||||||
|
f'c)=?> {peer_id}\n'
|
||||||
|
)
|
||||||
|
|
||||||
# if we get here some weird cancellation case happened
|
# if we get here some weird cancellation case happened
|
||||||
return False
|
return False
|
||||||
|
|
||||||
except (
|
except (
|
||||||
|
# XXX, should never really get raised unless we aren't
|
||||||
|
# wrapping them in the below type by mistake?
|
||||||
|
#
|
||||||
|
# Leaving the catch here for now until we're very sure
|
||||||
|
# all the cases (for various tpt protos) have indeed been
|
||||||
|
# re-wrapped ;p
|
||||||
trio.ClosedResourceError,
|
trio.ClosedResourceError,
|
||||||
trio.BrokenResourceError,
|
trio.BrokenResourceError,
|
||||||
):
|
|
||||||
log.cancel(
|
TransportClosed,
|
||||||
f"{self.channel} for {self.channel.uid} was already "
|
) as tpt_err:
|
||||||
"closed or broken?")
|
ipc_borked_report: str = (
|
||||||
|
f'IPC for actor already closed/broken?\n\n'
|
||||||
|
f'\n'
|
||||||
|
f'c)=x> {peer_id}\n'
|
||||||
|
)
|
||||||
|
match tpt_err:
|
||||||
|
case TransportClosed():
|
||||||
|
log.debug(ipc_borked_report)
|
||||||
|
case _:
|
||||||
|
ipc_borked_report += (
|
||||||
|
f'\n'
|
||||||
|
f'Unhandled low-level transport-closed/error during\n'
|
||||||
|
f'Portal.cancel_actor()` request?\n'
|
||||||
|
f'<{type(tpt_err).__name__}( {tpt_err} )>\n'
|
||||||
|
)
|
||||||
|
log.warning(ipc_borked_report)
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
# TODO: do we still need this for low level `Actor`-runtime
|
||||||
|
# method calls or can we also remove it?
|
||||||
async def run_from_ns(
|
async def run_from_ns(
|
||||||
self,
|
self,
|
||||||
namespace_path: str,
|
namespace_path: str,
|
||||||
|
|
@ -243,27 +378,35 @@ class Portal:
|
||||||
|
|
||||||
Note::
|
Note::
|
||||||
|
|
||||||
A special namespace `self` can be used to invoke `Actor`
|
A special namespace `self` can be used to invoke `Actor`
|
||||||
instance methods in the remote runtime. Currently this
|
instance methods in the remote runtime. Currently this
|
||||||
should only be used solely for ``tractor`` runtime
|
should only ever be used for `Actor` (method) runtime
|
||||||
internals.
|
internals!
|
||||||
|
|
||||||
'''
|
'''
|
||||||
ctx = await self.actor.start_remote_task(
|
__runtimeframe__: int = 1 # noqa
|
||||||
self.channel,
|
nsf = NamespacePath(
|
||||||
namespace_path,
|
f'{namespace_path}:{function_name}'
|
||||||
function_name,
|
)
|
||||||
kwargs,
|
ctx: Context = await self.actor.start_remote_task(
|
||||||
|
chan=self.channel,
|
||||||
|
nsf=nsf,
|
||||||
|
kwargs=kwargs,
|
||||||
|
portal=self,
|
||||||
|
)
|
||||||
|
return await ctx._pld_rx.recv_pld(
|
||||||
|
ipc=ctx,
|
||||||
|
expect_msg=Return,
|
||||||
)
|
)
|
||||||
ctx._portal = self
|
|
||||||
msg = await self._return_once(ctx)
|
|
||||||
return _unwrap_msg(msg, self.channel)
|
|
||||||
|
|
||||||
|
# TODO: factor this out into a `.highlevel` API-wrapper that uses
|
||||||
|
# a single `.open_context()` call underneath.
|
||||||
async def run(
|
async def run(
|
||||||
self,
|
self,
|
||||||
func: str,
|
func: str,
|
||||||
fn_name: Optional[str] = None,
|
fn_name: str|None = None,
|
||||||
**kwargs
|
**kwargs
|
||||||
|
|
||||||
) -> Any:
|
) -> Any:
|
||||||
'''
|
'''
|
||||||
Submit a remote function to be scheduled and run by actor, in
|
Submit a remote function to be scheduled and run by actor, in
|
||||||
|
|
@ -273,6 +416,8 @@ class Portal:
|
||||||
remote rpc task or a local async generator instance.
|
remote rpc task or a local async generator instance.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
__runtimeframe__: int = 1 # noqa
|
||||||
|
|
||||||
if isinstance(func, str):
|
if isinstance(func, str):
|
||||||
warnings.warn(
|
warnings.warn(
|
||||||
"`Portal.run(namespace: str, funcname: str)` is now"
|
"`Portal.run(namespace: str, funcname: str)` is now"
|
||||||
|
|
@ -282,8 +427,9 @@ class Portal:
|
||||||
DeprecationWarning,
|
DeprecationWarning,
|
||||||
stacklevel=2,
|
stacklevel=2,
|
||||||
)
|
)
|
||||||
fn_mod_path = func
|
fn_mod_path: str = func
|
||||||
assert isinstance(fn_name, str)
|
assert isinstance(fn_name, str)
|
||||||
|
nsf = NamespacePath(f'{fn_mod_path}:{fn_name}')
|
||||||
|
|
||||||
else: # function reference was passed directly
|
else: # function reference was passed directly
|
||||||
if (
|
if (
|
||||||
|
|
@ -296,27 +442,36 @@ class Portal:
|
||||||
raise TypeError(
|
raise TypeError(
|
||||||
f'{func} must be a non-streaming async function!')
|
f'{func} must be a non-streaming async function!')
|
||||||
|
|
||||||
fn_mod_path, fn_name = NamespacePath.from_ref(func).to_tuple()
|
nsf = NamespacePath.from_ref(func)
|
||||||
|
|
||||||
ctx = await self.actor.start_remote_task(
|
ctx = await self.actor.start_remote_task(
|
||||||
self.channel,
|
self.channel,
|
||||||
fn_mod_path,
|
nsf=nsf,
|
||||||
fn_name,
|
kwargs=kwargs,
|
||||||
kwargs,
|
portal=self,
|
||||||
)
|
)
|
||||||
ctx._portal = self
|
return await ctx._pld_rx.recv_pld(
|
||||||
return _unwrap_msg(
|
ipc=ctx,
|
||||||
await self._return_once(ctx),
|
expect_msg=Return,
|
||||||
self.channel,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
@asynccontextmanager
|
# TODO: factor this out into a `.highlevel` API-wrapper that uses
|
||||||
|
# a single `.open_context()` call underneath.
|
||||||
|
@acm
|
||||||
async def open_stream_from(
|
async def open_stream_from(
|
||||||
self,
|
self,
|
||||||
async_gen_func: Callable, # typing: ignore
|
async_gen_func: Callable, # typing: ignore
|
||||||
**kwargs,
|
**kwargs,
|
||||||
|
|
||||||
) -> AsyncGenerator[ReceiveMsgStream, None]:
|
) -> AsyncGenerator[MsgStream, None]:
|
||||||
|
'''
|
||||||
|
Legacy one-way streaming API.
|
||||||
|
|
||||||
|
TODO: re-impl on top `Portal.open_context()` + an async gen
|
||||||
|
around `Context.open_stream()`.
|
||||||
|
|
||||||
|
'''
|
||||||
|
__runtimeframe__: int = 1 # noqa
|
||||||
|
|
||||||
if not inspect.isasyncgenfunction(async_gen_func):
|
if not inspect.isasyncgenfunction(async_gen_func):
|
||||||
if not (
|
if not (
|
||||||
|
|
@ -326,29 +481,27 @@ class Portal:
|
||||||
raise TypeError(
|
raise TypeError(
|
||||||
f'{async_gen_func} must be an async generator function!')
|
f'{async_gen_func} must be an async generator function!')
|
||||||
|
|
||||||
fn_mod_path, fn_name = NamespacePath.from_ref(
|
ctx: Context = await self.actor.start_remote_task(
|
||||||
async_gen_func).to_tuple()
|
|
||||||
ctx = await self.actor.start_remote_task(
|
|
||||||
self.channel,
|
self.channel,
|
||||||
fn_mod_path,
|
nsf=NamespacePath.from_ref(async_gen_func),
|
||||||
fn_name,
|
kwargs=kwargs,
|
||||||
kwargs
|
portal=self,
|
||||||
)
|
)
|
||||||
ctx._portal = self
|
|
||||||
|
|
||||||
# ensure receive-only stream entrypoint
|
# ensure receive-only stream entrypoint
|
||||||
assert ctx._remote_func_type == 'asyncgen'
|
assert ctx._remote_func_type == 'asyncgen'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# deliver receive only stream
|
# deliver receive only stream
|
||||||
async with ReceiveMsgStream(
|
async with MsgStream(
|
||||||
ctx, ctx._recv_chan,
|
ctx=ctx,
|
||||||
) as rchan:
|
rx_chan=ctx._rx_chan,
|
||||||
self._streams.add(rchan)
|
) as stream:
|
||||||
yield rchan
|
self._streams.add(stream)
|
||||||
|
ctx._stream = stream
|
||||||
|
yield stream
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
|
|
||||||
# cancel the far end task on consumer close
|
# cancel the far end task on consumer close
|
||||||
# NOTE: this is a special case since we assume that if using
|
# NOTE: this is a special case since we assume that if using
|
||||||
# this ``.open_fream_from()`` api, the stream is one a one
|
# this ``.open_fream_from()`` api, the stream is one a one
|
||||||
|
|
@ -360,186 +513,24 @@ class Portal:
|
||||||
with trio.CancelScope(shield=True):
|
with trio.CancelScope(shield=True):
|
||||||
await ctx.cancel()
|
await ctx.cancel()
|
||||||
|
|
||||||
except trio.ClosedResourceError:
|
except trio.ClosedResourceError as cre:
|
||||||
# if the far end terminates before we send a cancel the
|
# if the far end terminates before we send a cancel the
|
||||||
# underlying transport-channel may already be closed.
|
# underlying transport-channel may already be closed.
|
||||||
log.cancel(f'Context {ctx} was already closed?')
|
log.cancel(
|
||||||
|
f'Context.cancel() -> {cre!r}\n'
|
||||||
|
f'cid: {ctx.cid!r} already closed?\n'
|
||||||
|
)
|
||||||
|
|
||||||
# XXX: should this always be done?
|
# XXX: should this always be done?
|
||||||
# await recv_chan.aclose()
|
# await recv_chan.aclose()
|
||||||
self._streams.remove(rchan)
|
self._streams.remove(stream)
|
||||||
|
|
||||||
@asynccontextmanager
|
# NOTE: impl is found in `._context`` mod to make
|
||||||
async def open_context(
|
# reading/groking the details simpler code-org-wise. This
|
||||||
|
# method does not have to be used over that `@acm` module func
|
||||||
self,
|
# directly, it is for conventience and from the original API
|
||||||
func: Callable,
|
# design.
|
||||||
**kwargs,
|
open_context = open_context_from_portal
|
||||||
|
|
||||||
) -> AsyncGenerator[tuple[Context, Any], None]:
|
|
||||||
'''
|
|
||||||
Open an inter-actor task context.
|
|
||||||
|
|
||||||
This is a synchronous API which allows for deterministic
|
|
||||||
setup/teardown of a remote task. The yielded ``Context`` further
|
|
||||||
allows for opening bidirectional streams, explicit cancellation
|
|
||||||
and synchronized final result collection. See ``tractor.Context``.
|
|
||||||
|
|
||||||
'''
|
|
||||||
# conduct target func method structural checks
|
|
||||||
if not inspect.iscoroutinefunction(func) and (
|
|
||||||
getattr(func, '_tractor_contex_function', False)
|
|
||||||
):
|
|
||||||
raise TypeError(
|
|
||||||
f'{func} must be an async generator function!')
|
|
||||||
|
|
||||||
fn_mod_path, fn_name = NamespacePath.from_ref(func).to_tuple()
|
|
||||||
|
|
||||||
ctx = await self.actor.start_remote_task(
|
|
||||||
self.channel,
|
|
||||||
fn_mod_path,
|
|
||||||
fn_name,
|
|
||||||
kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
assert ctx._remote_func_type == 'context'
|
|
||||||
msg = await ctx._recv_chan.receive()
|
|
||||||
|
|
||||||
try:
|
|
||||||
# the "first" value here is delivered by the callee's
|
|
||||||
# ``Context.started()`` call.
|
|
||||||
first = msg['started']
|
|
||||||
ctx._started_called = True
|
|
||||||
|
|
||||||
except KeyError:
|
|
||||||
assert msg.get('cid'), ("Received internal error at context?")
|
|
||||||
|
|
||||||
if msg.get('error'):
|
|
||||||
# raise kerr from unpack_error(msg, self.channel)
|
|
||||||
raise unpack_error(msg, self.channel) from None
|
|
||||||
else:
|
|
||||||
raise MessagingError(
|
|
||||||
f'Context for {ctx.cid} was expecting a `started` message'
|
|
||||||
f' but received a non-error msg:\n{pformat(msg)}'
|
|
||||||
)
|
|
||||||
|
|
||||||
_err: Optional[BaseException] = None
|
|
||||||
ctx._portal = self
|
|
||||||
|
|
||||||
uid = self.channel.uid
|
|
||||||
cid = ctx.cid
|
|
||||||
etype: Optional[Type[BaseException]] = None
|
|
||||||
|
|
||||||
# deliver context instance and .started() msg value in open tuple.
|
|
||||||
try:
|
|
||||||
async with trio.open_nursery() as scope_nursery:
|
|
||||||
ctx._scope_nursery = scope_nursery
|
|
||||||
|
|
||||||
# do we need this?
|
|
||||||
# await trio.lowlevel.checkpoint()
|
|
||||||
|
|
||||||
yield ctx, first
|
|
||||||
|
|
||||||
except ContextCancelled as err:
|
|
||||||
_err = err
|
|
||||||
if not ctx._cancel_called:
|
|
||||||
# context was cancelled at the far end but was
|
|
||||||
# not part of this end requesting that cancel
|
|
||||||
# so raise for the local task to respond and handle.
|
|
||||||
raise
|
|
||||||
|
|
||||||
# if the context was cancelled by client code
|
|
||||||
# then we don't need to raise since user code
|
|
||||||
# is expecting this and the block should exit.
|
|
||||||
else:
|
|
||||||
log.debug(f'Context {ctx} cancelled gracefully')
|
|
||||||
|
|
||||||
except (
|
|
||||||
BaseException,
|
|
||||||
|
|
||||||
# more specifically, we need to handle these but not
|
|
||||||
# sure it's worth being pedantic:
|
|
||||||
# Exception,
|
|
||||||
# trio.Cancelled,
|
|
||||||
# KeyboardInterrupt,
|
|
||||||
|
|
||||||
) as err:
|
|
||||||
etype = type(err)
|
|
||||||
# the context cancels itself on any cancel
|
|
||||||
# causing error.
|
|
||||||
|
|
||||||
if ctx.chan.connected():
|
|
||||||
log.cancel(
|
|
||||||
'Context cancelled for task, sending cancel request..\n'
|
|
||||||
f'task:{cid}\n'
|
|
||||||
f'actor:{uid}'
|
|
||||||
)
|
|
||||||
await ctx.cancel()
|
|
||||||
else:
|
|
||||||
log.warning(
|
|
||||||
'IPC connection for context is broken?\n'
|
|
||||||
f'task:{cid}\n'
|
|
||||||
f'actor:{uid}'
|
|
||||||
)
|
|
||||||
|
|
||||||
raise
|
|
||||||
|
|
||||||
finally:
|
|
||||||
# in the case where a runtime nursery (due to internal bug)
|
|
||||||
# or a remote actor transmits an error we want to be
|
|
||||||
# sure we get the error the underlying feeder mem chan.
|
|
||||||
# if it's not raised here it *should* be raised from the
|
|
||||||
# msg loop nursery right?
|
|
||||||
if ctx.chan.connected():
|
|
||||||
log.info(
|
|
||||||
'Waiting on final context-task result for\n'
|
|
||||||
f'task: {cid}\n'
|
|
||||||
f'actor: {uid}'
|
|
||||||
)
|
|
||||||
result = await ctx.result()
|
|
||||||
|
|
||||||
# though it should be impossible for any tasks
|
|
||||||
# operating *in* this scope to have survived
|
|
||||||
# we tear down the runtime feeder chan last
|
|
||||||
# to avoid premature stream clobbers.
|
|
||||||
if ctx._recv_chan is not None:
|
|
||||||
# should we encapsulate this in the context api?
|
|
||||||
await ctx._recv_chan.aclose()
|
|
||||||
|
|
||||||
if etype:
|
|
||||||
if ctx._cancel_called:
|
|
||||||
log.cancel(
|
|
||||||
f'Context {fn_name} cancelled by caller with\n{etype}'
|
|
||||||
)
|
|
||||||
elif _err is not None:
|
|
||||||
log.cancel(
|
|
||||||
f'Context for task cancelled by callee with {etype}\n'
|
|
||||||
f'target: `{fn_name}`\n'
|
|
||||||
f'task:{cid}\n'
|
|
||||||
f'actor:{uid}'
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
log.runtime(
|
|
||||||
f'Context {fn_name} returned '
|
|
||||||
f'value from callee `{result}`'
|
|
||||||
)
|
|
||||||
|
|
||||||
# XXX: (MEGA IMPORTANT) if this is a root opened process we
|
|
||||||
# wait for any immediate child in debug before popping the
|
|
||||||
# context from the runtime msg loop otherwise inside
|
|
||||||
# ``Actor._push_result()`` the msg will be discarded and in
|
|
||||||
# the case where that msg is global debugger unlock (via
|
|
||||||
# a "stop" msg for a stream), this can result in a deadlock
|
|
||||||
# where the root is waiting on the lock to clear but the
|
|
||||||
# child has already cleared it and clobbered IPC.
|
|
||||||
from ._debug import maybe_wait_for_debugger
|
|
||||||
await maybe_wait_for_debugger()
|
|
||||||
|
|
||||||
# remove the context from runtime tracking
|
|
||||||
self.actor._contexts.pop(
|
|
||||||
(self.channel.uid, ctx.cid),
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
|
|
@ -554,22 +545,31 @@ class LocalPortal:
|
||||||
actor: 'Actor' # type: ignore # noqa
|
actor: 'Actor' # type: ignore # noqa
|
||||||
channel: Channel
|
channel: Channel
|
||||||
|
|
||||||
async def run_from_ns(self, ns: str, func_name: str, **kwargs) -> Any:
|
async def run_from_ns(
|
||||||
|
self,
|
||||||
|
ns: str,
|
||||||
|
func_name: str,
|
||||||
|
**kwargs,
|
||||||
|
) -> Any:
|
||||||
'''
|
'''
|
||||||
Run a requested local function from a namespace path and
|
Run a requested local function from a namespace path and
|
||||||
return it's result.
|
return it's result.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
obj = self.actor if ns == 'self' else importlib.import_module(ns)
|
obj = (
|
||||||
func = getattr(obj, func_name)
|
self.actor
|
||||||
|
if ns == 'self'
|
||||||
|
else importlib.import_module(ns)
|
||||||
|
)
|
||||||
|
func: Callable = getattr(obj, func_name)
|
||||||
return await func(**kwargs)
|
return await func(**kwargs)
|
||||||
|
|
||||||
|
|
||||||
@asynccontextmanager
|
@acm
|
||||||
async def open_portal(
|
async def open_portal(
|
||||||
|
|
||||||
channel: Channel,
|
channel: Channel,
|
||||||
nursery: Optional[trio.Nursery] = None,
|
tn: trio.Nursery|None = None,
|
||||||
start_msg_loop: bool = True,
|
start_msg_loop: bool = True,
|
||||||
shield: bool = False,
|
shield: bool = False,
|
||||||
|
|
||||||
|
|
@ -577,31 +577,39 @@ async def open_portal(
|
||||||
'''
|
'''
|
||||||
Open a ``Portal`` through the provided ``channel``.
|
Open a ``Portal`` through the provided ``channel``.
|
||||||
|
|
||||||
Spawns a background task to handle message processing (normally
|
Spawns a background task to handle RPC processing, normally
|
||||||
done by the actor-runtime implicitly).
|
done by the actor-runtime implicitly via a call to
|
||||||
|
`._rpc.process_messages()`. just after connection establishment.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
actor = current_actor()
|
actor = current_actor()
|
||||||
assert actor
|
assert actor
|
||||||
was_connected = False
|
was_connected: bool = False
|
||||||
|
|
||||||
async with maybe_open_nursery(nursery, shield=shield) as nursery:
|
async with (
|
||||||
|
collapse_eg(),
|
||||||
|
maybe_open_nursery(
|
||||||
|
tn,
|
||||||
|
shield=shield,
|
||||||
|
) as tn,
|
||||||
|
):
|
||||||
|
|
||||||
if not channel.connected():
|
if not channel.connected():
|
||||||
await channel.connect()
|
await channel.connect()
|
||||||
was_connected = True
|
was_connected = True
|
||||||
|
|
||||||
if channel.uid is None:
|
if channel.aid is None:
|
||||||
await actor._do_handshake(channel)
|
await channel._do_handshake(
|
||||||
|
aid=actor.aid,
|
||||||
|
)
|
||||||
|
|
||||||
msg_loop_cs: Optional[trio.CancelScope] = None
|
msg_loop_cs: trio.CancelScope|None = None
|
||||||
if start_msg_loop:
|
if start_msg_loop:
|
||||||
from ._runtime import process_messages
|
from . import _rpc
|
||||||
msg_loop_cs = await nursery.start(
|
msg_loop_cs = await tn.start(
|
||||||
partial(
|
partial(
|
||||||
process_messages,
|
_rpc.process_messages,
|
||||||
actor,
|
chan=channel,
|
||||||
channel,
|
|
||||||
# if the local task is cancelled we want to keep
|
# if the local task is cancelled we want to keep
|
||||||
# the msg loop running until our block ends
|
# the msg loop running until our block ends
|
||||||
shield=True,
|
shield=True,
|
||||||
|
|
@ -614,12 +622,10 @@ async def open_portal(
|
||||||
await portal.aclose()
|
await portal.aclose()
|
||||||
|
|
||||||
if was_connected:
|
if was_connected:
|
||||||
# gracefully signal remote channel-msg loop
|
await channel.aclose()
|
||||||
await channel.send(None)
|
|
||||||
# await channel.aclose()
|
|
||||||
|
|
||||||
# cancel background msg loop task
|
# cancel background msg loop task
|
||||||
if msg_loop_cs:
|
if msg_loop_cs is not None:
|
||||||
msg_loop_cs.cancel()
|
msg_loop_cs.cancel()
|
||||||
|
|
||||||
nursery.cancel_scope.cancel()
|
tn.cancel_scope.cancel()
|
||||||
|
|
|
||||||
729
tractor/_root.py
729
tractor/_root.py
|
|
@ -18,253 +18,609 @@
|
||||||
Root actor runtime ignition(s).
|
Root actor runtime ignition(s).
|
||||||
|
|
||||||
'''
|
'''
|
||||||
from contextlib import asynccontextmanager
|
from contextlib import (
|
||||||
|
asynccontextmanager as acm,
|
||||||
|
)
|
||||||
from functools import partial
|
from functools import partial
|
||||||
import importlib
|
import importlib
|
||||||
|
import inspect
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import signal
|
import signal
|
||||||
|
import sys
|
||||||
from typing import (
|
from typing import (
|
||||||
Optional,
|
Any,
|
||||||
|
Callable,
|
||||||
)
|
)
|
||||||
import typing
|
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
|
|
||||||
from exceptiongroup import BaseExceptionGroup
|
|
||||||
import trio
|
import trio
|
||||||
|
|
||||||
from ._runtime import (
|
from . import _runtime
|
||||||
Actor,
|
from .devx import (
|
||||||
Arbiter,
|
debug,
|
||||||
async_main,
|
_frame_stack,
|
||||||
|
pformat as _pformat,
|
||||||
)
|
)
|
||||||
from . import _debug
|
|
||||||
from . import _spawn
|
from . import _spawn
|
||||||
from . import _state
|
from . import _state
|
||||||
from . import log
|
from . import log
|
||||||
from ._ipc import _connect_chan
|
from .ipc import (
|
||||||
from ._exceptions import is_multi_cancelled
|
_connect_chan,
|
||||||
|
)
|
||||||
|
from ._addr import (
|
||||||
# set at startup and after forks
|
Address,
|
||||||
_default_arbiter_host: str = '127.0.0.1'
|
UnwrappedAddress,
|
||||||
_default_arbiter_port: int = 1616
|
default_lo_addrs,
|
||||||
|
mk_uuid,
|
||||||
|
wrap_address,
|
||||||
|
)
|
||||||
|
from .trionics import (
|
||||||
|
is_multi_cancelled,
|
||||||
|
collapse_eg,
|
||||||
|
)
|
||||||
|
from ._exceptions import (
|
||||||
|
RuntimeFailure,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
logger = log.get_logger('tractor')
|
logger = log.get_logger('tractor')
|
||||||
|
|
||||||
|
|
||||||
@asynccontextmanager
|
# TODO: stick this in a `@acm` defined in `devx.debug`?
|
||||||
|
# -[ ] also maybe consider making this a `wrapt`-deco to
|
||||||
|
# save an indent level?
|
||||||
|
#
|
||||||
|
@acm
|
||||||
|
async def maybe_block_bp(
|
||||||
|
debug_mode: bool,
|
||||||
|
maybe_enable_greenback: bool,
|
||||||
|
) -> bool:
|
||||||
|
# Override the global debugger hook to make it play nice with
|
||||||
|
# ``trio``, see much discussion in:
|
||||||
|
# https://github.com/python-trio/trio/issues/1155#issuecomment-742964018
|
||||||
|
builtin_bp_handler: Callable = sys.breakpointhook
|
||||||
|
orig_bp_path: str|None = os.environ.get(
|
||||||
|
'PYTHONBREAKPOINT',
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
bp_blocked: bool
|
||||||
|
if (
|
||||||
|
debug_mode
|
||||||
|
and maybe_enable_greenback
|
||||||
|
and (
|
||||||
|
maybe_mod := await debug.maybe_init_greenback(
|
||||||
|
raise_not_found=False,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
):
|
||||||
|
logger.info(
|
||||||
|
f'Found `greenback` installed @ {maybe_mod}\n'
|
||||||
|
f'Enabling `tractor.pause_from_sync()` support!\n'
|
||||||
|
)
|
||||||
|
os.environ['PYTHONBREAKPOINT'] = (
|
||||||
|
'tractor.devx.debug._sync_pause_from_builtin'
|
||||||
|
)
|
||||||
|
_state._runtime_vars['use_greenback'] = True
|
||||||
|
bp_blocked = False
|
||||||
|
|
||||||
|
else:
|
||||||
|
# TODO: disable `breakpoint()` by default (without
|
||||||
|
# `greenback`) since it will break any multi-actor
|
||||||
|
# usage by a clobbered TTY's stdstreams!
|
||||||
|
def block_bps(*args, **kwargs):
|
||||||
|
raise RuntimeError(
|
||||||
|
'Trying to use `breakpoint()` eh?\n\n'
|
||||||
|
'Welp, `tractor` blocks `breakpoint()` built-in calls by default!\n'
|
||||||
|
'If you need to use it please install `greenback` and set '
|
||||||
|
'`debug_mode=True` when opening the runtime '
|
||||||
|
'(either via `.open_nursery()` or `open_root_actor()`)\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
sys.breakpointhook = block_bps
|
||||||
|
# lol ok,
|
||||||
|
# https://docs.python.org/3/library/sys.html#sys.breakpointhook
|
||||||
|
os.environ['PYTHONBREAKPOINT'] = "0"
|
||||||
|
bp_blocked = True
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield bp_blocked
|
||||||
|
finally:
|
||||||
|
# restore any prior built-in `breakpoint()` hook state
|
||||||
|
if builtin_bp_handler is not None:
|
||||||
|
sys.breakpointhook = builtin_bp_handler
|
||||||
|
|
||||||
|
if orig_bp_path is not None:
|
||||||
|
os.environ['PYTHONBREAKPOINT'] = orig_bp_path
|
||||||
|
|
||||||
|
else:
|
||||||
|
# clear env back to having no entry
|
||||||
|
os.environ.pop('PYTHONBREAKPOINT', None)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@acm
|
||||||
async def open_root_actor(
|
async def open_root_actor(
|
||||||
|
*,
|
||||||
|
# defaults are above
|
||||||
|
registry_addrs: list[UnwrappedAddress]|None = None,
|
||||||
|
|
||||||
# defaults are above
|
# defaults are above
|
||||||
arbiter_addr: Optional[tuple[str, int]] = (
|
arbiter_addr: tuple[UnwrappedAddress]|None = None,
|
||||||
_default_arbiter_host,
|
|
||||||
_default_arbiter_port,
|
|
||||||
),
|
|
||||||
|
|
||||||
name: Optional[str] = 'root',
|
enable_transports: list[
|
||||||
|
# TODO, this should eventually be the pairs as
|
||||||
|
# defined by (codec, proto) as on `MsgTransport.
|
||||||
|
_state.TransportProtocolKey,
|
||||||
|
]|None = None,
|
||||||
|
|
||||||
|
name: str|None = 'root',
|
||||||
|
|
||||||
# either the `multiprocessing` start method:
|
# either the `multiprocessing` start method:
|
||||||
# https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
|
# https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
|
||||||
# OR `trio` (the new default).
|
# OR `trio` (the new default).
|
||||||
start_method: Optional[_spawn.SpawnMethodKey] = None,
|
start_method: _spawn.SpawnMethodKey|None = None,
|
||||||
|
|
||||||
# enables the multi-process debugger support
|
# enables the multi-process debugger support
|
||||||
debug_mode: bool = False,
|
debug_mode: bool = False,
|
||||||
|
maybe_enable_greenback: bool = False, # `.pause_from_sync()/breakpoint()` support
|
||||||
|
# ^XXX NOTE^ the perf implications of use,
|
||||||
|
# https://greenback.readthedocs.io/en/latest/principle.html#performance
|
||||||
|
enable_stack_on_sig: bool = False,
|
||||||
|
|
||||||
# internal logging
|
# internal logging
|
||||||
loglevel: Optional[str] = None,
|
loglevel: str|None = None,
|
||||||
|
|
||||||
enable_modules: Optional[list] = None,
|
enable_modules: list|None = None,
|
||||||
rpc_module_paths: Optional[list] = None,
|
rpc_module_paths: list|None = None,
|
||||||
|
|
||||||
) -> typing.Any:
|
# NOTE: allow caller to ensure that only one registry exists
|
||||||
|
# and that this call creates it.
|
||||||
|
ensure_registry: bool = False,
|
||||||
|
|
||||||
|
hide_tb: bool = True,
|
||||||
|
|
||||||
|
# XXX, proxied directly to `.devx.debug._maybe_enter_pm()`
|
||||||
|
# for REPL-entry logic.
|
||||||
|
debug_filter: Callable[
|
||||||
|
[BaseException|BaseExceptionGroup],
|
||||||
|
bool,
|
||||||
|
] = lambda err: not is_multi_cancelled(err),
|
||||||
|
|
||||||
|
# TODO, a way for actors to augment passing derived
|
||||||
|
# read-only state to sublayers?
|
||||||
|
# extra_rt_vars: dict|None = None,
|
||||||
|
|
||||||
|
) -> _runtime.Actor:
|
||||||
'''
|
'''
|
||||||
Runtime init entry point for ``tractor``.
|
Initialize the `tractor` runtime by starting a "root actor" in
|
||||||
|
a parent-most Python process.
|
||||||
|
|
||||||
|
All (disjoint) actor-process-trees-as-programs are created via
|
||||||
|
this entrypoint.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
# Override the global debugger hook to make it play nice with
|
# XXX NEVER allow nested actor-trees!
|
||||||
# ``trio``, see:
|
if already_actor := _state.current_actor(
|
||||||
# https://github.com/python-trio/trio/issues/1155#issuecomment-742964018
|
err_on_no_runtime=False,
|
||||||
os.environ['PYTHONBREAKPOINT'] = 'tractor._debug._set_trace'
|
):
|
||||||
|
rtvs: dict[str, Any] = _state._runtime_vars
|
||||||
# attempt to retreive ``trio``'s sigint handler and stash it
|
root_mailbox: list[str, int] = rtvs['_root_mailbox']
|
||||||
# on our debugger lock state.
|
registry_addrs: list[list[str, int]] = rtvs['_registry_addrs']
|
||||||
_debug.Lock._trio_handler = signal.getsignal(signal.SIGINT)
|
raise RuntimeFailure(
|
||||||
|
f'A current actor already exists !?\n'
|
||||||
# mark top most level process as root actor
|
f'({already_actor}\n'
|
||||||
_state._runtime_vars['_is_root'] = True
|
f'\n'
|
||||||
|
f'You can NOT open a second root actor from within '
|
||||||
# caps based rpc list
|
f'an existing tree and the current root of this '
|
||||||
enable_modules = enable_modules or []
|
f'already exists !!\n'
|
||||||
|
f'\n'
|
||||||
if rpc_module_paths:
|
f'_root_mailbox: {root_mailbox!r}\n'
|
||||||
warnings.warn(
|
f'_registry_addrs: {registry_addrs!r}\n'
|
||||||
"`rpc_module_paths` is now deprecated, use "
|
|
||||||
" `enable_modules` instead.",
|
|
||||||
DeprecationWarning,
|
|
||||||
stacklevel=2,
|
|
||||||
)
|
|
||||||
enable_modules.extend(rpc_module_paths)
|
|
||||||
|
|
||||||
if start_method is not None:
|
|
||||||
_spawn.try_set_start_method(start_method)
|
|
||||||
|
|
||||||
arbiter_addr = (host, port) = arbiter_addr or (
|
|
||||||
_default_arbiter_host,
|
|
||||||
_default_arbiter_port,
|
|
||||||
)
|
|
||||||
|
|
||||||
loglevel = (loglevel or log._default_loglevel).upper()
|
|
||||||
|
|
||||||
if debug_mode and _spawn._spawn_method == 'trio':
|
|
||||||
_state._runtime_vars['_debug_mode'] = True
|
|
||||||
|
|
||||||
# expose internal debug module to every actor allowing
|
|
||||||
# for use of ``await tractor.breakpoint()``
|
|
||||||
enable_modules.append('tractor._debug')
|
|
||||||
|
|
||||||
# if debug mode get's enabled *at least* use that level of
|
|
||||||
# logging for some informative console prompts.
|
|
||||||
if (
|
|
||||||
logging.getLevelName(
|
|
||||||
# lul, need the upper case for the -> int map?
|
|
||||||
# sweet "dynamic function behaviour" stdlib...
|
|
||||||
loglevel,
|
|
||||||
) > logging.getLevelName('PDB')
|
|
||||||
):
|
|
||||||
loglevel = 'PDB'
|
|
||||||
|
|
||||||
elif debug_mode:
|
|
||||||
raise RuntimeError(
|
|
||||||
"Debug mode is only supported for the `trio` backend!"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
log.get_console_log(loglevel)
|
async with maybe_block_bp(
|
||||||
|
debug_mode=debug_mode,
|
||||||
|
maybe_enable_greenback=maybe_enable_greenback,
|
||||||
|
):
|
||||||
|
if enable_transports is None:
|
||||||
|
enable_transports: list[str] = _state.current_ipc_protos()
|
||||||
|
else:
|
||||||
|
_state._runtime_vars['_enable_tpts'] = enable_transports
|
||||||
|
|
||||||
try:
|
# TODO! support multi-tpts per actor!
|
||||||
# make a temporary connection to see if an arbiter exists,
|
# Bo
|
||||||
# if one can't be made quickly we assume none exists.
|
if not len(enable_transports) == 1:
|
||||||
arbiter_found = False
|
raise RuntimeError(
|
||||||
|
f'No multi-tpt support yet!\n'
|
||||||
# TODO: this connect-and-bail forces us to have to carefully
|
f'enable_transports={enable_transports!r}\n'
|
||||||
# rewrap TCP 104-connection-reset errors as EOF so as to avoid
|
|
||||||
# propagating cancel-causing errors to the channel-msg loop
|
|
||||||
# machinery. Likely it would be better to eventually have
|
|
||||||
# a "discovery" protocol with basic handshake instead.
|
|
||||||
with trio.move_on_after(1):
|
|
||||||
async with _connect_chan(host, port):
|
|
||||||
arbiter_found = True
|
|
||||||
|
|
||||||
except OSError:
|
|
||||||
# TODO: make this a "discovery" log level?
|
|
||||||
logger.warning(f"No actor could be found @ {host}:{port}")
|
|
||||||
|
|
||||||
# create a local actor and start up its main routine/task
|
|
||||||
if arbiter_found:
|
|
||||||
|
|
||||||
# we were able to connect to an arbiter
|
|
||||||
logger.info(f"Arbiter seems to exist @ {host}:{port}")
|
|
||||||
|
|
||||||
actor = Actor(
|
|
||||||
name or 'anonymous',
|
|
||||||
arbiter_addr=arbiter_addr,
|
|
||||||
loglevel=loglevel,
|
|
||||||
enable_modules=enable_modules,
|
|
||||||
)
|
|
||||||
host, port = (host, 0)
|
|
||||||
|
|
||||||
else:
|
|
||||||
# start this local actor as the arbiter (aka a regular actor who
|
|
||||||
# manages the local registry of "mailboxes")
|
|
||||||
|
|
||||||
# Note that if the current actor is the arbiter it is desirable
|
|
||||||
# for it to stay up indefinitely until a re-election process has
|
|
||||||
# taken place - which is not implemented yet FYI).
|
|
||||||
|
|
||||||
actor = Arbiter(
|
|
||||||
name or 'arbiter',
|
|
||||||
arbiter_addr=arbiter_addr,
|
|
||||||
loglevel=loglevel,
|
|
||||||
enable_modules=enable_modules,
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
# assign process-local actor
|
|
||||||
_state._current_actor = actor
|
|
||||||
|
|
||||||
# start local channel-server and fake the portal API
|
|
||||||
# NOTE: this won't block since we provide the nursery
|
|
||||||
logger.info(f"Starting local {actor} @ {host}:{port}")
|
|
||||||
|
|
||||||
# start the actor runtime in a new task
|
|
||||||
async with trio.open_nursery() as nursery:
|
|
||||||
|
|
||||||
# ``_runtime.async_main()`` creates an internal nursery and
|
|
||||||
# thus blocks here until the entire underlying actor tree has
|
|
||||||
# terminated thereby conducting structured concurrency.
|
|
||||||
|
|
||||||
await nursery.start(
|
|
||||||
partial(
|
|
||||||
async_main,
|
|
||||||
actor,
|
|
||||||
accept_addr=(host, port),
|
|
||||||
parent_addr=None
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
_frame_stack.hide_runtime_frames()
|
||||||
|
__tracebackhide__: bool = hide_tb
|
||||||
|
|
||||||
|
# attempt to retreive ``trio``'s sigint handler and stash it
|
||||||
|
# on our debugger lock state.
|
||||||
|
debug.DebugStatus._trio_handler = signal.getsignal(signal.SIGINT)
|
||||||
|
|
||||||
|
# mark top most level process as root actor
|
||||||
|
_state._runtime_vars['_is_root'] = True
|
||||||
|
|
||||||
|
# caps based rpc list
|
||||||
|
enable_modules = (
|
||||||
|
enable_modules
|
||||||
|
or
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
|
||||||
|
if rpc_module_paths:
|
||||||
|
warnings.warn(
|
||||||
|
"`rpc_module_paths` is now deprecated, use "
|
||||||
|
" `enable_modules` instead.",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2,
|
||||||
|
)
|
||||||
|
enable_modules.extend(rpc_module_paths)
|
||||||
|
|
||||||
|
if start_method is not None:
|
||||||
|
_spawn.try_set_start_method(start_method)
|
||||||
|
|
||||||
|
# TODO! remove this ASAP!
|
||||||
|
if arbiter_addr is not None:
|
||||||
|
warnings.warn(
|
||||||
|
'`arbiter_addr` is now deprecated\n'
|
||||||
|
'Use `registry_addrs: list[tuple]` instead..',
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2,
|
||||||
|
)
|
||||||
|
uw_reg_addrs = [arbiter_addr]
|
||||||
|
|
||||||
|
uw_reg_addrs = registry_addrs
|
||||||
|
if not uw_reg_addrs:
|
||||||
|
uw_reg_addrs: list[UnwrappedAddress] = default_lo_addrs(
|
||||||
|
enable_transports
|
||||||
|
)
|
||||||
|
|
||||||
|
# must exist by now since all below code is dependent
|
||||||
|
assert uw_reg_addrs
|
||||||
|
registry_addrs: list[Address] = [
|
||||||
|
wrap_address(uw_addr)
|
||||||
|
for uw_addr in uw_reg_addrs
|
||||||
|
]
|
||||||
|
|
||||||
|
loglevel = (
|
||||||
|
loglevel
|
||||||
|
or log._default_loglevel
|
||||||
|
).upper()
|
||||||
|
|
||||||
|
if (
|
||||||
|
debug_mode
|
||||||
|
and
|
||||||
|
_spawn._spawn_method == 'trio'
|
||||||
|
):
|
||||||
|
_state._runtime_vars['_debug_mode'] = True
|
||||||
|
|
||||||
|
# expose internal debug module to every actor allowing for
|
||||||
|
# use of ``await tractor.pause()``
|
||||||
|
enable_modules.append('tractor.devx.debug._tty_lock')
|
||||||
|
|
||||||
|
# if debug mode get's enabled *at least* use that level of
|
||||||
|
# logging for some informative console prompts.
|
||||||
|
if (
|
||||||
|
logging.getLevelName(
|
||||||
|
# lul, need the upper case for the -> int map?
|
||||||
|
# sweet "dynamic function behaviour" stdlib...
|
||||||
|
loglevel,
|
||||||
|
) > logging.getLevelName('PDB')
|
||||||
|
):
|
||||||
|
loglevel = 'PDB'
|
||||||
|
|
||||||
|
|
||||||
|
elif debug_mode:
|
||||||
|
raise RuntimeError(
|
||||||
|
"Debug mode is only supported for the `trio` backend!"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert loglevel
|
||||||
|
_log = log.get_console_log(loglevel)
|
||||||
|
assert _log
|
||||||
|
|
||||||
|
# TODO: factor this into `.devx._stackscope`!!
|
||||||
|
if (
|
||||||
|
debug_mode
|
||||||
|
and
|
||||||
|
enable_stack_on_sig
|
||||||
|
):
|
||||||
|
from .devx._stackscope import enable_stack_on_sig
|
||||||
|
enable_stack_on_sig()
|
||||||
|
|
||||||
|
# closed into below ping task-func
|
||||||
|
ponged_addrs: list[Address] = []
|
||||||
|
|
||||||
|
async def ping_tpt_socket(
|
||||||
|
addr: Address,
|
||||||
|
timeout: float = 1,
|
||||||
|
) -> None:
|
||||||
|
'''
|
||||||
|
Attempt temporary connection to see if a registry is
|
||||||
|
listening at the requested address by a tranport layer
|
||||||
|
ping.
|
||||||
|
|
||||||
|
If a connection can't be made quickly we assume none no
|
||||||
|
server is listening at that addr.
|
||||||
|
|
||||||
|
'''
|
||||||
try:
|
try:
|
||||||
yield actor
|
# TODO: this connect-and-bail forces us to have to
|
||||||
|
# carefully rewrap TCP 104-connection-reset errors as
|
||||||
|
# EOF so as to avoid propagating cancel-causing errors
|
||||||
|
# to the channel-msg loop machinery. Likely it would
|
||||||
|
# be better to eventually have a "discovery" protocol
|
||||||
|
# with basic handshake instead?
|
||||||
|
with trio.move_on_after(timeout):
|
||||||
|
async with _connect_chan(addr.unwrap()):
|
||||||
|
ponged_addrs.append(addr)
|
||||||
|
|
||||||
except (
|
except OSError:
|
||||||
Exception,
|
# ?TODO, make this a "discovery" log level?
|
||||||
BaseExceptionGroup,
|
logger.info(
|
||||||
) as err:
|
f'No root-actor registry found @ {addr!r}\n'
|
||||||
|
)
|
||||||
|
|
||||||
entered = await _debug._maybe_enter_pm(err)
|
# !TODO, this is basically just another (abstract)
|
||||||
|
# happy-eyeballs, so we should try for formalize it somewhere
|
||||||
|
# in a `.[_]discovery` ya?
|
||||||
|
#
|
||||||
|
async with trio.open_nursery() as tn:
|
||||||
|
for uw_addr in uw_reg_addrs:
|
||||||
|
addr: Address = wrap_address(uw_addr)
|
||||||
|
tn.start_soon(
|
||||||
|
ping_tpt_socket,
|
||||||
|
addr,
|
||||||
|
)
|
||||||
|
|
||||||
if not entered and not is_multi_cancelled(err):
|
trans_bind_addrs: list[UnwrappedAddress] = []
|
||||||
logger.exception("Root actor crashed:")
|
|
||||||
|
|
||||||
# always re-raise
|
# Create a new local root-actor instance which IS NOT THE
|
||||||
raise
|
# REGISTRAR
|
||||||
|
if ponged_addrs:
|
||||||
|
if ensure_registry:
|
||||||
|
raise RuntimeError(
|
||||||
|
f'Failed to open `{name}`@{ponged_addrs}: '
|
||||||
|
'registry socket(s) already bound'
|
||||||
|
)
|
||||||
|
|
||||||
finally:
|
# we were able to connect to an arbiter
|
||||||
# NOTE: not sure if we'll ever need this but it's
|
logger.info(
|
||||||
# possibly better for even more determinism?
|
f'Registry(s) seem(s) to exist @ {ponged_addrs}'
|
||||||
# logger.cancel(
|
)
|
||||||
# f'Waiting on {len(nurseries)} nurseries in root..')
|
|
||||||
# nurseries = actor._actoruid2nursery.values()
|
|
||||||
# async with trio.open_nursery() as tempn:
|
|
||||||
# for an in nurseries:
|
|
||||||
# tempn.start_soon(an.exited.wait)
|
|
||||||
|
|
||||||
logger.cancel("Shutting down root actor")
|
actor = _runtime.Actor(
|
||||||
await actor.cancel()
|
name=name or 'anonymous',
|
||||||
finally:
|
uuid=mk_uuid(),
|
||||||
_state._current_actor = None
|
registry_addrs=ponged_addrs,
|
||||||
logger.runtime("Root actor terminated")
|
loglevel=loglevel,
|
||||||
|
enable_modules=enable_modules,
|
||||||
|
)
|
||||||
|
# **DO NOT** use the registry_addrs as the
|
||||||
|
# ipc-transport-server's bind-addrs as this is
|
||||||
|
# a new NON-registrar, ROOT-actor.
|
||||||
|
#
|
||||||
|
# XXX INSTEAD, bind random addrs using the same tpt
|
||||||
|
# proto.
|
||||||
|
for addr in ponged_addrs:
|
||||||
|
trans_bind_addrs.append(
|
||||||
|
addr.get_random(
|
||||||
|
bindspace=addr.bindspace,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Start this local actor as the "registrar", aka a regular
|
||||||
|
# actor who manages the local registry of "mailboxes" of
|
||||||
|
# other process-tree-local sub-actors.
|
||||||
|
else:
|
||||||
|
# NOTE that if the current actor IS THE REGISTAR, the
|
||||||
|
# following init steps are taken:
|
||||||
|
# - the tranport layer server is bound to each addr
|
||||||
|
# pair defined in provided registry_addrs, or the default.
|
||||||
|
trans_bind_addrs = uw_reg_addrs
|
||||||
|
|
||||||
|
# - it is normally desirable for any registrar to stay up
|
||||||
|
# indefinitely until either all registered (child/sub)
|
||||||
|
# actors are terminated (via SC supervision) or,
|
||||||
|
# a re-election process has taken place.
|
||||||
|
# NOTE: all of ^ which is not implemented yet - see:
|
||||||
|
# https://github.com/goodboy/tractor/issues/216
|
||||||
|
# https://github.com/goodboy/tractor/pull/348
|
||||||
|
# https://github.com/goodboy/tractor/issues/296
|
||||||
|
|
||||||
|
# TODO: rename as `RootActor` or is that even necessary?
|
||||||
|
actor = _runtime.Arbiter(
|
||||||
|
name=name or 'registrar',
|
||||||
|
uuid=mk_uuid(),
|
||||||
|
registry_addrs=registry_addrs,
|
||||||
|
loglevel=loglevel,
|
||||||
|
enable_modules=enable_modules,
|
||||||
|
)
|
||||||
|
# XXX, in case the root actor runtime was actually run from
|
||||||
|
# `tractor.to_asyncio.run_as_asyncio_guest()` and NOt
|
||||||
|
# `.trio.run()`.
|
||||||
|
actor._infected_aio = _state._runtime_vars['_is_infected_aio']
|
||||||
|
|
||||||
|
# NOTE, only set the loopback addr for the
|
||||||
|
# process-tree-global "root" mailbox since all sub-actors
|
||||||
|
# should be able to speak to their root actor over that
|
||||||
|
# channel.
|
||||||
|
raddrs: list[Address] = _state._runtime_vars['_root_addrs']
|
||||||
|
raddrs.extend(trans_bind_addrs)
|
||||||
|
# TODO, remove once we have also removed all usage;
|
||||||
|
# eventually all (root-)registry apis should expect > 1 addr.
|
||||||
|
_state._runtime_vars['_root_mailbox'] = raddrs[0]
|
||||||
|
|
||||||
|
# Start up main task set via core actor-runtime nurseries.
|
||||||
|
try:
|
||||||
|
# assign process-local actor
|
||||||
|
_state._current_actor = actor
|
||||||
|
|
||||||
|
# start local channel-server and fake the portal API
|
||||||
|
# NOTE: this won't block since we provide the nursery
|
||||||
|
report: str = f'Starting actor-runtime for {actor.aid.reprol()!r}\n'
|
||||||
|
if reg_addrs := actor.registry_addrs:
|
||||||
|
report += (
|
||||||
|
'-> Opening new registry @ '
|
||||||
|
+
|
||||||
|
'\n'.join(
|
||||||
|
f'{addr}' for addr in reg_addrs
|
||||||
|
)
|
||||||
|
)
|
||||||
|
logger.info(f'{report}\n')
|
||||||
|
|
||||||
|
# start runtime in a bg sub-task, yield to caller.
|
||||||
|
async with (
|
||||||
|
collapse_eg(),
|
||||||
|
trio.open_nursery() as root_tn,
|
||||||
|
|
||||||
|
# ?TODO? finally-footgun below?
|
||||||
|
# -> see note on why shielding.
|
||||||
|
# maybe_raise_from_masking_exc(),
|
||||||
|
):
|
||||||
|
actor._root_tn = root_tn
|
||||||
|
# `_runtime.async_main()` creates an internal nursery
|
||||||
|
# and blocks here until any underlying actor(-process)
|
||||||
|
# tree has terminated thereby conducting so called
|
||||||
|
# "end-to-end" structured concurrency throughout an
|
||||||
|
# entire hierarchical python sub-process set; all
|
||||||
|
# "actor runtime" primitives are SC-compat and thus all
|
||||||
|
# transitively spawned actors/processes must be as
|
||||||
|
# well.
|
||||||
|
await root_tn.start(
|
||||||
|
partial(
|
||||||
|
_runtime.async_main,
|
||||||
|
actor,
|
||||||
|
accept_addrs=trans_bind_addrs,
|
||||||
|
parent_addr=None
|
||||||
|
)
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
yield actor
|
||||||
|
except (
|
||||||
|
Exception,
|
||||||
|
BaseExceptionGroup,
|
||||||
|
) as err:
|
||||||
|
|
||||||
|
# TODO, in beginning to handle the subsubactor with
|
||||||
|
# crashed grandparent cases..
|
||||||
|
#
|
||||||
|
# was_locked: bool = await debug.maybe_wait_for_debugger(
|
||||||
|
# child_in_debug=True,
|
||||||
|
# )
|
||||||
|
# XXX NOTE XXX see equiv note inside
|
||||||
|
# `._runtime.Actor._stream_handler()` where in the
|
||||||
|
# non-root or root-that-opened-this-mahually case we
|
||||||
|
# wait for the local actor-nursery to exit before
|
||||||
|
# exiting the transport channel handler.
|
||||||
|
entered: bool = await debug._maybe_enter_pm(
|
||||||
|
err,
|
||||||
|
api_frame=inspect.currentframe(),
|
||||||
|
debug_filter=debug_filter,
|
||||||
|
|
||||||
|
# XXX NOTE, required to debug root-actor
|
||||||
|
# crashes under cancellation conditions; so
|
||||||
|
# most of them!
|
||||||
|
shield=root_tn.cancel_scope.cancel_called,
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
not entered
|
||||||
|
and
|
||||||
|
not is_multi_cancelled(
|
||||||
|
err,
|
||||||
|
)
|
||||||
|
):
|
||||||
|
logger.exception(
|
||||||
|
'Root actor crashed\n'
|
||||||
|
f'>x)\n'
|
||||||
|
f' |_{actor}\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
# ALWAYS re-raise any error bubbled up from the
|
||||||
|
# runtime!
|
||||||
|
raise
|
||||||
|
|
||||||
|
finally:
|
||||||
|
# NOTE/TODO?, not sure if we'll ever need this but it's
|
||||||
|
# possibly better for even more determinism?
|
||||||
|
# logger.cancel(
|
||||||
|
# f'Waiting on {len(nurseries)} nurseries in root..')
|
||||||
|
# nurseries = actor._actoruid2nursery.values()
|
||||||
|
# async with trio.open_nursery() as tempn:
|
||||||
|
# for an in nurseries:
|
||||||
|
# tempn.start_soon(an.exited.wait)
|
||||||
|
|
||||||
|
op_nested_actor_repr: str = _pformat.nest_from_op(
|
||||||
|
input_op='>) ',
|
||||||
|
text=actor.pformat(),
|
||||||
|
nest_prefix='|_',
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
f'Closing down root actor\n'
|
||||||
|
f'{op_nested_actor_repr}'
|
||||||
|
)
|
||||||
|
# XXX, THIS IS A *finally-footgun*!
|
||||||
|
# (also mentioned in with-block above)
|
||||||
|
# -> though already shields iternally it can
|
||||||
|
# taskc here and mask underlying errors raised in
|
||||||
|
# the try-block above?
|
||||||
|
with trio.CancelScope(shield=True):
|
||||||
|
await actor.cancel(None) # self cancel
|
||||||
|
finally:
|
||||||
|
# revert all process-global runtime state
|
||||||
|
if (
|
||||||
|
debug_mode
|
||||||
|
and
|
||||||
|
_spawn._spawn_method == 'trio'
|
||||||
|
):
|
||||||
|
_state._runtime_vars['_debug_mode'] = False
|
||||||
|
|
||||||
|
_state._current_actor = None
|
||||||
|
_state._last_actor_terminated = actor
|
||||||
|
|
||||||
|
sclang_repr: str = _pformat.nest_from_op(
|
||||||
|
input_op=')>',
|
||||||
|
text=actor.pformat(),
|
||||||
|
nest_prefix='|_',
|
||||||
|
nest_indent=1,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f'Root actor terminated\n'
|
||||||
|
f'{sclang_repr}'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def run_daemon(
|
def run_daemon(
|
||||||
enable_modules: list[str],
|
enable_modules: list[str],
|
||||||
|
|
||||||
# runtime kwargs
|
# runtime kwargs
|
||||||
name: Optional[str] = 'root',
|
name: str | None = 'root',
|
||||||
arbiter_addr: tuple[str, int] = (
|
registry_addrs: list[UnwrappedAddress]|None = None,
|
||||||
_default_arbiter_host,
|
|
||||||
_default_arbiter_port,
|
|
||||||
),
|
|
||||||
|
|
||||||
start_method: Optional[str] = None,
|
start_method: str | None = None,
|
||||||
debug_mode: bool = False,
|
debug_mode: bool = False,
|
||||||
|
|
||||||
|
# TODO, support `infected_aio=True` mode by,
|
||||||
|
# - calling the appropriate entrypoint-func from `.to_asyncio`
|
||||||
|
# - maybe init-ing `greenback` as done above in
|
||||||
|
# `open_root_actor()`.
|
||||||
|
|
||||||
**kwargs
|
**kwargs
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
'''
|
'''
|
||||||
Spawn daemon actor which will respond to RPC; the main task simply
|
Spawn a root (daemon) actor which will respond to RPC; the main
|
||||||
starts the runtime and then sleeps forever.
|
task simply starts the runtime and then blocks via embedded
|
||||||
|
`trio.sleep_forever()`.
|
||||||
|
|
||||||
This is a very minimal convenience wrapper around starting
|
This is a very minimal convenience wrapper around starting
|
||||||
a "run-until-cancelled" root actor which can be started with a set
|
a "run-until-cancelled" root actor which can be started with a set
|
||||||
|
|
@ -277,9 +633,8 @@ def run_daemon(
|
||||||
importlib.import_module(path)
|
importlib.import_module(path)
|
||||||
|
|
||||||
async def _main():
|
async def _main():
|
||||||
|
|
||||||
async with open_root_actor(
|
async with open_root_actor(
|
||||||
arbiter_addr=arbiter_addr,
|
registry_addrs=registry_addrs,
|
||||||
name=name,
|
name=name,
|
||||||
start_method=start_method,
|
start_method=start_method,
|
||||||
debug_mode=debug_mode,
|
debug_mode=debug_mode,
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
Load Diff
2873
tractor/_runtime.py
2873
tractor/_runtime.py
File diff suppressed because it is too large
Load Diff
|
|
@ -19,48 +19,57 @@ Machinery for actor process spawning using multiple backends.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
import multiprocessing as mp
|
||||||
import sys
|
import sys
|
||||||
import platform
|
import platform
|
||||||
from typing import (
|
from typing import (
|
||||||
Any,
|
Any,
|
||||||
|
Awaitable,
|
||||||
Literal,
|
Literal,
|
||||||
Optional,
|
|
||||||
Callable,
|
Callable,
|
||||||
TypeVar,
|
TypeVar,
|
||||||
TYPE_CHECKING,
|
TYPE_CHECKING,
|
||||||
)
|
)
|
||||||
from collections.abc import Awaitable
|
|
||||||
|
|
||||||
from exceptiongroup import BaseExceptionGroup
|
|
||||||
import trio
|
import trio
|
||||||
from trio_typing import TaskStatus
|
from trio import TaskStatus
|
||||||
|
|
||||||
from ._debug import (
|
from .devx import (
|
||||||
maybe_wait_for_debugger,
|
debug,
|
||||||
acquire_debug_lock,
|
pformat as _pformat
|
||||||
)
|
)
|
||||||
from ._state import (
|
from tractor._state import (
|
||||||
current_actor,
|
current_actor,
|
||||||
is_main_process,
|
is_main_process,
|
||||||
is_root_process,
|
is_root_process,
|
||||||
debug_mode,
|
debug_mode,
|
||||||
|
_runtime_vars,
|
||||||
|
)
|
||||||
|
from tractor.log import get_logger
|
||||||
|
from tractor._addr import UnwrappedAddress
|
||||||
|
from tractor._portal import Portal
|
||||||
|
from tractor._runtime import Actor
|
||||||
|
from tractor._entry import _mp_main
|
||||||
|
from tractor._exceptions import ActorFailure
|
||||||
|
from tractor.msg import (
|
||||||
|
types as msgtypes,
|
||||||
|
pretty_struct,
|
||||||
)
|
)
|
||||||
from .log import get_logger
|
|
||||||
from ._portal import Portal
|
|
||||||
from ._runtime import Actor
|
|
||||||
from ._entry import _mp_main
|
|
||||||
from ._exceptions import ActorFailure
|
|
||||||
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
|
from ipc import (
|
||||||
|
_server,
|
||||||
|
Channel,
|
||||||
|
)
|
||||||
from ._supervise import ActorNursery
|
from ._supervise import ActorNursery
|
||||||
import multiprocessing as mp
|
|
||||||
ProcessType = TypeVar('ProcessType', mp.Process, trio.Process)
|
ProcessType = TypeVar('ProcessType', mp.Process, trio.Process)
|
||||||
|
|
||||||
|
|
||||||
log = get_logger('tractor')
|
log = get_logger('tractor')
|
||||||
|
|
||||||
# placeholder for an mp start context if so using that backend
|
# placeholder for an mp start context if so using that backend
|
||||||
_ctx: Optional[mp.context.BaseContext] = None
|
_ctx: mp.context.BaseContext | None = None
|
||||||
SpawnMethodKey = Literal[
|
SpawnMethodKey = Literal[
|
||||||
'trio', # supported on all platforms
|
'trio', # supported on all platforms
|
||||||
'mp_spawn',
|
'mp_spawn',
|
||||||
|
|
@ -71,7 +80,6 @@ _spawn_method: SpawnMethodKey = 'trio'
|
||||||
|
|
||||||
if platform.system() == 'Windows':
|
if platform.system() == 'Windows':
|
||||||
|
|
||||||
import multiprocessing as mp
|
|
||||||
_ctx = mp.get_context("spawn")
|
_ctx = mp.get_context("spawn")
|
||||||
|
|
||||||
async def proc_waiter(proc: mp.Process) -> None:
|
async def proc_waiter(proc: mp.Process) -> None:
|
||||||
|
|
@ -86,7 +94,7 @@ else:
|
||||||
def try_set_start_method(
|
def try_set_start_method(
|
||||||
key: SpawnMethodKey
|
key: SpawnMethodKey
|
||||||
|
|
||||||
) -> Optional[mp.context.BaseContext]:
|
) -> mp.context.BaseContext | None:
|
||||||
'''
|
'''
|
||||||
Attempt to set the method for process starting, aka the "actor
|
Attempt to set the method for process starting, aka the "actor
|
||||||
spawning backend".
|
spawning backend".
|
||||||
|
|
@ -142,11 +150,13 @@ async def exhaust_portal(
|
||||||
'''
|
'''
|
||||||
__tracebackhide__ = True
|
__tracebackhide__ = True
|
||||||
try:
|
try:
|
||||||
log.debug(f"Waiting on final result from {actor.uid}")
|
log.debug(
|
||||||
|
f'Waiting on final result from {actor.uid}'
|
||||||
|
)
|
||||||
|
|
||||||
# XXX: streams should never be reaped here since they should
|
# XXX: streams should never be reaped here since they should
|
||||||
# always be established and shutdown using a context manager api
|
# always be established and shutdown using a context manager api
|
||||||
final = await portal.result()
|
final: Any = await portal.wait_for_result()
|
||||||
|
|
||||||
except (
|
except (
|
||||||
Exception,
|
Exception,
|
||||||
|
|
@ -154,13 +164,23 @@ async def exhaust_portal(
|
||||||
) as err:
|
) as err:
|
||||||
# we reraise in the parent task via a ``BaseExceptionGroup``
|
# we reraise in the parent task via a ``BaseExceptionGroup``
|
||||||
return err
|
return err
|
||||||
|
|
||||||
except trio.Cancelled as err:
|
except trio.Cancelled as err:
|
||||||
# lol, of course we need this too ;P
|
# lol, of course we need this too ;P
|
||||||
# TODO: merge with above?
|
# TODO: merge with above?
|
||||||
log.warning(f"Cancelled result waiter for {portal.actor.uid}")
|
log.warning(
|
||||||
|
'Cancelled portal result waiter task:\n'
|
||||||
|
f'uid: {portal.channel.aid}\n'
|
||||||
|
f'error: {err}\n'
|
||||||
|
)
|
||||||
return err
|
return err
|
||||||
|
|
||||||
else:
|
else:
|
||||||
log.debug(f"Returning final result: {final}")
|
log.debug(
|
||||||
|
f'Returning final result from portal:\n'
|
||||||
|
f'uid: {portal.channel.aid}\n'
|
||||||
|
f'result: {final}\n'
|
||||||
|
)
|
||||||
return final
|
return final
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -172,55 +192,139 @@ async def cancel_on_completion(
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
'''
|
'''
|
||||||
Cancel actor gracefully once it's "main" portal's
|
Cancel actor gracefully once its "main" portal's
|
||||||
result arrives.
|
result arrives.
|
||||||
|
|
||||||
Should only be called for actors spawned with `run_in_actor()`.
|
Should only be called for actors spawned via the
|
||||||
|
`Portal.run_in_actor()` API.
|
||||||
|
|
||||||
|
=> and really this API will be deprecated and should be
|
||||||
|
re-implemented as a `.hilevel.one_shot_task_nursery()`..)
|
||||||
|
|
||||||
'''
|
'''
|
||||||
# if this call errors we store the exception for later
|
# if this call errors we store the exception for later
|
||||||
# in ``errors`` which will be reraised inside
|
# in ``errors`` which will be reraised inside
|
||||||
# an exception group and we still send out a cancel request
|
# an exception group and we still send out a cancel request
|
||||||
result = await exhaust_portal(portal, actor)
|
result: Any|Exception = await exhaust_portal(
|
||||||
|
portal,
|
||||||
|
actor,
|
||||||
|
)
|
||||||
if isinstance(result, Exception):
|
if isinstance(result, Exception):
|
||||||
errors[actor.uid] = result
|
errors[actor.uid]: Exception = result
|
||||||
log.warning(
|
log.cancel(
|
||||||
f"Cancelling {portal.channel.uid} after error {result}"
|
'Cancelling subactor runtime due to error:\n\n'
|
||||||
|
f'Portal.cancel_actor() => {portal.channel.uid}\n\n'
|
||||||
|
f'error: {result}\n'
|
||||||
)
|
)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
log.runtime(
|
log.runtime(
|
||||||
f"Cancelling {portal.channel.uid} gracefully "
|
'Cancelling subactor gracefully:\n\n'
|
||||||
f"after result {result}")
|
f'Portal.cancel_actor() => {portal.channel.uid}\n\n'
|
||||||
|
f'result: {result}\n'
|
||||||
|
)
|
||||||
|
|
||||||
# cancel the process now that we have a final result
|
# cancel the process now that we have a final result
|
||||||
await portal.cancel_actor()
|
await portal.cancel_actor()
|
||||||
|
|
||||||
|
|
||||||
async def do_hard_kill(
|
async def hard_kill(
|
||||||
proc: trio.Process,
|
proc: trio.Process,
|
||||||
terminate_after: int = 3,
|
|
||||||
|
terminate_after: int = 1.6,
|
||||||
|
# NOTE: for mucking with `.pause()`-ing inside the runtime
|
||||||
|
# whilst also hacking on it XD
|
||||||
|
# terminate_after: int = 99999,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
|
'''
|
||||||
|
Un-gracefully terminate an OS level `trio.Process` after timeout.
|
||||||
|
|
||||||
|
Used in 2 main cases:
|
||||||
|
|
||||||
|
- "unknown remote runtime state": a hanging/stalled actor that
|
||||||
|
isn't responding after sending a (graceful) runtime cancel
|
||||||
|
request via an IPC msg.
|
||||||
|
- "cancelled during spawn": a process who's actor runtime was
|
||||||
|
cancelled before full startup completed (such that
|
||||||
|
cancel-request-handling machinery was never fully
|
||||||
|
initialized) and thus a "cancel request msg" is never going
|
||||||
|
to be handled.
|
||||||
|
|
||||||
|
'''
|
||||||
|
log.cancel(
|
||||||
|
'Terminating sub-proc\n'
|
||||||
|
f'>x)\n'
|
||||||
|
f' |_{proc}\n'
|
||||||
|
)
|
||||||
# NOTE: this timeout used to do nothing since we were shielding
|
# NOTE: this timeout used to do nothing since we were shielding
|
||||||
# the ``.wait()`` inside ``new_proc()`` which will pretty much
|
# the ``.wait()`` inside ``new_proc()`` which will pretty much
|
||||||
# never release until the process exits, now it acts as
|
# never release until the process exits, now it acts as
|
||||||
# a hard-kill time ultimatum.
|
# a hard-kill time ultimatum.
|
||||||
with trio.move_on_after(terminate_after) as cs:
|
with trio.move_on_after(terminate_after) as cs:
|
||||||
|
|
||||||
# NOTE: This ``__aexit__()`` shields internally.
|
# NOTE: code below was copied verbatim from the now deprecated
|
||||||
async with proc: # calls ``trio.Process.aclose()``
|
# (in 0.20.0) ``trio._subrocess.Process.aclose()``, orig doc
|
||||||
log.debug(f"Terminating {proc}")
|
# string:
|
||||||
|
#
|
||||||
|
# Close any pipes we have to the process (both input and output)
|
||||||
|
# and wait for it to exit. If cancelled, kills the process and
|
||||||
|
# waits for it to finish exiting before propagating the
|
||||||
|
# cancellation.
|
||||||
|
#
|
||||||
|
# This code was originally triggred by ``proc.__aexit__()``
|
||||||
|
# but now must be called manually.
|
||||||
|
with trio.CancelScope(shield=True):
|
||||||
|
if proc.stdin is not None:
|
||||||
|
await proc.stdin.aclose()
|
||||||
|
if proc.stdout is not None:
|
||||||
|
await proc.stdout.aclose()
|
||||||
|
if proc.stderr is not None:
|
||||||
|
await proc.stderr.aclose()
|
||||||
|
try:
|
||||||
|
await proc.wait()
|
||||||
|
finally:
|
||||||
|
if proc.returncode is None:
|
||||||
|
proc.kill()
|
||||||
|
with trio.CancelScope(shield=True):
|
||||||
|
await proc.wait()
|
||||||
|
|
||||||
|
# XXX NOTE XXX: zombie squad dispatch:
|
||||||
|
# (should ideally never, but) If we do get here it means
|
||||||
|
# graceful termination of a process failed and we need to
|
||||||
|
# resort to OS level signalling to interrupt and cancel the
|
||||||
|
# (presumably stalled or hung) actor. Since we never allow
|
||||||
|
# zombies (as a feature) we ask the OS to do send in the
|
||||||
|
# removal swad as the last resort.
|
||||||
if cs.cancelled_caught:
|
if cs.cancelled_caught:
|
||||||
# XXX: should pretty much never get here unless we have
|
|
||||||
# to move the bits from ``proc.__aexit__()`` out and
|
# TODO? attempt at intermediary-rent-sub
|
||||||
# into here.
|
# with child in debug lock?
|
||||||
log.critical(f"#ZOMBIE_LORD_IS_HERE: {proc}")
|
# |_https://github.com/goodboy/tractor/issues/320
|
||||||
|
#
|
||||||
|
# if not is_root_process():
|
||||||
|
# log.warning(
|
||||||
|
# 'Attempting to acquire debug-REPL-lock before zombie reap!'
|
||||||
|
# )
|
||||||
|
# with trio.CancelScope(shield=True):
|
||||||
|
# async with debug.acquire_debug_lock(
|
||||||
|
# subactor_uid=current_actor().uid,
|
||||||
|
# ) as _ctx:
|
||||||
|
# log.warning(
|
||||||
|
# 'Acquired debug lock, child ready to be killed ??\n'
|
||||||
|
# )
|
||||||
|
|
||||||
|
# TODO: toss in the skynet-logo face as ascii art?
|
||||||
|
log.critical(
|
||||||
|
# 'Well, the #ZOMBIE_LORD_IS_HERE# to collect\n'
|
||||||
|
'#T-800 deployed to collect zombie B0\n'
|
||||||
|
f'>x)\n'
|
||||||
|
f' |_{proc}\n'
|
||||||
|
)
|
||||||
proc.kill()
|
proc.kill()
|
||||||
|
|
||||||
|
|
||||||
async def soft_wait(
|
async def soft_kill(
|
||||||
|
|
||||||
proc: ProcessType,
|
proc: ProcessType,
|
||||||
wait_func: Callable[
|
wait_func: Callable[
|
||||||
[ProcessType],
|
[ProcessType],
|
||||||
|
|
@ -229,15 +333,42 @@ async def soft_wait(
|
||||||
portal: Portal,
|
portal: Portal,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
# Wait for proc termination but **dont' yet** call
|
'''
|
||||||
# ``trio.Process.__aexit__()`` (it tears down stdio
|
Wait for proc termination but **don't yet** teardown
|
||||||
# which will kill any waiting remote pdb trace).
|
std-streams since it will clobber any ongoing pdb REPL
|
||||||
# This is a "soft" (cancellable) join/reap.
|
session.
|
||||||
uid = portal.channel.uid
|
|
||||||
|
This is our "soft"/graceful, and thus itself also cancellable,
|
||||||
|
join/reap on an actor-runtime-in-process shutdown; it is
|
||||||
|
**not** the same as a "hard kill" via an OS signal (for that
|
||||||
|
see `.hard_kill()`).
|
||||||
|
|
||||||
|
'''
|
||||||
|
chan: Channel = portal.channel
|
||||||
|
peer_aid: msgtypes.Aid = chan.aid
|
||||||
try:
|
try:
|
||||||
log.cancel(f'Soft waiting on actor:\n{uid}')
|
log.cancel(
|
||||||
|
f'Soft killing sub-actor via portal request\n'
|
||||||
|
f'\n'
|
||||||
|
f'c)=> {peer_aid.reprol()}@[{chan.maddr}]\n'
|
||||||
|
f' |_{proc}\n'
|
||||||
|
)
|
||||||
|
# wait on sub-proc to signal termination
|
||||||
await wait_func(proc)
|
await wait_func(proc)
|
||||||
|
|
||||||
except trio.Cancelled:
|
except trio.Cancelled:
|
||||||
|
with trio.CancelScope(shield=True):
|
||||||
|
await debug.maybe_wait_for_debugger(
|
||||||
|
child_in_debug=_runtime_vars.get(
|
||||||
|
'_debug_mode', False
|
||||||
|
),
|
||||||
|
header_msg=(
|
||||||
|
'Delaying `soft_kill()` subproc reaper while debugger locked..\n'
|
||||||
|
),
|
||||||
|
# TODO: need a diff value then default?
|
||||||
|
# poll_steps=9999999,
|
||||||
|
)
|
||||||
|
|
||||||
# if cancelled during a soft wait, cancel the child
|
# if cancelled during a soft wait, cancel the child
|
||||||
# actor before entering the hard reap sequence
|
# actor before entering the hard reap sequence
|
||||||
# below. This means we try to do a graceful teardown
|
# below. This means we try to do a graceful teardown
|
||||||
|
|
@ -248,20 +379,29 @@ async def soft_wait(
|
||||||
|
|
||||||
async def cancel_on_proc_deth():
|
async def cancel_on_proc_deth():
|
||||||
'''
|
'''
|
||||||
Cancel the actor cancel request if we detect that
|
"Cancel-the-cancel" request: if we detect that the
|
||||||
that the process terminated.
|
underlying sub-process exited prior to
|
||||||
|
a `Portal.cancel_actor()` call completing .
|
||||||
|
|
||||||
'''
|
'''
|
||||||
await wait_func(proc)
|
await wait_func(proc)
|
||||||
n.cancel_scope.cancel()
|
n.cancel_scope.cancel()
|
||||||
|
|
||||||
|
# start a task to wait on the termination of the
|
||||||
|
# process by itself waiting on a (caller provided) wait
|
||||||
|
# function which should unblock when the target process
|
||||||
|
# has terminated.
|
||||||
n.start_soon(cancel_on_proc_deth)
|
n.start_soon(cancel_on_proc_deth)
|
||||||
|
|
||||||
|
# send the actor-runtime a cancel request.
|
||||||
await portal.cancel_actor()
|
await portal.cancel_actor()
|
||||||
|
|
||||||
if proc.poll() is None: # type: ignore
|
if proc.poll() is None: # type: ignore
|
||||||
log.warning(
|
log.warning(
|
||||||
f'Process still alive after cancel request:\n{uid}')
|
'Subactor still alive after cancel request?\n\n'
|
||||||
|
f'uid: {peer_aid}\n'
|
||||||
|
f'|_{proc}\n'
|
||||||
|
)
|
||||||
n.cancel_scope.cancel()
|
n.cancel_scope.cancel()
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
@ -273,19 +413,20 @@ async def new_proc(
|
||||||
errors: dict[tuple[str, str], Exception],
|
errors: dict[tuple[str, str], Exception],
|
||||||
|
|
||||||
# passed through to actor main
|
# passed through to actor main
|
||||||
bind_addr: tuple[str, int],
|
bind_addrs: list[UnwrappedAddress],
|
||||||
parent_addr: tuple[str, int],
|
parent_addr: UnwrappedAddress,
|
||||||
_runtime_vars: dict[str, Any], # serialized and sent to _child
|
_runtime_vars: dict[str, Any], # serialized and sent to _child
|
||||||
|
|
||||||
*,
|
*,
|
||||||
|
|
||||||
infect_asyncio: bool = False,
|
infect_asyncio: bool = False,
|
||||||
task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED
|
task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED,
|
||||||
|
proc_kwargs: dict[str, any] = {}
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
||||||
# lookup backend spawning target
|
# lookup backend spawning target
|
||||||
target = _methods[_spawn_method]
|
target: Callable = _methods[_spawn_method]
|
||||||
|
|
||||||
# mark the new actor with the global spawn method
|
# mark the new actor with the global spawn method
|
||||||
subactor._spawn_method = _spawn_method
|
subactor._spawn_method = _spawn_method
|
||||||
|
|
@ -295,11 +436,12 @@ async def new_proc(
|
||||||
actor_nursery,
|
actor_nursery,
|
||||||
subactor,
|
subactor,
|
||||||
errors,
|
errors,
|
||||||
bind_addr,
|
bind_addrs,
|
||||||
parent_addr,
|
parent_addr,
|
||||||
_runtime_vars, # run time vars
|
_runtime_vars, # run time vars
|
||||||
infect_asyncio=infect_asyncio,
|
infect_asyncio=infect_asyncio,
|
||||||
task_status=task_status,
|
task_status=task_status,
|
||||||
|
proc_kwargs=proc_kwargs
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -310,12 +452,13 @@ async def trio_proc(
|
||||||
errors: dict[tuple[str, str], Exception],
|
errors: dict[tuple[str, str], Exception],
|
||||||
|
|
||||||
# passed through to actor main
|
# passed through to actor main
|
||||||
bind_addr: tuple[str, int],
|
bind_addrs: list[UnwrappedAddress],
|
||||||
parent_addr: tuple[str, int],
|
parent_addr: UnwrappedAddress,
|
||||||
_runtime_vars: dict[str, Any], # serialized and sent to _child
|
_runtime_vars: dict[str, Any], # serialized and sent to _child
|
||||||
*,
|
*,
|
||||||
infect_asyncio: bool = False,
|
infect_asyncio: bool = False,
|
||||||
task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED
|
task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED,
|
||||||
|
proc_kwargs: dict[str, any] = {}
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
'''
|
'''
|
||||||
|
|
@ -337,6 +480,9 @@ async def trio_proc(
|
||||||
# the OS; it otherwise can be passed via the parent channel if
|
# the OS; it otherwise can be passed via the parent channel if
|
||||||
# we prefer in the future (for privacy).
|
# we prefer in the future (for privacy).
|
||||||
"--uid",
|
"--uid",
|
||||||
|
# TODO, how to pass this over "wire" encodings like
|
||||||
|
# cmdline args?
|
||||||
|
# -[ ] maybe we can add an `msgtypes.Aid.min_tuple()` ?
|
||||||
str(subactor.uid),
|
str(subactor.uid),
|
||||||
# Address the child must connect to on startup
|
# Address the child must connect to on startup
|
||||||
"--parent_addr",
|
"--parent_addr",
|
||||||
|
|
@ -353,20 +499,23 @@ async def trio_proc(
|
||||||
spawn_cmd.append("--asyncio")
|
spawn_cmd.append("--asyncio")
|
||||||
|
|
||||||
cancelled_during_spawn: bool = False
|
cancelled_during_spawn: bool = False
|
||||||
proc: Optional[trio.Process] = None
|
proc: trio.Process|None = None
|
||||||
|
ipc_server: _server.Server = actor_nursery._actor.ipc_server
|
||||||
try:
|
try:
|
||||||
try:
|
try:
|
||||||
# TODO: needs ``trio_typing`` patch?
|
proc: trio.Process = await trio.lowlevel.open_process(spawn_cmd, **proc_kwargs)
|
||||||
proc = await trio.lowlevel.open_process( # type: ignore
|
log.runtime(
|
||||||
spawn_cmd)
|
f'Started new child subproc\n'
|
||||||
|
f'(>\n'
|
||||||
log.runtime(f"Started {proc}")
|
f' |_{proc}\n'
|
||||||
|
)
|
||||||
|
|
||||||
# wait for actor to spawn and connect back to us
|
# wait for actor to spawn and connect back to us
|
||||||
# channel should have handshake completed by the
|
# channel should have handshake completed by the
|
||||||
# local actor by the time we get a ref to it
|
# local actor by the time we get a ref to it
|
||||||
event, chan = await actor_nursery._actor.wait_for_peer(
|
event, chan = await ipc_server.wait_for_peer(
|
||||||
subactor.uid)
|
subactor.uid
|
||||||
|
)
|
||||||
|
|
||||||
except trio.Cancelled:
|
except trio.Cancelled:
|
||||||
cancelled_during_spawn = True
|
cancelled_during_spawn = True
|
||||||
|
|
@ -376,10 +525,10 @@ async def trio_proc(
|
||||||
with trio.CancelScope(shield=True):
|
with trio.CancelScope(shield=True):
|
||||||
# don't clobber an ongoing pdb
|
# don't clobber an ongoing pdb
|
||||||
if is_root_process():
|
if is_root_process():
|
||||||
await maybe_wait_for_debugger()
|
await debug.maybe_wait_for_debugger()
|
||||||
|
|
||||||
elif proc is not None:
|
elif proc is not None:
|
||||||
async with acquire_debug_lock(subactor.uid):
|
async with debug.acquire_debug_lock(subactor.uid):
|
||||||
# soft wait on the proc to terminate
|
# soft wait on the proc to terminate
|
||||||
with trio.move_on_after(0.5):
|
with trio.move_on_after(0.5):
|
||||||
await proc.wait()
|
await proc.wait()
|
||||||
|
|
@ -395,18 +544,25 @@ async def trio_proc(
|
||||||
portal,
|
portal,
|
||||||
)
|
)
|
||||||
|
|
||||||
# send additional init params
|
# send a "spawning specification" which configures the
|
||||||
await chan.send({
|
# initial runtime state of the child.
|
||||||
"_parent_main_data": subactor._parent_main_data,
|
sspec = msgtypes.SpawnSpec(
|
||||||
"enable_modules": subactor.enable_modules,
|
_parent_main_data=subactor._parent_main_data,
|
||||||
"_arb_addr": subactor._arb_addr,
|
enable_modules=subactor.enable_modules,
|
||||||
"bind_host": bind_addr[0],
|
reg_addrs=subactor.reg_addrs,
|
||||||
"bind_port": bind_addr[1],
|
bind_addrs=bind_addrs,
|
||||||
"_runtime_vars": _runtime_vars,
|
_runtime_vars=_runtime_vars,
|
||||||
})
|
)
|
||||||
|
log.runtime(
|
||||||
|
f'Sending spawn spec to child\n'
|
||||||
|
f'{{}}=> {chan.aid.reprol()!r}\n'
|
||||||
|
f'\n'
|
||||||
|
f'{pretty_struct.pformat(sspec)}\n'
|
||||||
|
)
|
||||||
|
await chan.send(sspec)
|
||||||
|
|
||||||
# track subactor in current nursery
|
# track subactor in current nursery
|
||||||
curr_actor = current_actor()
|
curr_actor: Actor = current_actor()
|
||||||
curr_actor._actoruid2nursery[subactor.uid] = actor_nursery
|
curr_actor._actoruid2nursery[subactor.uid] = actor_nursery
|
||||||
|
|
||||||
# resume caller at next checkpoint now that child is up
|
# resume caller at next checkpoint now that child is up
|
||||||
|
|
@ -428,50 +584,76 @@ async def trio_proc(
|
||||||
# This is a "soft" (cancellable) join/reap which
|
# This is a "soft" (cancellable) join/reap which
|
||||||
# will remote cancel the actor on a ``trio.Cancelled``
|
# will remote cancel the actor on a ``trio.Cancelled``
|
||||||
# condition.
|
# condition.
|
||||||
await soft_wait(
|
await soft_kill(
|
||||||
proc,
|
proc,
|
||||||
trio.Process.wait,
|
trio.Process.wait, # XXX, uses `pidfd_open()` below.
|
||||||
portal
|
portal
|
||||||
)
|
)
|
||||||
|
|
||||||
# cancel result waiter that may have been spawned in
|
# cancel result waiter that may have been spawned in
|
||||||
# tandem if not done already
|
# tandem if not done already
|
||||||
log.warning(
|
log.cancel(
|
||||||
"Cancelling existing result waiter task for "
|
'Cancelling portal result reaper task\n'
|
||||||
f"{subactor.uid}")
|
f'c)> {subactor.aid.reprol()!r}\n'
|
||||||
|
)
|
||||||
nursery.cancel_scope.cancel()
|
nursery.cancel_scope.cancel()
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
# The "hard" reap since no actor zombies are allowed!
|
# XXX NOTE XXX: The "hard" reap since no actor zombies are
|
||||||
# XXX: do this **after** cancellation/tearfown to avoid
|
# allowed! Do this **after** cancellation/teardown to avoid
|
||||||
# killing the process too early.
|
# killing the process too early.
|
||||||
if proc:
|
if proc:
|
||||||
log.cancel(f'Hard reap sequence starting for {subactor.uid}')
|
reap_repr: str = _pformat.nest_from_op(
|
||||||
with trio.CancelScope(shield=True):
|
input_op='>x)',
|
||||||
|
text=subactor.pformat(),
|
||||||
|
)
|
||||||
|
log.cancel(
|
||||||
|
f'Hard reap sequence starting for subactor\n'
|
||||||
|
f'{reap_repr}'
|
||||||
|
)
|
||||||
|
|
||||||
|
with trio.CancelScope(shield=True):
|
||||||
# don't clobber an ongoing pdb
|
# don't clobber an ongoing pdb
|
||||||
if cancelled_during_spawn:
|
if cancelled_during_spawn:
|
||||||
# Try again to avoid TTY clobbering.
|
# Try again to avoid TTY clobbering.
|
||||||
async with acquire_debug_lock(subactor.uid):
|
async with debug.acquire_debug_lock(subactor.uid):
|
||||||
with trio.move_on_after(0.5):
|
with trio.move_on_after(0.5):
|
||||||
await proc.wait()
|
await proc.wait()
|
||||||
|
|
||||||
if is_root_process():
|
await debug.maybe_wait_for_debugger(
|
||||||
# TODO: solve the following issue where we need
|
child_in_debug=_runtime_vars.get(
|
||||||
# to do a similar wait like this but in an
|
'_debug_mode', False
|
||||||
# "intermediary" parent actor that itself isn't
|
),
|
||||||
# in debug but has a child that is, and we need
|
header_msg=(
|
||||||
# to hold off on relaying SIGINT until that child
|
'Delaying subproc reaper while debugger locked..\n'
|
||||||
# is complete.
|
),
|
||||||
# https://github.com/goodboy/tractor/issues/320
|
|
||||||
await maybe_wait_for_debugger(
|
# TODO: need a diff value then default?
|
||||||
child_in_debug=_runtime_vars.get(
|
# poll_steps=9999999,
|
||||||
'_debug_mode', False),
|
)
|
||||||
)
|
# TODO: solve the following issue where we need
|
||||||
|
# to do a similar wait like this but in an
|
||||||
|
# "intermediary" parent actor that itself isn't
|
||||||
|
# in debug but has a child that is, and we need
|
||||||
|
# to hold off on relaying SIGINT until that child
|
||||||
|
# is complete.
|
||||||
|
# https://github.com/goodboy/tractor/issues/320
|
||||||
|
# -[ ] we need to handle non-root parent-actors specially
|
||||||
|
# by somehow determining if a child is in debug and then
|
||||||
|
# avoiding cancel/kill of said child by this
|
||||||
|
# (intermediary) parent until such a time as the root says
|
||||||
|
# the pdb lock is released and we are good to tear down
|
||||||
|
# (our children)..
|
||||||
|
#
|
||||||
|
# -[ ] so maybe something like this where we try to
|
||||||
|
# acquire the lock and get notified of who has it,
|
||||||
|
# check that uid against our known children?
|
||||||
|
# this_uid: tuple[str, str] = current_actor().uid
|
||||||
|
# await debug.acquire_debug_lock(this_uid)
|
||||||
|
|
||||||
if proc.poll() is None:
|
if proc.poll() is None:
|
||||||
log.cancel(f"Attempting to hard kill {proc}")
|
log.cancel(f"Attempting to hard kill {proc}")
|
||||||
await do_hard_kill(proc)
|
await hard_kill(proc)
|
||||||
|
|
||||||
log.debug(f"Joined {proc}")
|
log.debug(f"Joined {proc}")
|
||||||
else:
|
else:
|
||||||
|
|
@ -489,12 +671,13 @@ async def mp_proc(
|
||||||
subactor: Actor,
|
subactor: Actor,
|
||||||
errors: dict[tuple[str, str], Exception],
|
errors: dict[tuple[str, str], Exception],
|
||||||
# passed through to actor main
|
# passed through to actor main
|
||||||
bind_addr: tuple[str, int],
|
bind_addrs: list[UnwrappedAddress],
|
||||||
parent_addr: tuple[str, int],
|
parent_addr: UnwrappedAddress,
|
||||||
_runtime_vars: dict[str, Any], # serialized and sent to _child
|
_runtime_vars: dict[str, Any], # serialized and sent to _child
|
||||||
*,
|
*,
|
||||||
infect_asyncio: bool = False,
|
infect_asyncio: bool = False,
|
||||||
task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED
|
task_status: TaskStatus[Portal] = trio.TASK_STATUS_IGNORED,
|
||||||
|
proc_kwargs: dict[str, any] = {}
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
||||||
|
|
@ -547,7 +730,7 @@ async def mp_proc(
|
||||||
target=_mp_main,
|
target=_mp_main,
|
||||||
args=(
|
args=(
|
||||||
subactor,
|
subactor,
|
||||||
bind_addr,
|
bind_addrs,
|
||||||
fs_info,
|
fs_info,
|
||||||
_spawn_method,
|
_spawn_method,
|
||||||
parent_addr,
|
parent_addr,
|
||||||
|
|
@ -569,12 +752,14 @@ async def mp_proc(
|
||||||
|
|
||||||
log.runtime(f"Started {proc}")
|
log.runtime(f"Started {proc}")
|
||||||
|
|
||||||
|
ipc_server: _server.Server = actor_nursery._actor.ipc_server
|
||||||
try:
|
try:
|
||||||
# wait for actor to spawn and connect back to us
|
# wait for actor to spawn and connect back to us
|
||||||
# channel should have handshake completed by the
|
# channel should have handshake completed by the
|
||||||
# local actor by the time we get a ref to it
|
# local actor by the time we get a ref to it
|
||||||
event, chan = await actor_nursery._actor.wait_for_peer(
|
event, chan = await ipc_server.wait_for_peer(
|
||||||
subactor.uid)
|
subactor.uid,
|
||||||
|
)
|
||||||
|
|
||||||
# XXX: monkey patch poll API to match the ``subprocess`` API..
|
# XXX: monkey patch poll API to match the ``subprocess`` API..
|
||||||
# not sure why they don't expose this but kk.
|
# not sure why they don't expose this but kk.
|
||||||
|
|
@ -615,7 +800,7 @@ async def mp_proc(
|
||||||
# This is a "soft" (cancellable) join/reap which
|
# This is a "soft" (cancellable) join/reap which
|
||||||
# will remote cancel the actor on a ``trio.Cancelled``
|
# will remote cancel the actor on a ``trio.Cancelled``
|
||||||
# condition.
|
# condition.
|
||||||
await soft_wait(
|
await soft_kill(
|
||||||
proc,
|
proc,
|
||||||
proc_waiter,
|
proc_waiter,
|
||||||
portal
|
portal
|
||||||
|
|
|
||||||
|
|
@ -14,50 +14,190 @@
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
Per process state
|
Per actor-process runtime state mgmt APIs.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
|
from __future__ import annotations
|
||||||
|
from contextvars import (
|
||||||
|
ContextVar,
|
||||||
|
)
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
from typing import (
|
from typing import (
|
||||||
Optional,
|
|
||||||
Any,
|
Any,
|
||||||
|
Literal,
|
||||||
|
TYPE_CHECKING,
|
||||||
)
|
)
|
||||||
|
|
||||||
import trio
|
from trio.lowlevel import current_task
|
||||||
|
|
||||||
from ._exceptions import NoRuntime
|
if TYPE_CHECKING:
|
||||||
|
from ._runtime import Actor
|
||||||
|
from ._context import Context
|
||||||
|
|
||||||
|
|
||||||
_current_actor: Optional['Actor'] = None # type: ignore # noqa
|
# default IPC transport protocol settings
|
||||||
|
TransportProtocolKey = Literal[
|
||||||
|
'tcp',
|
||||||
|
'uds',
|
||||||
|
]
|
||||||
|
_def_tpt_proto: TransportProtocolKey = 'tcp'
|
||||||
|
|
||||||
|
_current_actor: Actor|None = None # type: ignore # noqa
|
||||||
|
_last_actor_terminated: Actor|None = None
|
||||||
|
|
||||||
|
# TODO: mk this a `msgspec.Struct`!
|
||||||
|
# -[ ] type out all fields obvi!
|
||||||
|
# -[ ] (eventually) mk wire-ready for monitoring?
|
||||||
_runtime_vars: dict[str, Any] = {
|
_runtime_vars: dict[str, Any] = {
|
||||||
'_debug_mode': False,
|
# root of actor-process tree info
|
||||||
'_is_root': False,
|
'_is_root': False, # bool
|
||||||
'_root_mailbox': (None, None)
|
'_root_mailbox': (None, None), # tuple[str|None, str|None]
|
||||||
|
'_root_addrs': [], # tuple[str|None, str|None]
|
||||||
|
|
||||||
|
# parent->chld ipc protocol caps
|
||||||
|
'_enable_tpts': [_def_tpt_proto],
|
||||||
|
|
||||||
|
# registrar info
|
||||||
|
'_registry_addrs': [],
|
||||||
|
|
||||||
|
# `debug_mode: bool` settings
|
||||||
|
'_debug_mode': False, # bool
|
||||||
|
'repl_fixture': False, # |AbstractContextManager[bool]
|
||||||
|
# for `tractor.pause_from_sync()` & `breakpoint()` support
|
||||||
|
'use_greenback': False,
|
||||||
|
|
||||||
|
# infected-`asyncio`-mode: `trio` running as guest.
|
||||||
|
'_is_infected_aio': False,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def current_actor(err_on_no_runtime: bool = True) -> 'Actor': # type: ignore # noqa
|
def last_actor() -> Actor|None:
|
||||||
"""Get the process-local actor instance.
|
'''
|
||||||
"""
|
Try to return last active `Actor` singleton
|
||||||
if _current_actor is None and err_on_no_runtime:
|
for this process.
|
||||||
raise NoRuntime("No local actor has been initialized yet")
|
|
||||||
|
For case where runtime already exited but someone is asking
|
||||||
|
about the "last" actor probably to get its `.uid: tuple`.
|
||||||
|
|
||||||
|
'''
|
||||||
|
return _last_actor_terminated
|
||||||
|
|
||||||
|
|
||||||
|
def current_actor(
|
||||||
|
err_on_no_runtime: bool = True,
|
||||||
|
) -> Actor:
|
||||||
|
'''
|
||||||
|
Get the process-local actor instance.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if (
|
||||||
|
err_on_no_runtime
|
||||||
|
and
|
||||||
|
_current_actor is None
|
||||||
|
):
|
||||||
|
msg: str = 'No local actor has been initialized yet?\n'
|
||||||
|
from ._exceptions import NoRuntime
|
||||||
|
|
||||||
|
if last := last_actor():
|
||||||
|
msg += (
|
||||||
|
f'Apparently the lact active actor was\n'
|
||||||
|
f'|_{last}\n'
|
||||||
|
f'|_{last.uid}\n'
|
||||||
|
)
|
||||||
|
# no actor runtime has (as of yet) ever been started for
|
||||||
|
# this process.
|
||||||
|
else:
|
||||||
|
msg += (
|
||||||
|
# 'No last actor found?\n'
|
||||||
|
'\nDid you forget to call one of,\n'
|
||||||
|
'- `tractor.open_root_actor()`\n'
|
||||||
|
'- `tractor.open_nursery()`\n'
|
||||||
|
)
|
||||||
|
|
||||||
|
raise NoRuntime(msg)
|
||||||
|
|
||||||
return _current_actor
|
return _current_actor
|
||||||
|
|
||||||
|
|
||||||
def is_main_process() -> bool:
|
def is_root_process() -> bool:
|
||||||
"""Bool determining if this actor is running in the top-most process.
|
'''
|
||||||
"""
|
Bool determining if this actor is running in the top-most process.
|
||||||
|
|
||||||
|
'''
|
||||||
import multiprocessing as mp
|
import multiprocessing as mp
|
||||||
return mp.current_process().name == 'MainProcess'
|
return mp.current_process().name == 'MainProcess'
|
||||||
|
|
||||||
|
|
||||||
def debug_mode() -> bool:
|
is_main_process = is_root_process
|
||||||
"""Bool determining if "debug mode" is on which enables
|
|
||||||
|
|
||||||
|
def is_debug_mode() -> bool:
|
||||||
|
'''
|
||||||
|
Bool determining if "debug mode" is on which enables
|
||||||
remote subactor pdb entry on crashes.
|
remote subactor pdb entry on crashes.
|
||||||
"""
|
|
||||||
|
'''
|
||||||
return bool(_runtime_vars['_debug_mode'])
|
return bool(_runtime_vars['_debug_mode'])
|
||||||
|
|
||||||
|
|
||||||
|
debug_mode = is_debug_mode
|
||||||
|
|
||||||
|
|
||||||
def is_root_process() -> bool:
|
def is_root_process() -> bool:
|
||||||
return _runtime_vars['_is_root']
|
return _runtime_vars['_is_root']
|
||||||
|
|
||||||
|
|
||||||
|
_ctxvar_Context: ContextVar[Context] = ContextVar(
|
||||||
|
'ipc_context',
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def current_ipc_ctx(
|
||||||
|
error_on_not_set: bool = False,
|
||||||
|
) -> Context|None:
|
||||||
|
ctx: Context = _ctxvar_Context.get()
|
||||||
|
|
||||||
|
if (
|
||||||
|
not ctx
|
||||||
|
and error_on_not_set
|
||||||
|
):
|
||||||
|
from ._exceptions import InternalError
|
||||||
|
raise InternalError(
|
||||||
|
'No IPC context has been allocated for this task yet?\n'
|
||||||
|
f'|_{current_task()}\n'
|
||||||
|
)
|
||||||
|
return ctx
|
||||||
|
|
||||||
|
|
||||||
|
# std ODE (mutable) app state location
|
||||||
|
_rtdir: Path = Path(os.environ['XDG_RUNTIME_DIR'])
|
||||||
|
|
||||||
|
|
||||||
|
def get_rt_dir(
|
||||||
|
subdir: str = 'tractor'
|
||||||
|
) -> Path:
|
||||||
|
'''
|
||||||
|
Return the user "runtime dir" where most userspace apps stick
|
||||||
|
their IPC and cache related system util-files; we take hold
|
||||||
|
of a `'XDG_RUNTIME_DIR'/tractor/` subdir by default.
|
||||||
|
|
||||||
|
'''
|
||||||
|
rtdir: Path = _rtdir / subdir
|
||||||
|
if not rtdir.is_dir():
|
||||||
|
rtdir.mkdir()
|
||||||
|
return rtdir
|
||||||
|
|
||||||
|
|
||||||
|
def current_ipc_protos() -> list[str]:
|
||||||
|
'''
|
||||||
|
Return the list of IPC transport protocol keys currently
|
||||||
|
in use by this actor.
|
||||||
|
|
||||||
|
The keys are as declared by `MsgTransport` and `Address`
|
||||||
|
concrete-backend sub-types defined throughout `tractor.ipc`.
|
||||||
|
|
||||||
|
'''
|
||||||
|
return _runtime_vars['_enable_tpts']
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue