"""Combination operators."""
from __future__ import annotations
import asyncio
import builtins
from typing import (
Awaitable,
Protocol,
TypeVar,
AsyncIterable,
AsyncIterator,
Callable,
cast,
)
from typing_extensions import ParamSpec
from ..aiter_utils import AsyncExitStack, anext
from ..core import streamcontext, pipable_operator
from . import create
from . import select
from . import advanced
from . import aggregate
__all__ = ["chain", "zip", "map", "merge", "ziplatest", "amap", "smap"]
T = TypeVar("T")
U = TypeVar("U")
K = TypeVar("K")
P = ParamSpec("P")


@pipable_operator
async def chain(
    source: AsyncIterable[T], *more_sources: AsyncIterable[T]
) -> AsyncIterator[T]:
    """Chain asynchronous sequences together, in the order they are given.

    Note: the sequences are not iterated until it is required,
    so if the operation is interrupted, the remaining sequences
    will be left untouched.
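
    Example (a minimal sketch; ``stream.range`` provides finite sources)::

        from aiostream import stream

        xs = stream.range(3)        # 0, 1, 2
        ys = stream.range(3, 6)     # 3, 4, 5
        zs = stream.chain(xs, ys)   # 0, 1, 2, 3, 4, 5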
"""
sources = source, *more_sources
for source in sources:
async with streamcontext(source) as streamer:
async for item in streamer:
yield item


@pipable_operator
async def zip(
    source: AsyncIterable[T], *more_sources: AsyncIterable[T]
) -> AsyncIterator[tuple[T, ...]]:
    """Combine and forward the elements of several asynchronous sequences.

    Each generated value is a tuple of elements, using the same order as
    their respective sources. The generation continues until the shortest
    sequence is exhausted.

    Note: the different sequences are awaited in parallel, so that their
    waiting times don't add up.
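
    Example (a minimal sketch)::

        from aiostream import stream

        xs = stream.range(3)        # 0, 1, 2
        ys = stream.range(10, 13)   # 10, 11, 12
        zs = stream.zip(xs, ys)     # (0, 10), (1, 11), (2, 12)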
"""
sources = source, *more_sources
# One sources
if len(sources) == 1:
(source,) = sources
async with streamcontext(source) as streamer:
async for item in streamer:
yield (item,)
return
# N sources
async with AsyncExitStack() as stack:
# Handle resources
streamers = [
await stack.enter_async_context(streamcontext(source)) for source in sources
]
# Loop over items
while True:
try:
coros = builtins.map(anext, streamers)
items = await asyncio.gather(*coros)
except StopAsyncIteration:
break
else:
yield tuple(items)


X = TypeVar("X", contravariant=True)
Y = TypeVar("Y", covariant=True)


class SmapCallable(Protocol[X, Y]):
    def __call__(self, arg: X, /, *args: X) -> Y:
        ...


class AmapCallable(Protocol[X, Y]):
    async def __call__(self, arg: X, /, *args: X) -> Y:
        ...


class MapCallable(Protocol[X, Y]):
    def __call__(self, arg: X, /, *args: X) -> Awaitable[Y] | Y:
        ...


@pipable_operator
async def smap(
    source: AsyncIterable[T],
    func: SmapCallable[T, U],
    *more_sources: AsyncIterable[T],
) -> AsyncIterator[U]:
    """Apply a given function to the elements of one or several
    asynchronous sequences.

    Each element is used as a positional argument, using the same order as
    their respective sources. The generation continues until the shortest
    sequence is exhausted. The function is treated synchronously.

    Note: if more than one sequence is provided, they're awaited concurrently
    so that their waiting times don't add up.
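
    Example (a minimal sketch, using a plain synchronous function)::

        from aiostream import stream

        xs = stream.range(3)                   # 0, 1, 2
        ys = stream.smap(xs, lambda x: x * 2)  # 0, 2, 4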
"""
stream = zip(source, *more_sources)
async with streamcontext(stream) as streamer:
async for item in streamer:
yield func(*item)


@pipable_operator
def amap(
    source: AsyncIterable[T],
    corofn: AmapCallable[T, U],
    *more_sources: AsyncIterable[T],
    ordered: bool = True,
    task_limit: int | None = None,
) -> AsyncIterator[U]:
    """Apply a given coroutine function to the elements of one or several
    asynchronous sequences.

    Each element is used as a positional argument, using the same order as
    their respective sources. The generation continues until the shortest
    sequence is exhausted.

    The results can be returned either in order or out of order, depending
    on the ``ordered`` argument.

    The coroutines run concurrently but their number can be limited using
    the ``task_limit`` argument. A value of ``1`` will cause the coroutines
    to run sequentially.

    If more than one sequence is provided, they're also awaited concurrently,
    so that their waiting times don't add up.
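
    Example (a minimal sketch; the coroutine stands in for any asynchronous
    call)::

        import asyncio
        from aiostream import stream

        async def double(x):
            await asyncio.sleep(0.1)  # pretend to do asynchronous work
            return x * 2

        xs = stream.range(3)                        # 0, 1, 2
        ys = stream.amap(xs, double, task_limit=2)  # 0, 2, 4 (in order)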
"""
async def func(arg: T, *args: T) -> AsyncIterable[U]:
yield await corofn(arg, *args)
if ordered:
return advanced.concatmap.raw(
source, func, *more_sources, task_limit=task_limit
)
return advanced.flatmap.raw(source, func, *more_sources, task_limit=task_limit)


@pipable_operator
def map(
    source: AsyncIterable[T],
    func: MapCallable[T, U],
    *more_sources: AsyncIterable[T],
    ordered: bool = True,
    task_limit: int | None = None,
) -> AsyncIterator[U]:
    """Apply a given function to the elements of one or several
    asynchronous sequences.

    Each element is used as a positional argument, using the same order as
    their respective sources. The generation continues until the shortest
    sequence is exhausted. The function can either be synchronous or
    asynchronous (coroutine function).

    The results can be returned either in order or out of order, depending
    on the ``ordered`` argument. This argument is ignored if the provided
    function is synchronous.

    The coroutines run concurrently but their number can be limited using
    the ``task_limit`` argument. A value of ``1`` will cause the coroutines
    to run sequentially. This argument is ignored if the provided function
    is synchronous.

    If more than one sequence is provided, they're also awaited concurrently,
    so that their waiting times don't add up.

    It might happen that the provided function returns a coroutine but is not
    a coroutine function per se. In this case, one can wrap the function with
    ``aiostream.async_`` in order to force ``map`` to await the resulting
    coroutine. The following example illustrates the use of ``async_`` with a
    lambda function::

        from aiostream import stream, async_
        ...
        ys = stream.map(xs, async_(lambda ms: asyncio.sleep(ms / 1000)))
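
    A simpler sketch with a plain synchronous function, which is dispatched
    to ``smap`` internally::

        from aiostream import stream

        xs = stream.range(3)                  # 0, 1, 2
        ys = stream.map(xs, lambda x: x + 1)  # 1, 2, 3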
"""
if asyncio.iscoroutinefunction(func):
return amap.raw(
source, func, *more_sources, ordered=ordered, task_limit=task_limit
)
sync_func = cast("SmapCallable[T, U]", func)
return smap.raw(source, sync_func, *more_sources)


@pipable_operator
def merge(
    source: AsyncIterable[T], *more_sources: AsyncIterable[T]
) -> AsyncIterator[T]:
    """Merge several asynchronous sequences together.

    All the sequences are iterated simultaneously and their elements
    are forwarded as soon as they're available. The generation continues
    until all the sequences are exhausted.
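
    Example (a minimal sketch; ``interval`` staggers the sources, and the
    exact interleaving depends on timing)::

        from aiostream import stream

        xs = stream.range(0, 6, 2, interval=0.2)  # 0, 2, 4
        ys = stream.range(1, 7, 2, interval=0.2)  # 1, 3, 5
        zs = stream.merge(xs, ys)                 # 0, 1, 2, ... (by arrival)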
"""
sources = [source, *more_sources]
source_stream: AsyncIterable[AsyncIterable[T]] = create.iterate.raw(sources)
return advanced.flatten.raw(source_stream)


@pipable_operator
def ziplatest(
    source: AsyncIterable[T],
    *more_sources: AsyncIterable[T],
    partial: bool = True,
    default: T | None = None,
) -> AsyncIterator[tuple[T | None, ...]]:
    """Combine several asynchronous sequences together, producing a tuple with
    the latest element of each sequence whenever a new element is received.

    The value to use when a sequence has not produced any element yet is given
    by the ``default`` argument (defaulting to ``None``).

    The production of partial results can be disabled by setting the optional
    argument ``partial`` to ``False``.

    All the sequences are iterated simultaneously and their elements
    are forwarded as soon as they're available. The generation continues
    until all the sequences are exhausted.
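
    Example (a minimal sketch; the exact interleaving depends on timing)::

        from aiostream import stream

        xs = stream.range(0, 4, interval=0.2)
        ys = stream.range(10, 14, interval=0.3)
        zs = stream.ziplatest(xs, ys)  # e.g. (0, None), (0, 10), (1, 10), ...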
"""
sources = source, *more_sources
n = len(sources)
# Custom getter
def getter(dct: dict[int, T]) -> Callable[[int], T | None]:
return lambda key: dct.get(key, default)
# Add source index to the items
def make_func(i: int) -> SmapCallable[T, dict[int, T]]:
def func(x: T, *_: object) -> dict[int, T]:
return {i: x}
return func
new_sources = [smap.raw(source, make_func(i)) for i, source in enumerate(sources)]
# Merge the sources
merged = merge.raw(*new_sources)
# Accumulate the current state in a dict
accumulated = aggregate.accumulate.raw(merged, lambda x, e: {**x, **e})
# Filter partial result
filtered = (
accumulated
if partial
else select.filter.raw(accumulated, lambda x: len(x) == n)
)
# Convert the state dict to a tuple
def dict_to_tuple(x: dict[int, T], *_: object) -> tuple[T | None, ...]:
return tuple(builtins.map(getter(x), range(n)))
return smap.raw(filtered, dict_to_tuple)