print("5) Concurrency progress: thread_map / process_map")


def cpuish(n: int) -> int:
    """CPU-bound toy workload: fold n*i into a rolling modular sum.

    Deterministic for a given n, so results are reproducible across workers.
    """
    x = 0
    # Fixed iteration count so every call costs roughly the same CPU time.
    for i in range(50_000):
        # Reducing mod 1_000_003 each step keeps x bounded; since only
        # addition is involved, the result equals (n * sum(range(50_000))) % 1_000_003.
        x = (x + (n * i)) % 1_000_003
    return x
# Shared input; process_map gets a smaller slice since per-process
# start-up/pickling overhead dominates for tiny tasks.
nums = list(range(80))

# Thread pool: the GIL limits true parallelism for this CPU-bound work,
# but the point of the demo is the per-map progress bar.
thread_results = thread_map(cpuish, nums, max_workers=8, desc="thread_map")
print("thread_map finished:", len(thread_results))

# Process pool: real parallelism; chunksize batches items to cut IPC overhead.
proc_results = process_map(cpuish, nums[:20], max_workers=2, chunksize=2, desc="process_map")
print("process_map finished:", len(proc_results))
print()
print("6) logging_redirect_tqdm (logs won't break bars)")

# Configure a simple stream logger; replacing .handlers wholesale avoids
# stacking duplicate handlers when the cell/script is re-run.
logger = logging.getLogger("demo")
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
logger.handlers = [handler]

# Inside this context, log records are routed through tqdm.write() so they
# print above the progress bar instead of corrupting it.
with logging_redirect_tqdm():
    for ok in tqdm(range(60), desc="Work with logs"):
        time.sleep(0.01)
        if ok in (5, 25, 45):
            # Lazy %-style args: the message is only formatted if emitted.
            logger.info("checkpoint ok=%s", ok)
print()
print("7) asyncio progress (as_completed) — Colab/Jupyter-safe")


async def io_task(i: int):
    """Pretend to do some I/O: pause 20-120 ms, then return an (index, score) pair."""
    delay = random.uniform(0.02, 0.12)
    await asyncio.sleep(delay)
    score = random.random()
    return i, score
async def run_async():
    """Launch 80 io_task coroutines and collect results as they complete.

    Returns the list of (i, score) pairs in completion (not submission) order.
    """
    tasks = [asyncio.create_task(io_task(i)) for i in range(80)]
    results = []
    # as_completed yields awaitables as each finishes; total= is required
    # because the as_completed iterator has no len() for tqdm to read.
    for fut in tqdm(asyncio.as_completed(tasks), total=len(tasks), desc="async tasks"):
        results.append(await fut)
    return results
outcomes = await run_async()
print("async finished:", len(outcomes), "outcomes")
