Merging upstream version 4.66.2.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-05 19:14:31 +01:00
parent ec03e12832
commit 6759e100fe
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
61 changed files with 917 additions and 4364 deletions

View file

@@ -18,8 +18,6 @@ Options:
                  NOTSET
   -d, --debug-trace  Print lots of debugging information (-D NOTSET)
 """
-from __future__ import print_function
 import io
 import logging
 import os

View file

@@ -1,6 +1,4 @@
-"""
-Asynchronous examples using `asyncio`, `async` and `await` on `python>=3.7`.
-"""
+"""Asynchronous examples using `asyncio`, `async` and `await`."""
 import asyncio
 from tqdm.asyncio import tqdm, trange
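
Not part of the diff: a minimal standalone sketch of the pattern this example file demonstrates, using only the `tqdm.asyncio` imports shown above (loop sizes and sleeps are arbitrary).

import asyncio
from tqdm.asyncio import tqdm, trange

async def main():
    # trange() is the asyncio-aware tqdm over range(); it supports `async for`
    async for _ in trange(100, desc="async for"):
        await asyncio.sleep(0.01)
    # tqdm.gather() mirrors asyncio.gather() while updating a progress bar
    await tqdm.gather(*[asyncio.sleep(0.01) for _ in range(100)], desc="gather")

asyncio.run(main())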

View file

@ -1,6 +1,3 @@
from __future__ import print_function
import sys
from concurrent.futures import ThreadPoolExecutor
from functools import partial
from multiprocessing import Pool, RLock, freeze_support
@@ -12,21 +9,19 @@ from tqdm.auto import tqdm, trange
 from tqdm.contrib.concurrent import process_map, thread_map
 NUM_SUBITERS = 9
-PY2 = sys.version_info[:1] <= (2,)
 def progresser(n, auto_position=True, write_safe=False, blocking=True, progress=False):
     interval = random() * 0.002 / (NUM_SUBITERS - n + 2)  # nosec
     total = 5000
-    text = "#{0}, est. {1:<04.2}s".format(n, interval * total)
+    text = f"#{n}, est. {interval * total:<04.2g}s"
     for _ in trange(total, desc=text, disable=not progress,
                     lock_args=None if blocking else (False,),
                     position=None if auto_position else n):
         sleep(interval)
     # NB: may not clear instances with higher `position` upon completion
     # since this worker may not know about other bars #796
-    if write_safe:
-        # we think we know about other bars (currently only py3 threading)
+    if write_safe:  # we think we know about other bars
         if n == 6:
             tqdm.write("n == 6 completed")
     return n + 1
@@ -37,7 +32,7 @@ if __name__ == '__main__':
     L = list(range(NUM_SUBITERS))[::-1]
     print("Simple thread mapping")
-    thread_map(partial(progresser, write_safe=not PY2), L, max_workers=4)
+    thread_map(partial(progresser, write_safe=True), L, max_workers=4)
     print("Simple process mapping")
     process_map(partial(progresser), L, max_workers=4)
@@ -54,8 +49,5 @@ if __name__ == '__main__':
     print("Multi-threading")
     tqdm.set_lock(TRLock())
-    pool_args = {}
-    if not PY2:
-        pool_args.update(initializer=tqdm.set_lock, initargs=(tqdm.get_lock(),))
-    with ThreadPoolExecutor(**pool_args) as p:
-        p.map(partial(progresser, progress=True, write_safe=not PY2, blocking=False), L)
+    with ThreadPoolExecutor(initializer=tqdm.set_lock, initargs=(tqdm.get_lock(),)) as p:
+        p.map(partial(progresser, progress=True, write_safe=True, blocking=False), L)
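
Not part of the diff: a minimal standalone sketch of the idiom the new lines rely on, handing tqdm's write lock to worker threads via ThreadPoolExecutor's initializer/initargs so concurrently updating bars do not garble each other (the work function and counts are illustrative).

from concurrent.futures import ThreadPoolExecutor
from threading import RLock
from time import sleep

from tqdm.auto import tqdm

def work(n):
    # each worker gets its own bar on its own line via `position`
    for _ in tqdm(range(100), desc=f"task {n}", position=n, leave=False):
        sleep(0.001)

if __name__ == "__main__":
    tqdm.set_lock(RLock())  # create the shared lock in the main thread
    with ThreadPoolExecutor(max_workers=4, initializer=tqdm.set_lock,
                            initargs=(tqdm.get_lock(),)) as ex:
        list(ex.map(work, range(4)))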

View file

@@ -10,8 +10,6 @@ any input string to `tqdm.write()`, and supply the arguments
 A reusable canonical example is given below:
 """
-from __future__ import print_function
 import contextlib
 import sys
 from time import sleep
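
Not part of the diff: a short sketch of the kind of "reusable canonical example" the docstring above refers to, rerouting print() through tqdm.write() so messages do not break active bars; it follows tqdm's documented pattern built on tqdm.contrib.DummyTqdmFile.

import contextlib
import sys
from time import sleep

from tqdm import tqdm
from tqdm.contrib import DummyTqdmFile

@contextlib.contextmanager
def std_out_err_redirect_tqdm():
    orig_out_err = sys.stdout, sys.stderr
    try:
        # DummyTqdmFile forwards writes to tqdm.write()
        sys.stdout, sys.stderr = map(DummyTqdmFile, orig_out_err)
        yield orig_out_err[0]
    finally:
        sys.stdout, sys.stderr = orig_out_err

with std_out_err_redirect_tqdm() as orig_stdout:
    # the bar itself writes to the original stdout
    for i in tqdm(range(3), file=orig_stdout, dynamic_ncols=True):
        sleep(0.5)
        print(f"done with step {i}")  # safely interleaved via tqdm.write()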

View file

@@ -2,7 +2,7 @@
 # Simple tqdm examples and profiling
 # Benchmark
-for i in _range(int(1e8)):
+for i in range(int(1e8)):
     pass
 # Basic demo
@@ -33,7 +33,7 @@ try:
 except ImportError:
     pass
 else:
-    for i in ProgressBar()(_range(int(1e8))):
+    for i in ProgressBar()(range(int(1e8))):
         pass
 # Dynamic miniters benchmark
@@ -61,5 +61,4 @@ for _ in trange(16, leave=True):
 stmts = filter(None, re.split(r'\n\s*#.*?\n', __doc__))
 for s in stmts:
     print(s.replace('import tqdm\n', ''))
-    print(timeit(stmt='try:\n\t_range = xrange'
-                      '\nexcept:\n\t_range = range\n' + s, number=1), 'seconds')
+    print(timeit(stmt=s, number=1), 'seconds')
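
Not part of the diff: a tiny sketch of the benchmarking idiom used by the driver above, timing a source snippet once with timeit (the snippet string is illustrative).

from timeit import timeit

snippet = "for _ in range(int(1e6)):\n    pass"
print(timeit(stmt=snippet, number=1), 'seconds')  # run the snippet once and report elapsed time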

View file

@@ -20,11 +20,8 @@ Options:
   The local file path in which to save the url [default: /dev/null].
 """
-try:
-    from urllib import request as urllib
-except ImportError:  # py2
-    import urllib
 from os import devnull
+from urllib import request as urllib
 from docopt import docopt
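
Not part of the diff: a minimal sketch of the download-with-progress pattern this script implements, following tqdm's documented urlretrieve report-hook recipe (the URL is a placeholder).

from urllib import request as urllib

from tqdm import tqdm

class TqdmUpTo(tqdm):
    """tqdm with an urlretrieve-compatible report hook."""
    def update_to(self, b=1, bsize=1, tsize=None):
        # b: blocks transferred so far, bsize: block size, tsize: total size
        if tsize is not None:
            self.total = tsize
        self.update(b * bsize - self.n)  # advance the bar to b * bsize

url = "https://example.com/file.bin"  # placeholder
with TqdmUpTo(unit='B', unit_scale=True, miniters=1, desc=url.split('/')[-1]) as t:
    urllib.urlretrieve(url, filename="/dev/null", reporthook=t.update_to)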