Adding upstream version 4.64.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-05 19:13:00 +01:00
parent ee08d9327c
commit 2da88b2fbc
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
89 changed files with 16770 additions and 0 deletions

examples/7zx.py (new file, 121 lines)

@@ -0,0 +1,121 @@
# -*- coding: utf-8 -*-
"""Usage:
7zx.py [--help | options] <zipfiles>...
Options:
-h, --help Print this help and exit
-v, --version Print version and exit
-c, --compressed Use compressed (instead of uncompressed) file sizes
-s, --silent Do not print one row per zip file
-y, --yes Assume yes to all queries (for extraction)
-D=<level>, --debug=<level>
Print various types of debugging information. Choices:
CRITICAL|FATAL
ERROR
WARN(ING)
[default: INFO]
DEBUG
NOTSET
-d, --debug-trace Print lots of debugging information (-D NOTSET)
"""
from __future__ import print_function
import io
import logging
import os
import pty
import re
import subprocess # nosec
from argopt import argopt
from tqdm import tqdm
__author__ = "Casper da Costa-Luis <casper.dcl@physics.org>"
__licence__ = "MPLv2.0"
__version__ = "0.2.2"
__license__ = __licence__
RE_SCN = re.compile(r"([0-9]+)\s+([0-9]+)\s+(.*)$", flags=re.M)
def main():
args = argopt(__doc__, version=__version__).parse_args()
if args.debug_trace:
args.debug = "NOTSET"
logging.basicConfig(level=getattr(logging, args.debug, logging.INFO),
format='%(levelname)s:%(message)s')
log = logging.getLogger(__name__)
log.debug(args)
# Get compressed sizes
zips = {}
for fn in args.zipfiles:
info = subprocess.check_output(["7z", "l", fn]).strip() # nosec
finfo = RE_SCN.findall(info) # size|compressed|name
# builtin test: last line should be total sizes
log.debug(finfo)
totals = map(int, finfo[-1][:2])
# log.debug(totals)
for s in range(2): # size|compressed totals
totals_s = sum(map(int, (inf[s] for inf in finfo[:-1])))
if totals_s != totals[s]:
log.warn("%s: individual total %d != 7z total %d",
fn, totals_s, totals[s])
fcomp = {n: int(c if args.compressed else u) for (u, c, n) in finfo[:-1]}
# log.debug(fcomp)
# zips : {'zipname' : {'filename' : int(size)}}
zips[fn] = fcomp
# Extract
cmd7zx = ["7z", "x", "-bd"]
if args.yes:
cmd7zx += ["-y"]
log.info("Extracting from %d file(s)", len(zips))
with tqdm(total=sum(sum(fcomp.values()) for fcomp in zips.values()),
unit="B", unit_scale=True) as tall:
for fn, fcomp in zips.items():
md, sd = pty.openpty()
ex = subprocess.Popen( # nosec
cmd7zx + [fn],
bufsize=1,
stdout=md, # subprocess.PIPE,
stderr=subprocess.STDOUT)
os.close(sd)
with io.open(md, mode="rU", buffering=1) as m:
with tqdm(total=sum(fcomp.values()), disable=len(zips) < 2,
leave=False, unit="B", unit_scale=True) as t:
if not hasattr(t, "start_t"): # disabled
t.start_t = tall._time()
while True:
try:
l_raw = m.readline()
except IOError:
break
ln = l_raw.strip()
if ln.startswith("Extracting"):
exname = ln[len("Extracting"):].lstrip()
s = fcomp.get(exname, 0) # 0 is likely folders
t.update(s)
tall.update(s)
elif ln:
if not any(
ln.startswith(i)
for i in ("7-Zip ", "p7zip Version ",
"Everything is Ok", "Folders: ",
"Files: ", "Size: ", "Compressed: ")):
if ln.startswith("Processing archive: "):
if not args.silent:
t.write(t.format_interval(
t.start_t - tall.start_t) + ' ' +
ln.replace("Processing archive: ", ""))
else:
t.write(ln)
ex.wait()
main.__doc__ = __doc__
if __name__ == "__main__":
main()

@@ -0,0 +1,38 @@
"""
Asynchronous examples using `asyncio`, `async` and `await` on `python>=3.7`.
"""
import asyncio
from tqdm.asyncio import tqdm, trange
def count(start=0, step=1):
    i = start
    while True:
        new_start = yield i
        if new_start is None:
            i += step
        else:
            i = new_start


async def main():
    N = int(1e6)
    async for row in tqdm(trange(N, desc="inner"), desc="outer"):
        if row >= N:
            break
    with tqdm(count(), desc="coroutine", total=N + 2) as pbar:
        async for row in pbar:
            if row == N:
                pbar.send(-10)
            elif row < 0:
                assert row == -9
                break
    # should be ~1sec rather than ~50s due to async scheduling
    for i in tqdm.as_completed([asyncio.sleep(0.01 * i)
                                for i in range(100, 0, -1)], desc="as_completed"):
        await i


if __name__ == "__main__":
    asyncio.run(main())

@@ -0,0 +1,69 @@
"""
Inserting `tqdm` as a "pipe" in a chain of coroutines.
Not to be confused with `asyncio.coroutine`.
"""
from functools import wraps

from tqdm.auto import tqdm


def autonext(func):
    @wraps(func)
    def inner(*args, **kwargs):
        res = func(*args, **kwargs)
        next(res)
        return res
    return inner


@autonext
def tqdm_pipe(target, **tqdm_kwargs):
    """
    Coroutine chain pipe `send()`ing to `target`.

    This:
    >>> r = receiver()
    >>> p = producer(r)
    >>> next(r)
    >>> next(p)

    Becomes:
    >>> r = receiver()
    >>> t = tqdm_pipe(r)
    >>> p = producer(t)
    >>> next(r)
    >>> next(p)
    """
    with tqdm(**tqdm_kwargs) as pbar:
        while True:
            obj = (yield)
            target.send(obj)
            pbar.update()


def source(target):
    for i in ["foo", "bar", "baz", "pythonista", "python", "py"]:
        target.send(i)
    target.close()


@autonext
def grep(pattern, target):
    while True:
        line = (yield)
        if pattern in line:
            target.send(line)


@autonext
def sink():
    while True:
        line = (yield)
        tqdm.write(line)


if __name__ == "__main__":
    source(
        tqdm_pipe(
            grep('python',
                 sink())))

@@ -0,0 +1,11 @@
# How to import tqdm in any frontend without enforcing it as a dependency
try:
    from tqdm.auto import tqdm
except ImportError:
    def tqdm(*args, **kwargs):
        if args:
            return args[0]
        return kwargs.get('iterable', None)
__all__ = ['tqdm']

@@ -0,0 +1,29 @@
import numpy as np
import pandas as pd
from tqdm.auto import tqdm
df = pd.DataFrame(np.random.randint(0, 100, (100000, 6)))
# Register `pandas.progress_apply` and `pandas.Series.map_apply` with `tqdm`
# (can use `tqdm.gui.tqdm`, `tqdm.notebook.tqdm`, optional kwargs, etc.)
tqdm.pandas(desc="my bar!")
# Now you can use `progress_apply` instead of `apply`
# and `progress_map` instead of `map`
df.progress_apply(lambda x: x**2)
# can also groupby:
# df.groupby(0).progress_apply(lambda x: x**2)
# -- Source code for `tqdm_pandas` (really simple!)
# def tqdm_pandas(t):
#     from pandas.core.frame import DataFrame
#     def inner(df, func, *args, **kwargs):
#         t.total = groups.size // len(groups)
#         def wrapper(*args, **kwargs):
#             t.update(1)
#             return func(*args, **kwargs)
#         result = df.apply(wrapper, *args, **kwargs)
#         t.close()
#         return result
#     DataFrame.progress_apply = inner

examples/paper.bib (new file, 252 lines)

@@ -0,0 +1,252 @@
@phdthesis{tqdm-ar,
author="Maḥmūd Alī Ġūl",
title="Early Southern Arabian Languages and Classical Arabic Sources: A Critical Examination of Literary and Lexicographical Sources by Comparison with the Inscriptions",
school="{SOAS} University of London",
year="1963"
}
@misc{tqdm-es,
year="2009",
title="¿Lenguaje sms que significa esto?",
url="https://es.answers.yahoo.com/question/index?qid=20090405052137AAF2YBo&guccounter=1",
author="{Yahoo Answers}"
}
@misc{pypi,
year="2019",
author="{Python Package Index ({PyPI})}",
publisher="Python Software Foundation",
title="{tqdm}",
url="https://pypi.org/project/tqdm/"
}
@misc{conda,
author="Anaconda",
year="2019",
title="{tqdm} :: Anaconda Cloud",
url="https://anaconda.org/conda-forge/tqdm"
}
@misc{docker,
year="2019",
author="{Docker Inc.}",
title="{tqdm}/{tqdm} - Docker Hub",
url="https://hub.docker.com/r/tqdm/tqdm"
}
@misc{snapcraft,
year="2019",
author="Snapcraft",
title="Installing {tqdm} for Linux using the Snap Store",
url="https://snapcraft.io/tqdm"
}
@article{zenodo,
year="2019",
author="Casper O. {da Costa-Luis} and {{tqdm} developers}",
title="{tqdm} stable",
publisher="Zenodo",
doi="10.5281/zenodo.595120"
}
@misc{notebooks,
year="2019",
author="{Notebooks {AI}}",
title="{tqdm}",
url="https://notebooks.ai/demo/gh/tqdm/tqdm"
}
@misc{binder,
year="2019",
author="Binder",
title="{tqdm}",
url="https://mybinder.org/v2/gh/tqdm/tqdm/master?filepath=DEMO.ipynb"
}
@misc{stdout,
year="2019",
author="{Stack Overflow}",
title="Why is printing to stdout so slow? Can it be sped up?",
url="https://stackoverflow.com/questions/3857052/why-is-printing-to-stdout-so-slow-can-it-be-sped-up"
}
@misc{pypi-downloads,
year="2019",
author="{Python Packaging Authority ({PyPA})}",
publisher="Python Software Foundation",
title="Analyzing {PyPI} package downloads -- Python Packaging User Guide",
url="https://packaging.python.org/guides/analyzing-pypi-package-downloads/"
}
@misc{keras,
year="2019",
author="Ben",
title="Keras integration with {tqdm} progress bars",
url="https://github.com/bstriner/keras-tqdm"
}
@misc{tqdm-results,
year="2019",
author="GitHub",
title="{tqdm} Code Results",
url="https://github.com/search?q=tqdm&type=Code"
}
@misc{tqdm-dependents,
year="2019",
author="GitHub",
title="{tqdm} dependents",
url="https://github.com/tqdm/tqdm/network/dependents"
}
@misc{lib-io,
year="2019",
author="Libraries.io",
title="{tqdm} on {PyPI}",
url="https://libraries.io/pypi/tqdm"
}
@misc{sourcerank,
year="2019",
author="Libraries.io",
title="SourceRank Breakdown for {tqdm}",
url="https://libraries.io/pypi/tqdm/sourcerank"
}
@misc{sourcerank-descending,
year="2019",
author="Libraries.io",
title="Libraries - The Open Source Discovery Service",
url="https://libraries.io/search?order=desc&platforms=PyPI&sort=rank"
}
@misc{stars,
year="2019",
author="GitHub",
title="{tqdm} Stargazers",
url="https://github.com/tqdm/tqdm/stargazers"
}
@misc{stars-hist,
year="2019",
author="{timqian}",
title="Star history",
url="https://timqian.com/star-history/#tqdm/tqdm"
}
@misc{trend-hist,
year="2018",
month="June",
day="19",
author="Nihey Takizawa",
title="GitHub Trending History",
url="https://github.com/nihey/trending-history/blob/master/histories/Python.md"
}
@misc{hits,
year="2019",
title="{tqdm} hits",
url="https://caspersci.uk.to/cgi-bin/hits.cgi?q=tqdm&a=plot",
author="Casper O. {da Costa-Luis}"
}
@book{miller,
year="2017",
author="Preston Miller and Chapin Bryce",
title="Python Digital Forensics Cookbook: Effective Python recipes for digital investigations",
publisher="Packt Publishing Ltd",
isbn="9781783987474"
}
@book{boxel,
year="2017",
author="Dan {Van Boxel}",
title="Hands-On Deep Learning with TensorFlow",
publisher="Packt Publishing",
isbn="9781787125827"
}
@incollection{nandy,
year="2018",
author="Abhishek Nandy and Manisha Biswas",
title="Reinforcement Learning with Keras, TensorFlow, and ChainerRL",
booktitle="Reinforcement Learning : With Open AI, TensorFlow and Keras Using Python",
publisher="Apress",
isbn="9781484232859",
pages="129--153",
doi="10.1007/978-1-4842-3285-9_5"
}
@article{stein,
year="2019",
author="Helge S. Stein and Dan Guevarra and Paul F. Newhouse and Edwin Soedarmadji and John M. Gregoire",
title="Machine learning of optical properties of materials -- predicting spectra from images and images from spectra",
journal="Chemical Science",
volume="10",
issue="1",
pages="47--55",
doi="10.1039/C8SC03077D"
}
@article{cook,
year="2018",
author="Neil J. Cook and Aleks Scholz and Ray Jayawardhana",
title="Very Low-mass Stars and Brown Dwarfs in Upper Scorpius Using Gaia DR1: Mass Function, Disks, and Kinematics",
journal="The Astronomical Journal",
volume="154",
issue="6",
pages="256",
doi="10.3847/1538-3881/aa9751",
url="https://arxiv.org/abs/1710.11625"
}
@article{madhikar,
year="2018",
author="Pranav Madhikar and Jan Åström and Jan Westerholm and Mikko Karttunen",
title="CellSim3D: GPU accelerated software for simulations of cellular growth and division in three dimensions",
journal="Computer Physics Communications",
volume="232",
pages="206--213",
doi="10.1016/j.cpc.2018.05.024"
}
@article{palmer,
year="2018",
author="Geraint I. Palmer and Vincent A. Knight and Paul R. Harper and Asyl L. Hawa",
title="Ciw: An open-source discrete event simulation library",
journal="Journal of Simulation",
pages="1--15",
doi="10.1080/17477778.2018.1473909"
}
@article{knight,
year="2016",
author="Vincent Knight and Owen Campbell and Marc Harper and Karol Langner and James Campbell and Thomas Campbell and Alex Carney and Martin Chorley and Cameron Davidson-Pilon and Kristian Glass and Nikoleta Glynatsi and Tomáš Ehrlich and Martin Jones and Georgios Koutsovoulos and Holly Tibble and Müller Jochen and Geraint Palmer and Piotr Petunov and Paul Slavin and Timothy Standen and Luis Visintini and Karl Molden",
title="An open reproducible framework for the study of the iterated prisoner's dilemma",
journal="Journal of Open Research Software",
volume="4",
doi="10.5334/jors.125",
url="https://arxiv.org/abs/1604.00896",
issn="2049-9647"
}
@article{moriwaki,
title={Mordred: a molecular descriptor calculator},
author={Moriwaki, Hirotomo and Tian, Yu-Shi and Kawashita, Norihito and Takagi, Tatsuya},
doi={10.1186/s13321-018-0258-y},
number={1},
volume={10},
month={February},
year={2018},
journal={Journal of cheminformatics},
issn={1758-2946},
pages={4}
}
@article{jackson,
title={3D for the people: multi-camera motion capture in the field with consumer-grade cameras and open source software},
author={Jackson, Brandon E and Evangelista, Dennis J and Ray, Dylan D and Hedrick, Tyson L},
doi={10.1242/bio.018713},
number={9},
volume={5},
month={September},
year={2016},
journal={Biology open},
issn={2046-6390},
pages={1334--1342}
}
@misc{travis,
year="2019",
author="{Travis {CI}}",
title="tqdm/tqdm build status",
url="https://travis-ci.org/tqdm/tqdm"
}
@misc{code-review,
year="2018",
author="Wikipedia",
title="List of tools for code review",
url="https://en.wikipedia.org/wiki/List_of_tools_for_code_review"
}
@misc{asv,
year="2019",
author="{{tqdm} developers}",
title="airspeed velocity",
url="https://tqdm.github.io/tqdm/"
}
@misc{licence,
year="2019",
author="{{tqdm} developers}",
title="{tqdm} Licence",
url="https://github.com/tqdm/tqdm/blob/master/LICENCE",
publisher="GitHub"
}

examples/paper.md (new file, 169 lines)

@@ -0,0 +1,169 @@
---
title: '`tqdm`: A Fast, Extensible Progress Meter for Python and CLI'
tags:
- progressbar
- progressmeter
- progress-bar
- meter
- rate
- eta
- console
- terminal
- time
- progress
- bar
- gui
- python
- parallel
- cli
- utilities
- shell
- batch
authors:
 - name: Casper O da Costa-Luis
   orcid: 0000-0002-7211-1557
   affiliation: 1
affiliations:
 - name: "Independent (Non-affiliated)"
   index: 1
date: 16 February 2019
bibliography: paper.bib
---
# Introduction
**`tqdm`** is a progress bar library designed to be fast and extensible. It is
written in Python, though ports in other languages are available. `tqdm` means
**progress** in Arabic (*taqadum* [@tqdm-ar]) and is an abbreviation for
**I love you so much** in Spanish (*te quiero demasiado* [@tqdm-es]).
It is a common programming problem to have iterative operations where progress
monitoring is desirable or advantageous. Including statements within a `for` loop to `print` out the current iteration number is a common strategy. However, there are many improvements which could be made in such a scenario:
- preventing excessive printing, such as only displaying every $n$^th^
iteration;
- displaying iteration rate;
- displaying elapsed and estimated completion times, and
- showing all of the above on one continuously updating line.
Addressing all these issues may well take up more developer time and effort than
the rest of the content of the loop. Any changes to iteration rates or attempts
to re-use the printing logic in a different loop may well result in suboptimal
display rates -- displaying every $n$^th^ iteration may be too (in)frequent --
requiring manual adjustment of $n$ to fix.
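As a rough illustration (a sketch, not part of `tqdm`), a hand-rolled version of the above quickly becomes unwieldy:

```python
from time import time

total, n = int(1e6), 1000  # print only every n-th iteration
start = time()
for i in range(total):
    # ... loop body ...
    if i % n == 0:
        elapsed = time() - start
        rate = (i + 1) / max(elapsed, 1e-9)  # avoid division by zero
        eta = (total - i - 1) / rate
        print("\r%d/%d [%.0fs<%.0fs, %.0fit/s]"
              % (i + 1, total, elapsed, eta, rate), end="")
```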
`tqdm` addresses all of these problems once and for all, taking advantage of
Pythonic patterns to make it a trivial task to add visually appealing,
customisable progress bars without any significant performance degradation even
in the most demanding of scenarios.
`tqdm` is intended to be used in frontends (giving end users a visual indication
of progress of computations or data transfer). It is also useful for developers
for debugging purposes, both as a profiling tool and also as a way of displaying
logging information of an iterative task (such as errors during training of
machine learning algorithms). Due to its ease of use, the library is also an
ideal candidate for inclusion in Python educational courses. For general (not
necessarily Python) purposes, the command-line interface (CLI) mode further
presents a useful tool for CLI users and system administrators monitoring data
flow through pipes.
# Features
Exhaustive documentation may be found on the project's [home
page](https://github.com/tqdm/tqdm/#documentation).
The two basic use cases are within Python code and within a CLI:
## Python Iterable Wrapper
`tqdm`'s primary (and original) use is as a wrapper around Python iterables. A
simple case would be:
```python
from tqdm import tqdm
from time import sleep
for i in tqdm(range(100)):
    sleep(0.1)
100%|#########################################| 100/100 [00:10<00:00,  9.95it/s]
```
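The same bar may also be driven manually when there is no convenient iterable to wrap, for example updating by the amount of data processed (a minimal sketch):

```python
from time import sleep
from tqdm import tqdm

with tqdm(total=100) as pbar:
    for _ in range(10):
        sleep(0.1)       # e.g. process one chunk of work
        pbar.update(10)  # advance the bar by 10 units
```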
Supported features include:
- Display customisation via arguments such as `desc`, `postfix` and `bar_format` (see the sketch after this list)
- Automatic limiting of display updates to avoid slowing down due to excessive
iteration rates [@stdout]
- Automatic detection of console width to fill the display
- Automatic use of Unicode to render smooth-filling progress bars on supported
terminals
- Support for custom rendering frontends, including:
* Command-line interface
* *Jupyter* HTML notebooks
* `matplotlib`
- Support for custom hooks/callbacks, including:
* `pandas`
* `keras` [@keras]
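For example, display arguments and the `pandas` hook mentioned above may be combined as follows (a brief sketch; the data frame is purely illustrative):

```python
import pandas as pd
from tqdm.auto import tqdm

df = pd.DataFrame({"x": range(10000)})
# optional kwargs such as `desc` and `unit` are forwarded to the underlying bar
tqdm.pandas(desc="squaring", unit="rows")
df["x"].progress_apply(lambda x: x ** 2)
```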
## Command-line Interface (CLI)
A CLI is also provided, where `tqdm` may be used as a pipe:
```sh
# count lines of text in all *.txt files
$ cat *.txt | wc -l
1075075
# same but with continuously updating progress information
$ cat *.txt | python3 -m tqdm --unit loc --unit_scale | wc -l
1.08Mloc [00:07, 142kloc/s]
# same if `total` is known
$ cat *.txt | python3 -m tqdm --unit loc --unit_scale --total 1075075 | wc -l
100%|#####################################| 1.08/1.08M [00:07<00:00,  142kloc/s]
1075075
```
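The CLI also handles raw byte streams; for instance (a sketch, assuming a local `backup.tar` exists):

```sh
# byte-wise progress while copying an arbitrary stream
$ cat backup.tar | python3 -m tqdm --bytes --total $(wc -c < backup.tar) > backup2.tar
```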
# Availability
The package supports both Python versions 2 and 3, and is available for download
via `conda` [@conda], `pip` [@pypi], `snap` [@snapcraft], `docker` [@docker],
and *Zenodo* [@zenodo].
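For instance, typical installations use either of:

```sh
pip install tqdm
# or, from the conda-forge channel
conda install -c conda-forge tqdm
```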
Web-based Jupyter interactive demonstrations are also available
[@notebooks; @binder].
Unit tests are run at least weekly on cloud-based continuous integration
[@travis], with code style and security issues checked on
[Codacy](https://app.codacy.com/project/tqdm/tqdm/dashboard) [@code-review].
Coverage is reported on [Coveralls](https://coveralls.io/github/tqdm/tqdm) and
[Codecov](https://codecov.io/gh/tqdm/tqdm), and performance is monitored against
regression [@asv].
# Impact
As of January 2019, `tqdm` has accumulated over 20 million downloads
[@pypi-downloads], and 315 thousand code inclusions [@tqdm-results]. Dependants
of `tqdm` include 23 thousand repositories [@tqdm-dependents] and 7 thousand
libraries [@lib-io]. `tqdm` has a SourceRank of 22 [@sourcerank], placing it in
the world's top 20 Python packages as of early 2019 [@sourcerank-descending].
The source code of `tqdm` is hosted on GitHub, where it has received over 9
thousand stars [@stars; @stars-hist], and was the top trending repository during a
period in December 2015 [@trend-hist]. The documentation has received over 500
thousand hits [@hits], with highest rates during weekdays. Historical reading
rates have also trended upwards at the end of holiday periods. This implies
widespread use in commercial and academic settings.
[OpenHub](https://www.openhub.net/p/tqdm) valuates the work according to the
constructive cost model (COCOMO) as being worth approximately $50,000.
The library has also been used in several textbooks [@miller;@boxel;@nandy] and
peer-reviewed scientific publications
[@stein;@cook;@madhikar;@palmer;@knight;@moriwaki;@jackson].
The [`tqdm` wiki](https://github.com/tqdm/tqdm/wiki) also lists other references
in public media.
# Licence
`tqdm`'s source code is OSS, and all versions are archived at the DOI
[10.5281/zenodo.595120](https://doi.org/10.5281/zenodo.595120). The primary
maintainer [Casper da Costa-Luis](https://github.com/casperdcl) releases
contributions under the terms of the MPLv2.0, while all other contributions are
released under the terms of the MIT licence [@licence].
# References

examples/parallel_bars.py (new file, 61 lines)

@@ -0,0 +1,61 @@
from __future__ import print_function
import sys
from concurrent.futures import ThreadPoolExecutor
from functools import partial
from multiprocessing import Pool, RLock, freeze_support
from random import random
from threading import RLock as TRLock
from time import sleep
from tqdm.auto import tqdm, trange
from tqdm.contrib.concurrent import process_map, thread_map
NUM_SUBITERS = 9
PY2 = sys.version_info[:1] <= (2,)
def progresser(n, auto_position=True, write_safe=False, blocking=True, progress=False):
    interval = random() * 0.002 / (NUM_SUBITERS - n + 2)  # nosec
    total = 5000
    text = "#{0}, est. {1:<04.2}s".format(n, interval * total)
    for _ in trange(total, desc=text, disable=not progress,
                    lock_args=None if blocking else (False,),
                    position=None if auto_position else n):
        sleep(interval)
    # NB: may not clear instances with higher `position` upon completion
    # since this worker may not know about other bars #796
    if write_safe:
        # we think we know about other bars (currently only py3 threading)
        if n == 6:
            tqdm.write("n == 6 completed")
    return n + 1


if __name__ == '__main__':
    freeze_support()  # for Windows support
    L = list(range(NUM_SUBITERS))[::-1]

    print("Simple thread mapping")
    thread_map(partial(progresser, write_safe=not PY2), L, max_workers=4)

    print("Simple process mapping")
    process_map(partial(progresser), L, max_workers=4)

    print("Manual nesting")
    for i in trange(16, desc="1"):
        for _ in trange(16, desc="2 @ %d" % i, leave=i % 2):
            sleep(0.01)

    print("Multi-processing")
    tqdm.set_lock(RLock())
    p = Pool(initializer=tqdm.set_lock, initargs=(tqdm.get_lock(),))
    p.map(partial(progresser, progress=True), L)

    print("Multi-threading")
    tqdm.set_lock(TRLock())
    pool_args = {}
    if not PY2:
        pool_args.update(initializer=tqdm.set_lock, initargs=(tqdm.get_lock(),))
    with ThreadPoolExecutor(**pool_args) as p:
        p.map(partial(progresser, progress=True, write_safe=not PY2, blocking=False), L)

@@ -0,0 +1,52 @@
"""Redirecting writing
If using a library that can print messages to the console, editing the library
by replacing `print()` with `tqdm.write()` may not be desirable.
In that case, redirecting `sys.stdout` to `tqdm.write()` is an option.
To redirect `sys.stdout`, create a file-like class that will write
any input string to `tqdm.write()`, and supply the arguments
`file=sys.stdout, dynamic_ncols=True`.
A reusable canonical example is given below:
"""
from __future__ import print_function
import contextlib
import sys
from time import sleep
from tqdm import tqdm
from tqdm.contrib import DummyTqdmFile
@contextlib.contextmanager
def std_out_err_redirect_tqdm():
    orig_out_err = sys.stdout, sys.stderr
    try:
        # sys.stdout = sys.stderr = DummyTqdmFile(orig_out_err[0])
        sys.stdout, sys.stderr = map(DummyTqdmFile, orig_out_err)
        yield orig_out_err[0]
    # Relay exceptions
    except Exception as exc:
        raise exc
    # Always restore sys.stdout/err if necessary
    finally:
        sys.stdout, sys.stderr = orig_out_err


def some_fun(i):
    print("Fee, fi, fo,".split()[i])


# Redirect stdout to tqdm.write()
with std_out_err_redirect_tqdm() as orig_stdout:
    # tqdm needs the original stdout
    # and dynamic_ncols=True to autodetect console width
    for i in tqdm(range(3), file=orig_stdout, dynamic_ncols=True):
        # order of the following two lines should not matter
        some_fun(i)
        sleep(.5)

# After the `with`, printing is restored
print("Done!")

@@ -0,0 +1,65 @@
"""
# Simple tqdm examples and profiling
# Benchmark
for i in _range(int(1e8)):
pass
# Basic demo
import tqdm
for i in tqdm.trange(int(1e8)):
pass
# Some decorations
import tqdm
for i in tqdm.trange(int(1e8), miniters=int(1e6), ascii=True,
desc="cool", dynamic_ncols=True):
pass
# Nested bars
from tqdm import trange
for i in trange(10):
for j in trange(int(1e7), leave=False, unit_scale=True):
pass
# Experimental GUI demo
import tqdm
for i in tqdm.tgrange(int(1e8)):
pass
# Comparison to https://code.google.com/p/python-progressbar/
try:
from progressbar.progressbar import ProgressBar
except ImportError:
pass
else:
for i in ProgressBar()(_range(int(1e8))):
pass
# Dynamic miniters benchmark
from tqdm import trange
for i in trange(int(1e8), miniters=None, mininterval=0.1, smoothing=0):
pass
# Fixed miniters benchmark
from tqdm import trange
for i in trange(int(1e8), miniters=4500000, mininterval=0.1, smoothing=0):
pass
"""
import re
from time import sleep
from timeit import timeit
# Simple demo
from tqdm import trange
for _ in trange(16, leave=True):
sleep(0.1)
# Profiling/overhead tests
stmts = filter(None, re.split(r'\n\s*#.*?\n', __doc__))
for s in stmts:
print(s.replace('import tqdm\n', ''))
print(timeit(stmt='try:\n\t_range = xrange'
'\nexcept:\n\t_range = range\n' + s, number=1), 'seconds')

examples/tqdm_requests.py (new file, 49 lines)

@@ -0,0 +1,49 @@
"""An example of wrapping manual tqdm updates for `requests.get`.
See also: tqdm_wget.py.
Usage:
  tqdm_requests.py [options]

Options:
  -h, --help
     Print this help message and exit
  -u URL, --url URL  : string, optional
     The url to fetch.
     [default: https://caspersci.uk.to/matryoshka.zip]
  -o FILE, --output FILE  : string, optional
     The local file path in which to save the url [default: /dev/null].
"""
from os import devnull
import requests
from docopt import docopt
from tqdm.auto import tqdm
opts = docopt(__doc__)
eg_link = opts['--url']
eg_file = eg_link.replace('/', ' ').split()[-1]
eg_out = opts['--output'].replace("/dev/null", devnull)
response = requests.get(eg_link, stream=True)
with open(eg_out, "wb") as fout:
    with tqdm(
        # all optional kwargs
        unit='B', unit_scale=True, unit_divisor=1024, miniters=1,
        desc=eg_file, total=int(response.headers.get('content-length', 0))
    ) as pbar:
        for chunk in response.iter_content(chunk_size=4096):
            fout.write(chunk)
            pbar.update(len(chunk))

# Even simpler progress by wrapping the output file's `write()`
response = requests.get(eg_link, stream=True)
with tqdm.wrapattr(
    open(eg_out, "wb"), "write",
    unit='B', unit_scale=True, unit_divisor=1024, miniters=1,
    desc=eg_file, total=int(response.headers.get('content-length', 0))
) as fout:
    for chunk in response.iter_content(chunk_size=4096):
        fout.write(chunk)

examples/tqdm_wget.py (new file, 113 lines)

@@ -0,0 +1,113 @@
"""An example of wrapping manual tqdm updates for `urllib` reporthook.
See also: tqdm_requests.py.
# `urllib.urlretrieve` documentation
> If present, the hook function will be called once
> on establishment of the network connection and once after each block read
> thereafter. The hook will be passed three arguments; a count of blocks
> transferred so far, a block size in bytes, and the total size of the file.
Usage:
  tqdm_wget.py [options]

Options:
  -h, --help
     Print this help message and exit
  -u URL, --url URL  : string, optional
     The url to fetch.
     [default: https://caspersci.uk.to/matryoshka.zip]
  -o FILE, --output FILE  : string, optional
     The local file path in which to save the url [default: /dev/null].
"""
try:
    from urllib import request as urllib
except ImportError:  # py2
    import urllib
from os import devnull

from docopt import docopt

from tqdm.auto import tqdm


def my_hook(t):
    """Wraps tqdm instance.

    Don't forget to close() or __exit__()
    the tqdm instance once you're done with it (easiest using `with` syntax).

    Example
    -------
    >>> with tqdm(...) as t:
    ...     reporthook = my_hook(t)
    ...     urllib.urlretrieve(..., reporthook=reporthook)
    """
    last_b = [0]

    def update_to(b=1, bsize=1, tsize=None):
        """
        b  : int, optional
            Number of blocks transferred so far [default: 1].
        bsize  : int, optional
            Size of each block (in tqdm units) [default: 1].
        tsize  : int, optional
            Total size (in tqdm units). If [default: None] or -1,
            remains unchanged.
        """
        if tsize not in (None, -1):
            t.total = tsize
        displayed = t.update((b - last_b[0]) * bsize)
        last_b[0] = b
        return displayed

    return update_to


class TqdmUpTo(tqdm):
    """Alternative Class-based version of the above.

    Provides `update_to(n)` which uses `tqdm.update(delta_n)`.

    Inspired by [twine#242](https://github.com/pypa/twine/pull/242),
    [here](https://github.com/pypa/twine/commit/42e55e06).
    """

    def update_to(self, b=1, bsize=1, tsize=None):
        """
        b  : int, optional
            Number of blocks transferred so far [default: 1].
        bsize  : int, optional
            Size of each block (in tqdm units) [default: 1].
        tsize  : int, optional
            Total size (in tqdm units). If [default: None] remains unchanged.
        """
        if tsize is not None:
            self.total = tsize
        return self.update(b * bsize - self.n)  # also sets self.n = b * bsize


opts = docopt(__doc__)
eg_link = opts['--url']
eg_file = eg_link.replace('/', ' ').split()[-1]
eg_out = opts['--output'].replace("/dev/null", devnull)

# with tqdm(unit='B', unit_scale=True, unit_divisor=1024, miniters=1,
#           desc=eg_file) as t:  # all optional kwargs
#     urllib.urlretrieve(eg_link, filename=eg_out,
#                        reporthook=my_hook(t), data=None)

with TqdmUpTo(unit='B', unit_scale=True, unit_divisor=1024, miniters=1,
              desc=eg_file) as t:  # all optional kwargs
    urllib.urlretrieve(  # nosec
        eg_link, filename=eg_out, reporthook=t.update_to, data=None)
    t.total = t.n

# Even simpler progress by wrapping the output file's `write()`
response = urllib.urlopen(eg_link)  # nosec
with tqdm.wrapattr(open(eg_out, "wb"), "write",
                   miniters=1, desc=eg_file,
                   total=getattr(response, 'length', None)) as fout:
    for chunk in response:
        fout.write(chunk)

@@ -0,0 +1,15 @@
import numpy as np
from tqdm.contrib import tenumerate, tmap, tzip
for _ in tenumerate(range(int(1e6)), desc="builtin enumerate"):
    pass

for _ in tenumerate(np.random.random((999, 999)), desc="numpy.ndenumerate"):
    pass

for _ in tzip(np.arange(1e6), np.arange(1e6) + 1, desc="builtin zip"):
    pass

mapped = tmap(lambda x: x + 1, np.arange(1e6), desc="builtin map")
assert (np.arange(1e6) + 1 == list(mapped)).all()