From 009c8c052f5eb9f869c09029724ef194d8c161ca Mon Sep 17 00:00:00 2001
From: Pieter Eendebak
Date: Wed, 4 Feb 2026 19:38:45 +0100
Subject: [PATCH 1/2] gh-123471: Make concurrent iteration over
 `itertools.permutations` and `itertools.combinations_with_replacement`
 thread-safe (gh-144402)

---
 .../test_free_threading/test_itertools.py     | 107 ++++++------------
 ...-02-03-08-50-58.gh-issue-123471.yF1Gym.rst |   1 +
 Modules/itertoolsmodule.c                     |  24 +++-
 3 files changed, 59 insertions(+), 73 deletions(-)
 create mode 100644 Misc/NEWS.d/next/Library/2026-02-03-08-50-58.gh-issue-123471.yF1Gym.rst

diff --git a/Lib/test/test_free_threading/test_itertools.py b/Lib/test/test_free_threading/test_itertools.py
index 9d366041917bb3..bb6047e8669475 100644
--- a/Lib/test/test_free_threading/test_itertools.py
+++ b/Lib/test/test_free_threading/test_itertools.py
@@ -1,94 +1,59 @@
 import unittest
-from threading import Thread, Barrier
-from itertools import batched, chain, cycle
+from itertools import batched, chain, combinations_with_replacement, cycle, permutations
 from test.support import threading_helper
 
 
 threading_helper.requires_working_threading(module=True)
 
-class ItertoolsThreading(unittest.TestCase):
-
-    @threading_helper.reap_threads
-    def test_batched(self):
-        number_of_threads = 10
-        number_of_iterations = 20
-        barrier = Barrier(number_of_threads)
-        def work(it):
-            barrier.wait()
-            while True:
-                try:
-                    next(it)
-                except StopIteration:
-                    break
 
-        data = tuple(range(1000))
-        for it in range(number_of_iterations):
-            batch_iterator = batched(data, 2)
-            worker_threads = []
-            for ii in range(number_of_threads):
-                worker_threads.append(
-                    Thread(target=work, args=[batch_iterator]))
+def work_iterator(it):
+    while True:
+        try:
+            next(it)
+        except StopIteration:
+            break
 
-            with threading_helper.start_threads(worker_threads):
-                pass
 
-            barrier.reset()
+class ItertoolsThreading(unittest.TestCase):
 
     @threading_helper.reap_threads
-    def test_cycle(self):
-        number_of_threads = 6
+    def test_batched(self):
         number_of_iterations = 10
-        number_of_cycles = 400
+        for _ in range(number_of_iterations):
+            it = batched(tuple(range(1000)), 2)
+            threading_helper.run_concurrently(work_iterator, nthreads=10, args=[it])
 
-        barrier = Barrier(number_of_threads)
+    @threading_helper.reap_threads
+    def test_cycle(self):
         def work(it):
-            barrier.wait()
-            for _ in range(number_of_cycles):
-                try:
-                    next(it)
-                except StopIteration:
-                    pass
+            for _ in range(400):
+                next(it)
 
-        data = (1, 2, 3, 4)
-        for it in range(number_of_iterations):
-            cycle_iterator = cycle(data)
-            worker_threads = []
-            for ii in range(number_of_threads):
-                worker_threads.append(
-                    Thread(target=work, args=[cycle_iterator]))
-
-            with threading_helper.start_threads(worker_threads):
-                pass
-
-            barrier.reset()
+        number_of_iterations = 6
+        for _ in range(number_of_iterations):
+            it = cycle((1, 2, 3, 4))
+            threading_helper.run_concurrently(work, nthreads=6, args=[it])
 
     @threading_helper.reap_threads
     def test_chain(self):
-        number_of_threads = 6
-        number_of_iterations = 20
-
-        barrier = Barrier(number_of_threads)
-        def work(it):
-            barrier.wait()
-            while True:
-                try:
-                    next(it)
-                except StopIteration:
-                    break
-
-        data = [(1, )] * 200
-        for it in range(number_of_iterations):
-            chain_iterator = chain(*data)
-            worker_threads = []
-            for ii in range(number_of_threads):
-                worker_threads.append(
-                    Thread(target=work, args=[chain_iterator]))
-
-            with threading_helper.start_threads(worker_threads):
-                pass
+        number_of_iterations = 10
+        for _ in range(number_of_iterations):
+            it = chain(*[(1,)] * 200)
+            threading_helper.run_concurrently(work_iterator, nthreads=6, args=[it])
 
-            barrier.reset()
+    @threading_helper.reap_threads
+    def test_combinations_with_replacement(self):
+        number_of_iterations = 6
+        for _ in range(number_of_iterations):
+            it = combinations_with_replacement(tuple(range(2)), 2)
+            threading_helper.run_concurrently(work_iterator, nthreads=6, args=[it])
 
+    @threading_helper.reap_threads
+    def test_permutations(self):
+        number_of_iterations = 6
+        for _ in range(number_of_iterations):
+            it = permutations(tuple(range(4)), 2)
+            threading_helper.run_concurrently(work_iterator, nthreads=6, args=[it])
 
 
 if __name__ == "__main__":
diff --git a/Misc/NEWS.d/next/Library/2026-02-03-08-50-58.gh-issue-123471.yF1Gym.rst b/Misc/NEWS.d/next/Library/2026-02-03-08-50-58.gh-issue-123471.yF1Gym.rst
new file mode 100644
index 00000000000000..85e9a03426e1fc
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2026-02-03-08-50-58.gh-issue-123471.yF1Gym.rst
@@ -0,0 +1 @@
+Make concurrent iteration over :class:`itertools.combinations_with_replacement` and :class:`itertools.permutations` safe under free-threading.
diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c
index 8685eff8be65c3..7e73f76bc20b58 100644
--- a/Modules/itertoolsmodule.c
+++ b/Modules/itertoolsmodule.c
@@ -2587,7 +2587,7 @@ cwr_traverse(PyObject *op, visitproc visit, void *arg)
 }
 
 static PyObject *
-cwr_next(PyObject *op)
+cwr_next_lock_held(PyObject *op)
 {
     cwrobject *co = cwrobject_CAST(op);
     PyObject *elem;
@@ -2666,6 +2666,16 @@ cwr_next(PyObject *op)
     return NULL;
 }
 
+static PyObject *
+cwr_next(PyObject *op)
+{
+    PyObject *result;
+    Py_BEGIN_CRITICAL_SECTION(op);
+    result = cwr_next_lock_held(op);
+    Py_END_CRITICAL_SECTION()
+    return result;
+}
+
 static PyMethodDef cwr_methods[] = {
     {"__sizeof__", cwr_sizeof, METH_NOARGS, sizeof_doc},
     {NULL, NULL} /* sentinel */
@@ -2846,7 +2856,7 @@ permutations_traverse(PyObject *op, visitproc visit, void *arg)
 }
 
 static PyObject *
-permutations_next(PyObject *op)
+permutations_next_lock_held(PyObject *op)
 {
     permutationsobject *po = permutationsobject_CAST(op);
     PyObject *elem;
@@ -2936,6 +2946,16 @@ permutations_next(PyObject *op)
     return NULL;
 }
 
+static PyObject *
+permutations_next(PyObject *op)
+{
+    PyObject *result;
+    Py_BEGIN_CRITICAL_SECTION(op);
+    result = permutations_next_lock_held(op);
+    Py_END_CRITICAL_SECTION()
+    return result;
+}
+
 static PyMethodDef permuations_methods[] = {
     {"__sizeof__", permutations_sizeof, METH_NOARGS, sizeof_doc},
     {NULL, NULL} /* sentinel */

From b6d8aa436b0108fcc90cb13f9be864cfd4ea2ca7 Mon Sep 17 00:00:00 2001
From: Raymond Hettinger
Date: Wed, 4 Feb 2026 14:21:20 -0600
Subject: [PATCH 2/2] Itertools recipes: Replace the tabulate() example with
 running_mean() (gh-144483)

---
 Doc/library/itertools.rst | 27 +++++++++++++++++++++------
 1 file changed, 21 insertions(+), 6 deletions(-)

diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst
index 08dacb505f7748..4f73a74bdd17e2 100644
--- a/Doc/library/itertools.rst
+++ b/Doc/library/itertools.rst
@@ -845,7 +845,8 @@ and :term:`generators <generator>` which incur interpreter overhead.
    from contextlib import suppress
    from functools import reduce
    from math import comb, isqrt, prod, sumprod
-   from operator import getitem, is_not, itemgetter, mul, neg
+   from operator import getitem, is_not, itemgetter, mul, neg, truediv
+
 
    # ==== Basic one liners ====
 
@@ -858,9 +859,10 @@ and :term:`generators <generator>` which incur interpreter overhead.
        # prepend(1, [2, 3, 4]) → 1 2 3 4
        return chain([value], iterable)
 
-   def tabulate(function, start=0):
-       "Return function(0), function(1), ..."
-       return map(function, count(start))
+   def running_mean(iterable):
+       "Yield the average of all values seen so far."
+       # running_mean([8.5, 9.5, 7.5, 6.5]) -> 8.5 9.0 8.5 8.0
+       return map(truediv, accumulate(iterable), count(1))
 
    def repeatfunc(function, times=None, *args):
        "Repeat calls to a function with specified arguments."
@@ -913,6 +915,7 @@ and :term:`generators <generator>` which incur interpreter overhead.
        # all_equal('4٤௪౪໔', key=int) → True
        return len(take(2, groupby(iterable, key))) <= 1
 
+
    # ==== Data pipelines ====
 
    def unique_justseen(iterable, key=None):
@@ -1021,6 +1024,7 @@ and :term:`generators <generator>` which incur interpreter overhead.
            while True:
                yield function()
 
+
    # ==== Mathematical operations ====
 
    def multinomial(*counts):
@@ -1040,6 +1044,7 @@ and :term:`generators <generator>` which incur interpreter overhead.
        # sum_of_squares([10, 20, 30]) → 1400
        return sumprod(*tee(iterable))
 
+
    # ==== Matrix operations ====
 
    def reshape(matrix, columns):
@@ -1058,6 +1063,7 @@ and :term:`generators <generator>` which incur interpreter overhead.
        n = len(m2[0])
        return batched(starmap(sumprod, product(m1, transpose(m2))), n)
 
+
    # ==== Polynomial arithmetic ====
 
    def convolve(signal, kernel):
@@ -1114,6 +1120,7 @@ and :term:`generators <generator>` which incur interpreter overhead.
        powers = reversed(range(1, n))
        return list(map(mul, coefficients, powers))
 
+
    # ==== Number theory ====
 
    def sieve(n):
@@ -1230,8 +1237,8 @@ and :term:`generators <generator>` which incur interpreter overhead.
    [(0, 'a'), (1, 'b'), (2, 'c')]
 
 
-   >>> list(islice(tabulate(lambda x: 2*x), 4))
-   [0, 2, 4, 6]
+   >>> list(running_mean([8.5, 9.5, 7.5, 6.5]))
+   [8.5, 9.0, 8.5, 8.0]
 
 
    >>> for _ in loops(5):
@@ -1798,6 +1805,10 @@ and :term:`generators <generator>` which incur interpreter overhead.
 
    # Old recipes and their tests which are guaranteed to continue to work.
 
+   def tabulate(function, start=0):
+       "Return function(0), function(1), ..."
+       return map(function, count(start))
+
    def old_sumprod_recipe(vec1, vec2):
        "Compute a sum of products."
        return sum(starmap(operator.mul, zip(vec1, vec2, strict=True)))
@@ -1877,6 +1888,10 @@ and :term:`generators <generator>` which incur interpreter overhead.
 .. doctest::
    :hide:
 
+   >>> list(islice(tabulate(lambda x: 2*x), 4))
+   [0, 2, 4, 6]
+
+
    >>> dotproduct([1,2,3], [4,5,6])
    32
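
Reviewer note, not part of the patches above: a quick standalone sanity check of the new running_mean() recipe from PATCH 2/2. The first definition is copied verbatim from the hunk; the reference generator and the sample data are illustrative additions, not taken from the patch.

    from itertools import accumulate, count
    from operator import truediv

    def running_mean(iterable):
        "Yield the average of all values seen so far."
        # running_mean([8.5, 9.5, 7.5, 6.5]) -> 8.5 9.0 8.5 8.0
        return map(truediv, accumulate(iterable), count(1))

    def running_mean_reference(iterable):
        "Same values computed with an explicit loop, for comparison."
        total = 0
        for n, value in enumerate(iterable, start=1):
            total += value
            yield total / n

    data = [8.5, 9.5, 7.5, 6.5]
    assert list(running_mean(data)) == list(running_mean_reference(data)) == [8.5, 9.0, 8.5, 8.0]

The map/accumulate form matches the doctest added in the patch: accumulate() produces the running totals and count(1) supplies the divisors, so no state has to be tracked by hand.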
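Reviewer note, not part of the patches above: a rough stress sketch for PATCH 1/2 that drives one shared permutations iterator from several threads using only plain threading primitives (the thread count and input sizes are arbitrary, and test.support.threading_helper is deliberately not used here). With the new critical sections, every next() call on the shared iterator is serialized, so each tuple should be handed out to exactly one thread and nothing should crash on a free-threaded build.

    from itertools import permutations
    from threading import Barrier, Thread

    def consume(shared_it, barrier, out):
        # Start all threads at the same moment to maximize contention,
        # then drain the shared iterator until it is exhausted.
        barrier.wait()
        for item in shared_it:
            out.append(item)

    NTHREADS = 6
    shared_it = permutations(range(6), 3)   # 120 tuples shared by all threads
    barrier = Barrier(NTHREADS)
    results = [[] for _ in range(NTHREADS)]
    threads = [Thread(target=consume, args=(shared_it, barrier, results[i]))
               for i in range(NTHREADS)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()

    # Every permutation should appear exactly once across all threads.
    combined = [item for chunk in results for item in chunk]
    assert sorted(combined) == sorted(permutations(range(6), 3))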