Skip to content

Commit c71be45

Browse files
committed
Merge branch 'master' into deprecate-struct-init/78724
2 parents ea0f8e2 + d1282ef commit c71be45

File tree

6 files changed

+189
-61
lines changed

6 files changed

+189
-61
lines changed

Doc/library/itertools.rst

Lines changed: 43 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ Iterator Arguments Results
4747
Iterator Arguments Results Example
4848
============================ ============================ ================================================= =============================================================
4949
:func:`accumulate` p [,func] p0, p0+p1, p0+p1+p2, ... ``accumulate([1,2,3,4,5]) → 1 3 6 10 15``
50-
:func:`batched` p, n (p0, p1, ..., p_n-1), ... ``batched('ABCDEFG', n=2) → AB CD EF G``
50+
:func:`batched` p, n (p0, p1, ..., p_n-1), ... ``batched('ABCDEFG', n=3) → ABC DEF G``
5151
:func:`chain` p, q, ... p0, p1, ... plast, q0, q1, ... ``chain('ABC', 'DEF') → A B C D E F``
5252
:func:`chain.from_iterable` iterable p0, p1, ... plast, q0, q1, ... ``chain.from_iterable(['ABC', 'DEF']) → A B C D E F``
5353
:func:`compress` data, selectors (d[0] if s[0]), (d[1] if s[1]), ... ``compress('ABCDEF', [1,0,1,0,1,1]) → A C E F``
@@ -181,7 +181,7 @@ loops that truncate the stream.
181181
Roughly equivalent to::
182182

183183
def batched(iterable, n, *, strict=False):
184-
# batched('ABCDEFG', 2) → AB CD EF G
184+
# batched('ABCDEFG', 3) → ABC DEF G
185185
if n < 1:
186186
raise ValueError('n must be at least one')
187187
iterator = iter(iterable)
@@ -842,7 +842,7 @@ and :term:`generators <generator>` which incur interpreter overhead.
842842
from contextlib import suppress
843843
from functools import reduce
844844
from math import comb, prod, sumprod, isqrt
845-
from operator import itemgetter, getitem, mul, neg
845+
from operator import is_not, itemgetter, getitem, mul, neg
846846

847847
def take(n, iterable):
848848
"Return first n items of the iterable as a list."
@@ -978,6 +978,16 @@ and :term:`generators <generator>` which incur interpreter overhead.
978978
slices = starmap(slice, combinations(range(len(seq) + 1), 2))
979979
return map(getitem, repeat(seq), slices)
980980

981+
def derangements(iterable, r=None):
    """Yield r-length permutations in which no element stays at its own index.

    Like permutations(), but a candidate arrangement is emitted only when
    every position holds an element that moved away from where it started.
    Identical objects are treated as distinct by their position.
    """
    # derangements('ABCD') → BADC BCDA BDAC CADB CDAB CDBA DABC DCAB DCBA
    pool = tuple(iterable)
    for indices in permutations(range(len(pool)), r=r):
        # A fixed point is indices[position] == position; reject any
        # arrangement that contains one.
        if all(index != position for position, index in enumerate(indices)):
            yield tuple(pool[index] for index in indices)
990+
981991
def iter_index(iterable, value, start=0, stop=None):
982992
"Return indices where a value occurs in a sequence or iterable."
983993
# iter_index('AABCADEAF', 'A') → 0 1 4 7
@@ -1663,6 +1673,36 @@ The following recipes have a more mathematical flavor:
16631673
['A', 'AB', 'ABC', 'ABCD', 'B', 'BC', 'BCD', 'C', 'CD', 'D']
16641674

16651675

1676+
>>> ' '.join(map(''.join, derangements('ABCD')))
1677+
'BADC BCDA BDAC CADB CDAB CDBA DABC DCAB DCBA'
1678+
>>> ' '.join(map(''.join, derangements('ABCD', 3)))
1679+
'BAD BCA BCD BDA CAB CAD CDA CDB DAB DCA DCB'
1680+
>>> ' '.join(map(''.join, derangements('ABCD', 2)))
1681+
'BA BC BD CA CD DA DC'
1682+
>>> ' '.join(map(''.join, derangements('ABCD', 1)))
1683+
'B C D'
1684+
>>> ' '.join(map(''.join, derangements('ABCD', 0)))
1685+
''
1686+
>>> # Compare number of derangements to https://oeis.org/A000166
1687+
>>> [len(list(derangements(range(n)))) for n in range(10)]
1688+
[1, 0, 1, 2, 9, 44, 265, 1854, 14833, 133496]
1689+
>>> # Verify that identical objects are treated as unique by position
1690+
>>> identical = 'X'
1691+
>>> distinct = 'x'
1692+
>>> seq1 = ('A', identical, 'B', identical)
1693+
>>> result1 = ' '.join(map(''.join, derangements(seq1)))
1694+
>>> result1
1695+
'XAXB XBXA XXAB BAXX BXAX BXXA XAXB XBAX XBXA'
1696+
>>> seq2 = ('A', identical, 'B', distinct)
1697+
>>> result2 = ' '.join(map(''.join, derangements(seq2)))
1698+
>>> result2
1699+
'XAxB XBxA XxAB BAxX BxAX BxXA xAXB xBAX xBXA'
1700+
>>> result1 == result2
1701+
False
1702+
>>> result1.casefold() == result2.casefold()
1703+
True
1704+
1705+
16661706
>>> list(powerset([1,2,3]))
16671707
[(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
16681708
>>> all(len(list(powerset(range(n)))) == 2**n for n in range(18))

Doc/whatsnew/3.13.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2340,7 +2340,7 @@ Limited C API Changes
23402340
* :c:func:`PySys_AuditTuple`
23412341
* :c:func:`PyType_GetModuleByDef`
23422342

2343-
(Contributed by Victor Stinner in :gh:`85283`, :gh:`85283`, and :gh:`116936`.)
2343+
(Contributed by Victor Stinner in :gh:`85283` and :gh:`116936`.)
23442344

23452345
* Python built with :option:`--with-trace-refs` (tracing references)
23462346
now supports the :ref:`Limited API <limited-c-api>`.

Lib/test/test_pickle.py

Lines changed: 42 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -419,6 +419,46 @@ def test_issue18339(self):
419419
unpickler.memo = {-1: None}
420420
unpickler.memo = {1: None}
421421

422+
def test_concurrent_pickler_dump(self):
    """A reentrant Pickler.dump() call must raise RuntimeError, not crash."""
    buffer = io.BytesIO()
    pickler = self.pickler_class(buffer)

    class Reentrant:
        def __reduce__(inner_self):
            # Re-entering dump() on the same pickler is forbidden.
            self.assertRaises(RuntimeError, pickler.dump, 42)
            return list, ()

    pickler.dump(Reentrant())  # should not crash
    self.assertEqual(pickle.loads(buffer.getvalue()), [])
431+
432+
def test_concurrent_pickler_dump_and_init(self):
    """Reentrant Pickler.__init__() during dump() must raise RuntimeError."""
    buffer = io.BytesIO()
    pickler = self.pickler_class(buffer)

    class Reentrant:
        def __reduce__(inner_self):
            # Re-initializing a pickler while it is dumping is forbidden.
            self.assertRaises(RuntimeError, pickler.__init__, buffer)
            return list, ()

    pickler.dump([Reentrant()])  # should not fail
    self.assertEqual(pickle.loads(buffer.getvalue()), [[]])
441+
442+
def test_concurrent_unpickler_load(self):
    """A reentrant Unpickler.load() call must raise RuntimeError, not crash."""
    global reducer  # must be module-global so the pickle stream can find it

    def reducer():
        # Re-entering load() on the same unpickler is forbidden.
        self.assertRaises(RuntimeError, unpickler.load)
        return 42

    # Protocol-0 stream that calls this module's ``reducer`` during load.
    stream = io.BytesIO(b'(c%b\nreducer\n(tRl.' % (__name__.encode(),))
    unpickler = self.unpickler_class(stream)
    result = unpickler.load()  # should not fail
    self.assertEqual(result, [42])
451+
452+
def test_concurrent_unpickler_load_and_init(self):
    """Reentrant Unpickler.__init__() during load() must raise RuntimeError."""
    global reducer  # must be module-global so the pickle stream can find it

    def reducer():
        # Re-initializing an unpickler while it is loading is forbidden.
        self.assertRaises(RuntimeError, unpickler.__init__, stream)
        return 42

    # Protocol-0 stream that calls this module's ``reducer`` during load.
    stream = io.BytesIO(b'(c%b\nreducer\n(tRl.' % (__name__.encode(),))
    unpickler = self.unpickler_class(stream)
    result = unpickler.load()  # should not crash
    self.assertEqual(result, [42])
461+
422462
class CDispatchTableTests(AbstractDispatchTableTests, unittest.TestCase):
423463
pickler_class = pickle.Pickler
424464
def get_dispatch_table(self):
@@ -467,7 +507,7 @@ class SizeofTests(unittest.TestCase):
467507
check_sizeof = support.check_sizeof
468508

469509
def test_pickler(self):
470-
basesize = support.calcobjsize('7P2n3i2n3i2P')
510+
basesize = support.calcobjsize('7P2n3i2n4i2P')
471511
p = _pickle.Pickler(io.BytesIO())
472512
self.assertEqual(object.__sizeof__(p), basesize)
473513
MT_size = struct.calcsize('3nP0n')
@@ -484,7 +524,7 @@ def test_pickler(self):
484524
0) # Write buffer is cleared after every dump().
485525

486526
def test_unpickler(self):
487-
basesize = support.calcobjsize('2P2n3P 2P2n2i5P 2P3n8P2n2i')
527+
basesize = support.calcobjsize('2P2n3P 2P2n2i5P 2P3n8P2n3i')
488528
unpickler = _pickle.Unpickler
489529
P = struct.calcsize('P') # Size of memo table entry.
490530
n = struct.calcsize('n') # Size of mark table entry.
Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
Forbid reentrant calls of the :class:`pickle.Pickler` and
:class:`pickle.Unpickler` methods for the C implementation. Previously, this
could cause a crash or data corruption; now concurrent calls of methods of the
same object raise :exc:`RuntimeError`.

0 commit comments

Comments
 (0)