Skip to content

Commit

Permalink
Merge tag 'v3.15.0' into cpython
Browse files Browse the repository at this point in the history
  • Loading branch information
jaraco committed Feb 25, 2023
2 parents 056c919 + 63bdf88 commit 757a4e1
Show file tree
Hide file tree
Showing 7 changed files with 87 additions and 50 deletions.
30 changes: 0 additions & 30 deletions Lib/test/test_zipfile/_context.py

This file was deleted.

8 changes: 0 additions & 8 deletions Lib/test/test_zipfile/_func_timeout_compat.py

This file was deleted.

38 changes: 38 additions & 0 deletions Lib/test/test_zipfile/_itertools.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import itertools
from collections import deque
from itertools import islice


# from jaraco.itertools 6.3.0
Expand Down Expand Up @@ -39,3 +41,39 @@ def always_iterable(obj, base_type=(str, bytes)):
return iter(obj)
except TypeError:
return iter((obj,))


# from more_itertools v9.0.0
def consume(iterator, n=None):
    """Advance *iterator* by *n* steps, or exhaust it when *n* is ``None``.

    Efficiently discards items without collecting them. With no second
    argument the whole iterator is drained; with *n*, at most *n* items
    are skipped.
    >>> i = (x for x in range(10))
    >>> next(i)
    0
    >>> consume(i, 3)
    >>> next(i)
    4
    >>> consume(i)
    >>> next(i)
    Traceback (most recent call last):
      File "<stdin>", line 1, in <module>
    StopIteration
    If fewer than *n* items remain, the iterator is simply exhausted.
    >>> i = (x for x in range(3))
    >>> consume(i, 5)
    >>> next(i)
    Traceback (most recent call last):
      File "<stdin>", line 1, in <module>
    StopIteration
    """
    # Both branches push the iteration loop down to C speed.
    if n is not None:
        # Request the empty slice starting at position n; islice steps
        # the underlying iterator forward n times (or to exhaustion).
        next(islice(iterator, n, n), None)
        return
    # A zero-length deque reads and immediately drops every item,
    # draining the iterator without storing anything.
    deque(iterator, maxlen=0)
9 changes: 9 additions & 0 deletions Lib/test/test_zipfile/_support.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
import importlib
import unittest


def import_or_skip(name):
    """Import and return the module *name*.

    Converts an ImportError into unittest.SkipTest so tests that rely
    on an optional dependency are skipped instead of erroring.
    """
    try:
        module = importlib.import_module(name)
    except ImportError:  # pragma: no cover
        raise unittest.SkipTest(f'Unable to import {name}')
    return module
24 changes: 24 additions & 0 deletions Lib/test/test_zipfile/test_complexity.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import unittest
import string
import zipfile

from ._functools import compose
from ._itertools import consume

from ._support import import_or_skip


big_o = import_or_skip('big_o')


class TestComplexity(unittest.TestCase):
    def test_implied_dirs_performance(self):
        """Assert that _implied_dirs scales no worse than linearly."""
        def make_names(size):
            # One long slash-joined path per size step; feeds big_o's
            # measurement harness with inputs of growing length.
            return [
                '/'.join(string.ascii_lowercase + str(n))
                for n in range(size)
            ]

        best, _others = big_o.big_o(
            compose(consume, zipfile.CompleteDirs._implied_dirs),
            make_names,
            max_n=1000,
            min_n=1,
        )
        assert best <= big_o.complexities.Linear
21 changes: 10 additions & 11 deletions Lib/test/test_zipfile/test_path.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
import contextlib
import pathlib
import pickle
import string
import sys
import unittest
import zipfile
Expand All @@ -12,7 +11,6 @@
from ._itertools import Counter

from ._test_params import parameterize, Invoked
from ._func_timeout_compat import set_timeout

from test.support.os_helper import temp_dir

Expand All @@ -22,9 +20,6 @@ class itertools:
Counter = Counter


consume = tuple


def add_dirs(zf):
"""
Given a writable zip file zf, inject directory entries for
Expand Down Expand Up @@ -330,11 +325,6 @@ def test_joinpath_constant_time(self):
# Check the file iterated all items
assert entries.count == self.HUGE_ZIPFILE_NUM_ENTRIES

@set_timeout(3)
def test_implied_dirs_performance(self):
data = ['/'.join(string.ascii_lowercase + str(n)) for n in range(10000)]
zipfile.CompleteDirs._implied_dirs(data)

@pass_alpharep
def test_read_does_not_close(self, alpharep):
alpharep = self.zipfile_ondisk(alpharep)
Expand Down Expand Up @@ -512,7 +502,7 @@ def test_pickle(self, alpharep, path_type, subpath):
saved_1 = pickle.dumps(zipfile.Path(zipfile_ondisk, at=subpath))
restored_1 = pickle.loads(saved_1)
first, *rest = restored_1.iterdir()
assert first.read_text().startswith('content of ')
assert first.read_text(encoding='utf-8').startswith('content of ')

@pass_alpharep
def test_extract_orig_with_implied_dirs(self, alpharep):
Expand All @@ -524,3 +514,12 @@ def test_extract_orig_with_implied_dirs(self, alpharep):
# wrap the zipfile for its side effect
zipfile.Path(zf)
zf.extractall(source_path.parent)

@pass_alpharep
def test_getinfo_missing(self, alpharep):
    """
    Validate behavior of getinfo on original zipfile after wrapping.
    """
    # Constructed only for its side effect: wrapping replaces/augments
    # the underlying ZipFile (presumably adding implied directory
    # entries — confirm against zipfile._path.CompleteDirs).
    zipfile.Path(alpharep)
    # getinfo on the wrapped original must still raise KeyError for a
    # name that is genuinely absent.
    with self.assertRaises(KeyError):
        alpharep.getinfo('does-not-exist')
7 changes: 6 additions & 1 deletion Lib/zipfile/_path.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,11 @@ class CompleteDirs(InitializedState, zipfile.ZipFile):
"""
A ZipFile subclass that ensures that implied directories
are always included in the namelist.
>>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt']))
['foo/', 'foo/bar/']
>>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt', 'foo/bar/']))
['foo/']
"""

@staticmethod
Expand Down Expand Up @@ -215,7 +220,7 @@ class Path:
Read text:
>>> c.read_text()
>>> c.read_text(encoding='utf-8')
'content of c'
existence:
Expand Down

0 comments on commit 757a4e1

Please sign in to comment.