Skip to content

Commit

Permalink
STY: Consolidate & add pre-commit checks
Browse files Browse the repository at this point in the history
  • Loading branch information
mroeschke committed Jul 7, 2023
1 parent 4576909 commit 0e2821a
Show file tree
Hide file tree
Showing 3 changed files with 44 additions and 68 deletions.
36 changes: 15 additions & 21 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -46,14 +46,22 @@ repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: check-ast
- id: check-case-conflict
- id: check-toml
- id: check-xml
- id: check-yaml
exclude: ^ci/meta.yaml$
- id: debug-statements
- id: end-of-file-fixer
exclude: \.txt$
stages: [commit, merge-commit, push, prepare-commit-msg, commit-msg,
post-checkout, post-commit, post-merge, post-rewrite]
- id: mixed-line-ending
args: [--fix=auto]
exclude: ^pandas/tests/io/parser/data/utf16_ex.txt$
- id: fix-byte-order-marker
- id: fix-encoding-pragma
args: [--remove]
- id: trailing-whitespace
stages: [commit, merge-commit, push, prepare-commit-msg, commit-msg,
post-checkout, post-commit, post-merge, post-rewrite]
- repo: https://github.com/cpplint/cpplint
rev: 1.6.1
hooks:
Expand Down Expand Up @@ -98,6 +106,8 @@ repos:
- repo: https://github.com/pre-commit/pygrep-hooks
rev: v1.10.0
hooks:
- id: python-check-blanket-noqa
- id: python-check-blanket-type-ignore
- id: rst-backticks
- id: rst-directive-colons
types: [text] # overwrite types: [rst]
Expand Down Expand Up @@ -160,25 +170,17 @@ repos:
language: pygrep
types: [python]
files: ^pandas/tests/
exclude: |
(?x)^
pandas/tests/io/pytables/test_store\.py$
- id: unwanted-patterns
name: Unwanted patterns
language: pygrep
entry: |
(?x)
# outdated annotation syntax, missing error codes
# outdated annotation syntax
\#\ type:\ (?!ignore)
|\#\ type:\s?ignore(?!\[)
# foo.__class__ instead of type(foo)
|\.__class__
# np.bool/np.object instead of np.bool_/np.object_
|np\.bool[^_8`]
|np\.object[^_8`]
# imports from collections.abc instead of `from collections import abc`
|from\ collections\.abc\ import
Expand All @@ -200,16 +202,8 @@ repos:
# builtin filter function
|(?<!def)[\(\s]filter\(
# exec
|[^a-zA-Z0-9_]exec\(
types_or: [python, cython, rst]
exclude: ^doc/source/development/code_style\.rst # contains examples of patterns to avoid
- id: cython-casting
name: Check Cython casting is `<type>obj`, not `<type> obj`
language: pygrep
entry: '[a-zA-Z0-9*]> '
files: (\.pyx|\.pxi.in)$
- id: incorrect-backticks
name: Check for backticks incorrectly rendering because of missing spaces
language: pygrep
Expand Down
74 changes: 27 additions & 47 deletions pandas/tests/io/pytables/test_store.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import contextlib
import datetime as dt
import hashlib
import os
import tempfile
import time
from warnings import (
Expand All @@ -27,7 +27,6 @@
from pandas.tests.io.pytables.common import (
_maybe_remove,
ensure_clean_store,
safe_close,
)

from pandas.io.pytables import (
Expand Down Expand Up @@ -832,53 +831,34 @@ def reader(path):
tm.assert_frame_equal(df, result)


def test_copy():
with catch_warnings(record=True):

def do_copy(f, new_f=None, keys=None, propindexes=True, **kwargs):
if new_f is None:
fd, new_f = tempfile.mkstemp()

try:
store = HDFStore(f, "r")
tstore = store.copy(new_f, keys=keys, propindexes=propindexes, **kwargs)
@pytest.mark.parametrize("propindexes", [True, False])
def test_copy(propindexes):
df = tm.makeDataFrame()

# check keys
if keys is None:
with tm.ensure_clean() as path:
with HDFStore(path) as st:
st.append("df", df, data_columns=["A"])
with tempfile.NamedTemporaryFile() as new_f:
with HDFStore(path) as store:
with contextlib.closing(
store.copy(new_f.name, keys=None, propindexes=propindexes)
) as tstore:
# check keys
keys = store.keys()
assert set(keys) == set(tstore.keys())

# check indices & nrows
for k in tstore.keys():
if tstore.get_storer(k).is_table:
new_t = tstore.get_storer(k)
orig_t = store.get_storer(k)

assert orig_t.nrows == new_t.nrows

# check propindexes
if propindexes:
for a in orig_t.axes:
if a.is_indexed:
assert new_t[a.name].is_indexed

finally:
safe_close(store)
safe_close(tstore)
try:
os.close(fd)
except (OSError, ValueError):
pass
os.remove(new_f)

# new table
df = tm.makeDataFrame()

with tm.ensure_clean() as path:
with HDFStore(path) as st:
st.append("df", df, data_columns=["A"])
do_copy(f=path)
do_copy(f=path, propindexes=False)
assert set(keys) == set(tstore.keys())
# check indices & nrows
for k in tstore.keys():
if tstore.get_storer(k).is_table:
new_t = tstore.get_storer(k)
orig_t = store.get_storer(k)

assert orig_t.nrows == new_t.nrows

# check propindexes
if propindexes:
for a in orig_t.axes:
if a.is_indexed:
assert new_t[a.name].is_indexed


def test_duplicate_column_name(tmp_path, setup_path):
Expand Down
2 changes: 2 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -239,6 +239,8 @@ select = [
"PGH",
# Ruff-specific rules
"RUF",
# flake8-bandit: exec-builtin
"S102"
]

ignore = [
Expand Down

0 comments on commit 0e2821a

Please sign in to comment.