chore: Clean remove_prefix since python3.9 is now the minimum Python (#…
MarcoGorelli authored Oct 6, 2024
1 parent 8c306dd · commit c4a28ae
Showing 7 changed files with 7 additions and 17 deletions.
6 changes: 1 addition & 5 deletions py-polars/polars/convert/normalize.py
@@ -16,10 +16,6 @@
 from polars.schema import Schema
 
 
-def _remove_prefix(text: str, prefix: str) -> str:
-    return text.removeprefix(prefix)
-
-
 def _simple_json_normalize(
     data: dict[Any, Any] | Sequence[dict[Any, Any] | Any],
     separator: str,
@@ -206,7 +202,7 @@ def normalize_json(
                 new_key = f"{key_string}{separator}{key}"
 
                 if not key_string:
-                    new_key = _remove_prefix(new_key, separator)
+                    new_key = new_key.removeprefix(separator)
 
                 normalize_json(
                     data=value,
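
For context: str.removeprefix has been a built-in string method since Python 3.9 (PEP 616), which is what makes the private _remove_prefix wrapper above redundant. A minimal standalone sketch of the behaviour, using throwaway values that are not part of the diff:

    # The separator is only stripped when it is actually a prefix, and at most once.
    assert ".a.b".removeprefix(".") == "a.b"
    assert "a.b".removeprefix(".") == "a.b"  # no leading separator: unchanged
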
4 changes: 1 addition & 3 deletions py-polars/polars/dataframe/frame.py
@@ -8506,9 +8506,7 @@ def unstack(
             n_cols = step
             n_rows = math.ceil(height / n_cols)
 
-        n_fill = n_cols * n_rows - height
-
-        if n_fill:
+        if n_fill := n_cols * n_rows - height:
             if not isinstance(fill_values, list):
                 fill_values = [fill_values for _ in range(df.width)]
 
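
The rewrite above, and the similar ones in the readers and utility wrappers below, uses an assignment expression (the walrus operator from PEP 572, available since Python 3.8) to bind a value and test its truthiness in one step. A tiny self-contained sketch with made-up numbers, not taken from the diff:

    # A 3 x 4 grid holds 12 values; with a height of 10, two fill values are
    # needed, so the bound difference is non-zero and the branch is taken.
    n_cols, n_rows, height = 3, 4, 10
    if n_fill := n_cols * n_rows - height:
        print(f"padding with {n_fill} fill values")  # padding with 2 fill values
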
3 changes: 1 addition & 2 deletions py-polars/polars/io/csv/batched_reader.py
@@ -130,8 +130,7 @@ def next_batches(self, n: int) -> list[DataFrame] | None:
         -------
         list of DataFrames
         """
-        batches = self._reader.next_batches(n)
-        if batches is not None:
+        if (batches := self._reader.next_batches(n)) is not None:
             if self.new_columns:
                 return [
                     _update_columns(wrap_df(df), self.new_columns) for df in batches
3 changes: 1 addition & 2 deletions py-polars/polars/io/delta.py
@@ -347,8 +347,7 @@ def _check_for_unsupported_types(dtypes: list[DataType]) -> None:
     # Note that this overlap check does NOT work correctly for Categorical, so
     # if Categorical is added back to unsupported_types a different check will
     # need to be used.
-    overlap = schema_dtypes & unsupported_types
 
-    if overlap:
+    if overlap := schema_dtypes & unsupported_types:
         msg = f"dataframe contains unsupported data types: {overlap!r}"
         raise TypeError(msg)
3 changes: 1 addition & 2 deletions py-polars/polars/lazyframe/in_process.py
@@ -32,8 +32,7 @@ def fetch(self) -> DataFrame | None:
         If it is ready, a materialized DataFrame is returned.
         If it is not ready it will return `None`.
         """
-        out = self._inner.fetch()
-        if out is not None:
+        if (out := self._inner.fetch()) is not None:
             return wrap_df(out)
         else:
             return None
2 changes: 1 addition & 1 deletion py-polars/polars/series/series.py
@@ -1225,7 +1225,7 @@ def __contains__(self, item: Any) -> bool:
             return self.has_nulls()
         return self.implode().list.contains(item).item()
 
-    def __iter__(self) -> Generator[Any, None, None]:
+    def __iter__(self) -> Generator[Any]:
         if self.dtype in (List, Array):
             # TODO: either make a change and return py-native list data here, or find
             # a faster way to return nested/List series; sequential 'get_index' calls
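
A side note on the __iter__ annotation above: for type checkers that support default type parameters (PEP 696, reflected in Python 3.13's typing module and in recent typing_extensions), Generator[Any] is equivalent to Generator[Any, None, None], and with postponed evaluation of annotations the shorter spelling is never evaluated at runtime. A minimal illustrative sketch, with names that are not from polars:

    from __future__ import annotations

    from collections.abc import Generator


    def count_up(n: int) -> Generator[int]:  # send/return types default to None
        yield from range(n)


    assert list(count_up(3)) == [0, 1, 2]
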
3 changes: 1 addition & 2 deletions py-polars/polars/series/utils.py
@@ -100,8 +100,7 @@ def call_expr(func: SeriesMethod) -> SeriesMethod:
     def wrapper(self: Any, *args: P.args, **kwargs: P.kwargs) -> Series:
         s = wrap_s(self._s)
         expr = F.col(s.name)
-        namespace = getattr(self, "_accessor", None)
-        if namespace is not None:
+        if (namespace := getattr(self, "_accessor", None)) is not None:
             expr = getattr(expr, namespace)
         f = getattr(expr, func.__name__)
         return s.to_frame().select_seq(f(*args, **kwargs)).to_series()
