Commit 003ff33

Authored by DimitriPapadopoulos, jhamman, and joshmoore
Change occurrences of % and format() to f-strings (#1423)
Co-authored-by: Joe Hamman <joe@earthmover.io>
Co-authored-by: Josh Moore <josh@openmicroscopy.org>
1 parent d23683d commit 003ff33

18 files changed: +167 -220 lines changed
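
The change is mechanical: every printf-style % expression and str.format() call in the files below is rewritten as an equivalent f-string. As a rough illustration of the pattern (a sketch, not taken from the diff itself), the three styles produce identical output:

# Illustrative sketch only -- not part of the commit. The three formatting
# styles below produce the same string; this commit rewrites the first two
# into the third.
name = "example.zarr"

old_percent = "Blob %s not found" % name       # printf-style formatting
old_format = "Blob {} not found".format(name)  # str.format()
new_fstring = f"Blob {name} not found"         # f-string (Python 3.6+)

assert old_percent == old_format == new_fstring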

docs/release.rst

Lines changed: 3 additions & 0 deletions
@@ -18,6 +18,9 @@ Release notes
 Unreleased
 ----------
 
+* Change occurrences of % and format() to f-strings.
+  By :user:`Dimitri Papadopoulos Orfanos <DimitriPapadopoulos>` :issue:`1423`.
+
 .. _release_2.17.0:
 
 2.17.0

zarr/_storage/absstore.py

Lines changed: 3 additions & 3 deletions
@@ -84,7 +84,7 @@ def __init__(
 
         blob_service_kwargs = blob_service_kwargs or {}
         client = ContainerClient(
-            "https://{}.blob.core.windows.net/".format(account_name),
+            f"https://{account_name}.blob.core.windows.net/",
             container,
             credential=account_key,
             **blob_service_kwargs,
@@ -141,7 +141,7 @@ def __getitem__(self, key):
         try:
             return self.client.download_blob(blob_name).readall()
         except ResourceNotFoundError:
-            raise KeyError("Blob %s not found" % blob_name)
+            raise KeyError(f"Blob {blob_name} not found")
 
     def __setitem__(self, key, value):
         value = ensure_bytes(value)
@@ -154,7 +154,7 @@ def __delitem__(self, key):
         try:
             self.client.delete_blob(self._append_path_to_prefix(key))
         except ResourceNotFoundError:
-            raise KeyError("Blob %s not found" % key)
+            raise KeyError(f"Blob {key} not found")
 
     def __eq__(self, other):
         return (

zarr/_storage/store.py

Lines changed: 1 addition & 1 deletion
@@ -227,7 +227,7 @@ def _validate_key(self, key: str):
             # TODO: Possibly allow key == ".zmetadata" too if we write a
             # consolidated metadata spec corresponding to this?
         ):
-            raise ValueError("keys starts with unexpected value: `{}`".format(key))
+            raise ValueError(f"key starts with unexpected value: `{key}`")
 
         if key.endswith("/"):
             raise ValueError("keys may not end in /")

zarr/_storage/v3.py

Lines changed: 1 addition & 1 deletion
@@ -569,7 +569,7 @@ def __init__(self, store: StoreLike, metadata_key=meta_root + "consolidated/.zme
         consolidated_format = meta.get("zarr_consolidated_format", None)
         if consolidated_format != 1:
             raise MetadataError(
-                "unsupported zarr consolidated metadata format: %s" % consolidated_format
+                f"unsupported zarr consolidated metadata format: {consolidated_format}"
             )
 
         # decode metadata

zarr/convenience.py

Lines changed: 18 additions & 26 deletions
@@ -259,7 +259,7 @@ def save_group(store: StoreLike, *args, zarr_version=None, path=None, **kwargs):
     try:
         grp = _create_group(_store, path=path, overwrite=True, zarr_version=zarr_version)
         for i, arr in enumerate(args):
-            k = "arr_{}".format(i)
+            k = f"arr_{i}"
             grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version)
         for k, arr in kwargs.items():
             grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version)
@@ -499,7 +499,7 @@ def __init__(self, log):
             self.log_file = log
         else:
             raise TypeError(
-                "log must be a callable function, file path or " "file-like object, found %r" % log
+                f"log must be a callable function, file path or file-like object, found {log!r}"
             )
 
     def __enter__(self):
@@ -526,9 +526,9 @@ def _log_copy_summary(log, dry_run, n_copied, n_skipped, n_bytes_copied):
         message = "dry run: "
     else:
         message = "all done: "
-    message += "{:,} copied, {:,} skipped".format(n_copied, n_skipped)
+    message += f"{n_copied:,} copied, {n_skipped:,} skipped"
     if not dry_run:
-        message += ", {:,} bytes copied".format(n_bytes_copied)
+        message += f", {n_bytes_copied:,} bytes copied"
     log(message)
 
 
@@ -657,9 +657,7 @@ def copy_store(
     # check if_exists parameter
     valid_if_exists = ["raise", "replace", "skip"]
     if if_exists not in valid_if_exists:
-        raise ValueError(
-            "if_exists must be one of {!r}; found {!r}".format(valid_if_exists, if_exists)
-        )
+        raise ValueError(f"if_exists must be one of {valid_if_exists!r}; found {if_exists!r}")
 
     # setup counting variables
     n_copied = n_skipped = n_bytes_copied = 0
@@ -720,20 +718,20 @@ def copy_store(
             if if_exists != "replace":
                 if dest_key in dest:
                     if if_exists == "raise":
-                        raise CopyError("key {!r} exists in destination".format(dest_key))
+                        raise CopyError(f"key {dest_key!r} exists in destination")
                     elif if_exists == "skip":
                         do_copy = False
 
             # take action
             if do_copy:
-                log("copy {}".format(descr))
+                log(f"copy {descr}")
                 if not dry_run:
                     data = source[source_key]
                     n_bytes_copied += buffer_size(data)
                     dest[dest_key] = data
                 n_copied += 1
             else:
-                log("skip {}".format(descr))
+                log(f"skip {descr}")
                 n_skipped += 1
 
     # log a final message with a summary of what happened
@@ -744,7 +742,7 @@ def copy_store(
 
 def _check_dest_is_group(dest):
     if not hasattr(dest, "create_dataset"):
-        raise ValueError("dest must be a group, got {!r}".format(dest))
+        raise ValueError(f"dest must be a group, got {dest!r}")
 
 
 def copy(
@@ -910,11 +908,9 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_
     # check if_exists parameter
     valid_if_exists = ["raise", "replace", "skip", "skip_initialized"]
     if if_exists not in valid_if_exists:
-        raise ValueError(
-            "if_exists must be one of {!r}; found {!r}".format(valid_if_exists, if_exists)
-        )
+        raise ValueError(f"if_exists must be one of {valid_if_exists!r}; found {if_exists!r}")
     if dest_h5py and if_exists == "skip_initialized":
-        raise ValueError("{!r} can only be used when copying to zarr".format(if_exists))
+        raise ValueError(f"{if_exists!r} can only be used when copying to zarr")
 
     # determine name to copy to
     if name is None:
@@ -934,9 +930,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_
         exists = dest is not None and name in dest
         if exists:
             if if_exists == "raise":
-                raise CopyError(
-                    "an object {!r} already exists in destination " "{!r}".format(name, dest.name)
-                )
+                raise CopyError(f"an object {name!r} already exists in destination {dest.name!r}")
             elif if_exists == "skip":
                 do_copy = False
             elif if_exists == "skip_initialized":
@@ -947,7 +941,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_
         # take action
         if do_copy:
             # log a message about what we're going to do
-            log("copy {} {} {}".format(source.name, source.shape, source.dtype))
+            log(f"copy {source.name} {source.shape} {source.dtype}")
 
             if not dry_run:
                 # clear the way
@@ -1015,7 +1009,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_
             n_copied += 1
 
         else:
-            log("skip {} {} {}".format(source.name, source.shape, source.dtype))
+            log(f"skip {source.name} {source.shape} {source.dtype}")
             n_skipped += 1
 
     elif root or not shallow:
@@ -1026,16 +1020,14 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_
         exists_array = dest is not None and name in dest and hasattr(dest[name], "shape")
         if exists_array:
             if if_exists == "raise":
-                raise CopyError(
-                    "an array {!r} already exists in destination " "{!r}".format(name, dest.name)
-                )
+                raise CopyError(f"an array {name!r} already exists in destination {dest.name!r}")
             elif if_exists == "skip":
                 do_copy = False
 
         # take action
         if do_copy:
             # log action
-            log("copy {}".format(source.name))
+            log(f"copy {source.name}")
 
             if not dry_run:
                 # clear the way
@@ -1078,7 +1070,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_
             n_copied += 1
 
         else:
-            log("skip {}".format(source.name))
+            log(f"skip {source.name}")
             n_skipped += 1
 
     return n_copied, n_skipped, n_bytes_copied
@@ -1327,7 +1319,7 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", **
         store, storage_options=kwargs.get("storage_options"), mode=mode, zarr_version=zarr_version
     )
     if mode not in {"r", "r+"}:
-        raise ValueError("invalid mode, expected either 'r' or 'r+'; found {!r}".format(mode))
+        raise ValueError(f"invalid mode, expected either 'r' or 'r+'; found {mode!r}")
 
     path = kwargs.pop("path", None)
     if store._store_version == 2:
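
A side note on the conversions above (illustrative only, not from the commit): the !r conversion flag and format specs such as the thousands separator carry over unchanged from str.format() to f-strings; they simply move inside the braces next to the expression they apply to.

# Illustrative sketch, not part of the commit: conversion flags (!r) and
# format specs (:,) behave identically in str.format() and in f-strings.
if_exists = "overwrite"
n_copied = 1234567

assert "found {!r}".format(if_exists) == f"found {if_exists!r}"   # repr conversion
assert "{:,} copied".format(n_copied) == f"{n_copied:,} copied"   # thousands separator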

zarr/core.py

Lines changed: 10 additions & 10 deletions
@@ -2396,11 +2396,11 @@ def _encode_chunk(self, chunk):
 
     def __repr__(self):
         t = type(self)
-        r = "<{}.{}".format(t.__module__, t.__name__)
+        r = f"<{t.__module__}.{t.__name__}"
         if self.name:
-            r += " %r" % self.name
-        r += " %s" % str(self.shape)
-        r += " %s" % self.dtype
+            r += f" {self.name!r}"
+        r += f" {str(self.shape)}"
+        r += f" {self.dtype}"
         if self._read_only:
             r += " read-only"
         r += ">"
@@ -2436,11 +2436,11 @@ def info_items(self):
 
     def _info_items_nosync(self):
         def typestr(o):
-            return "{}.{}".format(type(o).__module__, type(o).__name__)
+            return f"{type(o).__module__}.{type(o).__name__}"
 
         def bytestr(n):
             if n > 2**10:
-                return "{} ({})".format(n, human_readable_size(n))
+                return f"{n} ({human_readable_size(n)})"
             else:
                 return str(n)
 
@@ -2451,7 +2451,7 @@ def bytestr(n):
             items += [("Name", self.name)]
         items += [
             ("Type", typestr(self)),
-            ("Data type", "%s" % self.dtype),
+            ("Data type", str(self.dtype)),
             ("Shape", str(self.shape)),
             ("Chunk shape", str(self.chunks)),
             ("Order", self.order),
@@ -2461,7 +2461,7 @@ def bytestr(n):
         # filters
         if self.filters:
             for i, f in enumerate(self.filters):
-                items += [("Filter [%s]" % i, repr(f))]
+                items += [(f"Filter [{i}]", repr(f))]
 
         # compressor
         items += [("Compressor", repr(self.compressor))]
@@ -2478,9 +2478,9 @@ def bytestr(n):
         if self.nbytes_stored > 0:
             items += [
                 ("No. bytes stored", bytestr(self.nbytes_stored)),
-                ("Storage ratio", "%.1f" % (self.nbytes / self.nbytes_stored)),
+                ("Storage ratio", f"{self.nbytes / self.nbytes_stored:.1f}"),
             ]
-        items += [("Chunks initialized", "{}/{}".format(self.nchunks_initialized, self.nchunks))]
+        items += [("Chunks initialized", f"{self.nchunks_initialized}/{self.nchunks}")]
 
         return items
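
The core.py hunks also show that printf-style precision specifiers translate directly: the spec that followed % now follows the colon inside the braces, and a bare "%s" % x becomes str(x) or f"{x}". A small sketch of the equivalence (not part of the commit):

# Illustrative sketch, not part of the commit: a printf precision specifier
# such as %.1f maps onto the same spec after the colon in an f-string.
ratio = 2 / 3

assert "%.1f" % ratio == f"{ratio:.1f}" == "0.7"
assert "%s" % ratio == f"{ratio}" == str(ratio)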

zarr/creation.py

Lines changed: 2 additions & 2 deletions
@@ -287,7 +287,7 @@ def _kwargs_compat(compressor, fill_value, kwargs):
             compressor = compression
 
         else:
-            raise ValueError("bad value for compression: %r" % compression)
+            raise ValueError(f"bad value for compression: {compression!r}")
 
     # handle 'fillvalue'
     if "fillvalue" in kwargs:
@@ -297,7 +297,7 @@ def _kwargs_compat(compressor, fill_value, kwargs):
 
     # ignore other keyword arguments
     for k in kwargs:
-        warn("ignoring keyword argument %r" % k)
+        warn(f"ignoring keyword argument {k!r}")
 
     return compressor, fill_value

zarr/errors.py

Lines changed: 1 addition & 3 deletions
@@ -67,9 +67,7 @@ def __init__(self):
 
 
 def err_too_many_indices(selection, shape):
-    raise IndexError(
-        "too many indices for array; expected {}, got {}".format(len(shape), len(selection))
-    )
+    raise IndexError(f"too many indices for array; expected {len(shape)}, got {len(selection)}")
 
 
 class VindexInvalidSelectionError(_BaseZarrIndexError):
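
The err_too_many_indices change above also illustrates that arbitrary expressions, including function calls such as len(), can be embedded directly in f-string braces, which is what lets the multi-line format() call collapse to a single line. A hedged sketch (not from the commit):

# Illustrative sketch, not part of the commit: function calls and other
# expressions can appear directly inside f-string replacement fields.
shape, selection = (3, 4), (0, 1, 2)

msg = f"too many indices for array; expected {len(shape)}, got {len(selection)}"
assert msg == "too many indices for array; expected 2, got 3"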

zarr/hierarchy.py

Lines changed: 6 additions & 8 deletions
@@ -340,9 +340,9 @@ def __len__(self):
 
     def __repr__(self):
         t = type(self)
-        r = "<{}.{}".format(t.__module__, t.__name__)
+        r = f"<{t.__module__}.{t.__name__}"
         if self.name:
-            r += " %r" % self.name
+            r += f" {self.name!r}"
         if self._read_only:
             r += " read-only"
         r += ">"
@@ -358,7 +358,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
 
     def info_items(self):
         def typestr(o):
-            return "{}.{}".format(type(o).__module__, type(o).__name__)
+            return f"{type(o).__module__}.{type(o).__name__}"
 
         items = []
 
@@ -1157,17 +1157,15 @@ def _require_dataset_nosync(self, name, shape, dtype=None, exact=False, **kwargs
             shape = normalize_shape(shape)
             if shape != a.shape:
                 raise TypeError(
-                    "shape do not match existing array; expected {}, got {}".format(a.shape, shape)
+                    f"shape do not match existing array; expected {a.shape}, got {shape}"
                 )
             dtype = np.dtype(dtype)
             if exact:
                 if dtype != a.dtype:
-                    raise TypeError(
-                        "dtypes do not match exactly; expected {}, got {}".format(a.dtype, dtype)
-                    )
+                    raise TypeError(f"dtypes do not match exactly; expected {a.dtype}, got {dtype}")
             else:
                 if not np.can_cast(dtype, a.dtype):
-                    raise TypeError("dtypes ({}, {}) cannot be safely cast".format(dtype, a.dtype))
+                    raise TypeError(f"dtypes ({dtype}, {a.dtype}) cannot be safely cast")
             return a
 
         else:

0 commit comments