Skip to content

Commit 2478f46

Browse files
authored
Merge branch 'main' into io
2 parents 9017ac2 + 1ed37f5 commit 2478f46

File tree

3 files changed

+18
-54
lines changed

3 files changed

+18
-54
lines changed

docs/release.rst

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,9 @@ Maintenance
 * Change occurrence of ``io.open()`` into ``open()``.
   By :user:`Dimitri Papadopoulos Orfanos <DimitriPapadopoulos>` :issue:`1421`.

+* Preserve ``dimension_separator`` when resizing arrays.
+  By :user:`Ziwen Liu <ziw-liu>` :issue:`1533`.
+
 * Initialise some sets in tests with set literals instead of list literals.
   By :user:`Dimitri Papadopoulos Orfanos <DimitriPapadopoulos>` :issue:`1534`.

zarr/core.py

Lines changed: 8 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -253,7 +253,6 @@ def _load_metadata_nosync(self):
         except KeyError:
             raise ArrayNotFoundError(self._path)
         else:
-
             # decode and store metadata as instance members
             meta = self._store._metadata_class.decode_array_metadata(meta_bytes)
             self._meta = meta
@@ -341,7 +340,14 @@ def _flush_metadata_nosync(self):
             filters=filters_config,
         )
         if getattr(self._store, "_store_version", 2) == 2:
-            meta.update(dict(chunks=self._chunks, dtype=self._dtype, order=self._order))
+            meta.update(
+                dict(
+                    chunks=self._chunks,
+                    dtype=self._dtype,
+                    order=self._order,
+                    dimension_separator=self._dimension_separator,
+                )
+            )
         else:
             meta.update(
                 dict(
@@ -1358,7 +1364,6 @@ def get_mask_selection(self, selection, out=None, fields=None):
         return self._get_selection(indexer=indexer, out=out, fields=fields)

     def _get_selection(self, indexer, out=None, fields=None):
-
         # We iterate over all chunks which overlap the selection and thus contain data
         # that needs to be extracted. Each chunk is processed in turn, extracting the
         # necessary data and storing into the correct location in the output array.
@@ -1983,7 +1988,6 @@ def _set_basic_selection_nd(self, selection, value, fields=None):
         self._set_selection(indexer, value, fields=fields)

     def _set_selection(self, indexer, value, fields=None):
-
         # We iterate over all chunks which overlap the selection and thus contain data
         # that needs to be replaced. Each chunk is processed in turn, extracting the
         # necessary data from the value array and storing into the chunk array.
@@ -2018,7 +2022,6 @@ def _set_selection(self, indexer, value, fields=None):
         ):
             # iterative approach
             for chunk_coords, chunk_selection, out_selection in indexer:
-
                 # extract data to store
                 if sel_shape == ():
                     chunk_value = value
@@ -2077,7 +2080,6 @@ def _process_chunk(
             and not self._filters
             and self._dtype != object
         ):
-
             dest = out[out_selection]
             # Assume that array-like objects that doesn't have a
             # `writeable` flag is writable.
@@ -2088,7 +2090,6 @@ def _process_chunk(
             )

             if write_direct:
-
                 # optimization: we want the whole chunk, and the destination is
                 # contiguous, so we can decompress directly from the chunk
                 # into the destination array
@@ -2321,28 +2322,24 @@ def _process_for_setitem(self, ckey, chunk_selection, value, fields=None):
             # to access the existing chunk data

             if is_scalar(value, self._dtype):
-
                 # setup array filled with value
                 chunk = np.empty_like(
                     self._meta_array, shape=self._chunks, dtype=self._dtype, order=self._order
                 )
                 chunk.fill(value)

             else:
-
                 # ensure array is contiguous
                 chunk = value.astype(self._dtype, order=self._order, copy=False)

         else:
             # partially replace the contents of this chunk

             try:
-
                 # obtain compressed data for chunk
                 cdata = self.chunk_store[ckey]

             except KeyError:
-
                 # chunk not initialized
                 if self._fill_value is not None:
                     chunk = np.empty_like(
@@ -2359,7 +2356,6 @@ def _process_for_setitem(self, ckey, chunk_selection, value, fields=None):
                 )

             else:
-
                 # decode chunk
                 chunk = self._decode_chunk(cdata)
                 if not chunk.flags.writeable:
@@ -2429,7 +2425,6 @@ def _decode_chunk(self, cdata, start=None, nitems=None, expected_shape=None):
         return chunk

     def _encode_chunk(self, chunk):
-
         # apply filters
         if self._filters:
             for f in self._filters:
@@ -2619,7 +2614,6 @@ def __setstate__(self, state):
         self.__init__(**state)

     def _synchronized_op(self, f, *args, **kwargs):
-
         if self._synchronizer is None:
             # no synchronization
             lock = nolock
@@ -2636,7 +2630,6 @@ def _synchronized_op(self, f, *args, **kwargs):
         return result

     def _write_op(self, f, *args, **kwargs):
-
         # guard condition
         if self._read_only:
             raise ReadOnlyError()
@@ -2676,7 +2669,6 @@ def resize(self, *args):
         return self._write_op(self._resize_nosync, *args)

     def _resize_nosync(self, *args):
-
         # normalize new shape argument
         old_shape = self._shape
         new_shape = normalize_resize_args(old_shape, *args)
@@ -2755,7 +2747,6 @@ def append(self, data, axis=0):
         return self._write_op(self._append_nosync, data, axis=axis)

     def _append_nosync(self, data, axis=0):
-
         # ensure data is array-like
         if not hasattr(data, "shape"):
             data = np.asanyarray(data, like=self._meta_array)

0 commit comments

Comments
 (0)