
Commit bc765d8

Apply Sourcery suggestions and fix typos
1 parent ba1f71a commit bc765d8


18 files changed (37 additions, 40 deletions)


src/zarr/abc/metadata.py

Lines changed: 0 additions & 2 deletions
@@ -28,8 +28,6 @@ def to_dict(self) -> dict[str, JSON]:
             value = getattr(self, key)
             if isinstance(value, Metadata):
                 out_dict[field.name] = getattr(self, field.name).to_dict()
-            elif isinstance(value, str):
-                out_dict[key] = value
             elif isinstance(value, Sequence):
                 out_dict[key] = tuple(v.to_dict() if isinstance(v, Metadata) else v for v in value)
             else:

src/zarr/codecs/sharding.py

Lines changed: 3 additions & 3 deletions
@@ -113,7 +113,7 @@ class _ShardIndex(NamedTuple):
 
     @property
     def chunks_per_shard(self) -> ChunkCoords:
-        result = tuple(self.offsets_and_lengths.shape[0:-1])
+        result = tuple(self.offsets_and_lengths.shape[:-1])
         # The cast is required until https://github.com/numpy/numpy/pull/27211 is merged
         return cast("ChunkCoords", result)
 
@@ -409,8 +409,8 @@ def validate(self, *, shape: ChunkCoords, dtype: np.dtype[Any], chunk_grid: Chun
             )
         if not isinstance(chunk_grid, RegularChunkGrid):
            raise TypeError("Sharding is only compatible with regular chunk grids.")
-        if not all(
-            s % c == 0
+        if any(
+            s % c != 0
             for s, c in zip(
                 chunk_grid.chunk_shape,
                 self.chunk_shape,
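Both rewrites in this file are behaviour-preserving. The second one replaces the "not all(s % c == 0 ...)" guard with "any(s % c != 0 ...)", which states the failure condition directly; a quick standalone check of the equivalence (the shapes below are made up, not taken from the zarr tests):

# The two guards are equivalent by De Morgan's law: "not every dimension
# divides evenly" is the same as "some dimension does not divide evenly".
def invalid_old(outer_chunk_shape, inner_chunk_shape):
    return not all(s % c == 0 for s, c in zip(outer_chunk_shape, inner_chunk_shape))

def invalid_new(outer_chunk_shape, inner_chunk_shape):
    return any(s % c != 0 for s, c in zip(outer_chunk_shape, inner_chunk_shape))

for outer, inner in [((8, 8), (2, 4)), ((8, 8), (3, 4)), ((6,), (6,))]:
    assert invalid_old(outer, inner) == invalid_new(outer, inner)
    print(outer, inner, "invalid" if invalid_new(outer, inner) else "ok")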

src/zarr/core/array.py

Lines changed: 1 addition & 1 deletion
@@ -650,7 +650,7 @@ async def _create(
                 overwrite=overwrite,
             )
         else:
-            raise ValueError(f"Insupported zarr_format. Got: {zarr_format}")
+            raise ValueError(f"Unsupported zarr_format. Got: {zarr_format}")
 
         if data is not None:
             # insert user-provided data

src/zarr/core/buffer/core.py

Lines changed: 1 addition & 1 deletion
@@ -115,7 +115,7 @@ def check_item_key_is_1d_contiguous(key: Any) -> None:
         raise TypeError(
             f"Item key has incorrect type (expected slice, got {key.__class__.__name__})"
         )
-    if not (key.step is None or key.step == 1):
+    if key.step is not None and key.step != 1:
         raise ValueError("slice must be contiguous")
 
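The rewritten condition is the direct negation of the old one: a slice passes only when its step is absent or exactly 1. A small standalone sketch of that predicate (is_contiguous is an illustrative helper, not the zarr API):

# A 1-D slice is treated as contiguous only when its step is absent or 1.
def is_contiguous(key: slice) -> bool:
    return key.step is None or key.step == 1

for key in (slice(0, 10), slice(0, 10, 1), slice(0, 10, 2), slice(None, None, -1)):
    print(key, is_contiguous(key))
# slice(0, 10, None) True, slice(0, 10, 1) True, slice(0, 10, 2) False, slice(None, None, -1) False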

src/zarr/core/common.py

Lines changed: 1 addition & 1 deletion
@@ -145,7 +145,7 @@ def parse_shapelike(data: int | Iterable[int]) -> tuple[int, ...]:
     if not all(isinstance(v, int) for v in data_tuple):
         msg = f"Expected an iterable of integers. Got {data} instead."
         raise TypeError(msg)
-    if not all(v > -1 for v in data_tuple):
+    if any(v < 0 for v in data_tuple):
         msg = f"Expected all values to be non-negative. Got {data} instead."
         raise ValueError(msg)
     return data_tuple
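Same pattern as in sharding.py: the guard now names the failure case. A simplified validator in the spirit of parse_shapelike, shown only as an illustration of the check:

from collections.abc import Iterable

# Simplified shape validator: every entry must be a non-negative int.
def parse_shape(data: Iterable[int]) -> tuple[int, ...]:
    data_tuple = tuple(data)
    if not all(isinstance(v, int) for v in data_tuple):
        raise TypeError(f"Expected an iterable of integers. Got {data_tuple} instead.")
    if any(v < 0 for v in data_tuple):
        raise ValueError(f"Expected all values to be non-negative. Got {data_tuple} instead.")
    return data_tuple

print(parse_shape([4, 8, 16]))   # (4, 8, 16)
# parse_shape([4, -1]) would raise ValueError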

src/zarr/core/indexing.py

Lines changed: 1 addition & 1 deletion
@@ -436,7 +436,7 @@ def replace_ellipsis(selection: Any, shape: ChunkCoords) -> SelectionNormalized:
     selection = ensure_tuple(selection)
 
     # count number of ellipsis present
-    n_ellipsis = sum(1 for i in selection if i is Ellipsis)
+    n_ellipsis = selection.count(Ellipsis)
 
     if n_ellipsis > 1:
         # more than 1 is an error
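tuple.count gives the same result as the generator expression here: Ellipsis is a singleton, and CPython's count() short-circuits on identity before falling back to equality. A quick check with made-up selection tuples:

# Both forms count the Ellipsis entries in a selection tuple.
selections = [(Ellipsis,), (0, ..., slice(None)), (..., ...), (0, 1)]
for sel in selections:
    assert sum(1 for i in sel if i is Ellipsis) == sel.count(Ellipsis)
    print(sel, sel.count(Ellipsis))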

src/zarr/core/metadata/v2.py

Lines changed: 1 addition & 1 deletion
@@ -249,11 +249,11 @@ def parse_filters(data: object) -> tuple[numcodecs.abc.Codec, ...] | None:
     """
     Parse a potential tuple of filters
     """
-    out: list[numcodecs.abc.Codec] = []
 
     if data is None:
         return data
     if isinstance(data, Iterable):
+        out: list[numcodecs.abc.Codec] = []
         for idx, val in enumerate(data):
             if isinstance(val, numcodecs.abc.Codec):
                 out.append(val)
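The only change is scoping: the accumulator list now lives in the one branch that uses it. A simplified sketch of the same parse shape (parse_filters_sketch is illustrative, not the zarr implementation):

import numcodecs
import numcodecs.abc

# The accumulator is declared inside the branch that builds it, so the
# None passthrough never touches an unused list.
def parse_filters_sketch(data):
    if data is None:
        return None
    if isinstance(data, (list, tuple)):
        out: list[numcodecs.abc.Codec] = []
        for val in data:
            if not isinstance(val, numcodecs.abc.Codec):
                raise TypeError(f"Expected a numcodecs Codec, got {type(val)}")
            out.append(val)
        return tuple(out)
    raise TypeError(f"Expected None or an iterable of codecs, got {type(data)}")

print(parse_filters_sketch(None))                       # None
print(parse_filters_sketch([numcodecs.GZip(level=1)]))  # (GZip(level=1),)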

src/zarr/core/metadata/v3.py

Lines changed: 4 additions & 5 deletions
@@ -107,11 +107,11 @@ def validate_codecs(codecs: tuple[Codec, ...], dtype: DataType) -> None:
     # we need to have special codecs if we are decoding vlen strings or bytestrings
     # TODO: use codec ID instead of class name
     codec_class_name = abc.__class__.__name__
-    if dtype == DataType.string and not codec_class_name == "VLenUTF8Codec":
+    if dtype == DataType.string and codec_class_name != "VLenUTF8Codec":
         raise ValueError(
             f"For string dtype, ArrayBytesCodec must be `VLenUTF8Codec`, got `{codec_class_name}`."
         )
-    if dtype == DataType.bytes and not codec_class_name == "VLenBytesCodec":
+    if dtype == DataType.bytes and codec_class_name != "VLenBytesCodec":
         raise ValueError(
             f"For bytes dtype, ArrayBytesCodec must be `VLenBytesCodec`, got `{codec_class_name}`."
         )
@@ -574,9 +574,8 @@ def parse_fill_value(
             and np.isclose(np.imag(fill_value), np.imag(casted_value), equal_nan=True)
         ):
             raise ValueError(f"fill value {fill_value!r} is not valid for dtype {data_type}")
-        else:
-            if fill_value != casted_value:
-                raise ValueError(f"fill value {fill_value!r} is not valid for dtype {data_type}")
+        elif fill_value != casted_value:
+            raise ValueError(f"fill value {fill_value!r} is not valid for dtype {data_type}")
 
         return casted_value
 
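The collapsed elif keeps the existing behaviour for non-complex dtypes: the fill value is rejected whenever casting changes it. A standalone sketch of that round-trip check, using a hypothetical helper and illustrative dtypes rather than zarr's parse_fill_value:

import numpy as np

# Hypothetical helper: cast the fill value to the target dtype and
# reject it if the round trip changes its value.
def check_fill_value(fill_value, dtype):
    casted_value = np.dtype(dtype).type(fill_value)
    if fill_value != casted_value:
        raise ValueError(f"fill value {fill_value!r} is not valid for dtype {dtype}")
    return casted_value

print(check_fill_value(0.5, "float16"))   # 0.5 is exactly representable -> passes
try:
    check_fill_value(0.1, "float16")      # 0.1 rounds to ~0.09998 -> rejected
except ValueError as exc:
    print(exc)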

src/zarr/registry.py

Lines changed: 6 additions & 6 deletions
@@ -179,9 +179,9 @@ def _parse_bytes_bytes_codec(data: dict[str, JSON] | Codec) -> BytesBytesCodec:
         if not isinstance(result, BytesBytesCodec):
             msg = f"Expected a dict representation of a BytesBytesCodec; got a dict representation of a {type(result)} instead."
             raise TypeError(msg)
+    elif not isinstance(data, BytesBytesCodec):
+        raise TypeError(f"Expected a BytesBytesCodec. Got {type(data)} instead.")
     else:
-        if not isinstance(data, BytesBytesCodec):
-            raise TypeError(f"Expected a BytesBytesCodec. Got {type(data)} instead.")
         result = data
     return result
 
@@ -199,9 +199,9 @@ def _parse_array_bytes_codec(data: dict[str, JSON] | Codec) -> ArrayBytesCodec:
         if not isinstance(result, ArrayBytesCodec):
             msg = f"Expected a dict representation of a ArrayBytesCodec; got a dict representation of a {type(result)} instead."
             raise TypeError(msg)
+    elif not isinstance(data, ArrayBytesCodec):
+        raise TypeError(f"Expected a ArrayBytesCodec. Got {type(data)} instead.")
     else:
-        if not isinstance(data, ArrayBytesCodec):
-            raise TypeError(f"Expected a ArrayBytesCodec. Got {type(data)} instead.")
         result = data
     return result
 
@@ -219,9 +219,9 @@ def _parse_array_array_codec(data: dict[str, JSON] | Codec) -> ArrayArrayCodec:
         if not isinstance(result, ArrayArrayCodec):
             msg = f"Expected a dict representation of a ArrayArrayCodec; got a dict representation of a {type(result)} instead."
             raise TypeError(msg)
+    elif not isinstance(data, ArrayArrayCodec):
+        raise TypeError(f"Expected a ArrayArrayCodec. Got {type(data)} instead.")
     else:
-        if not isinstance(data, ArrayArrayCodec):
-            raise TypeError(f"Expected a ArrayArrayCodec. Got {type(data)} instead.")
         result = data
     return result
 
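All three helpers share the same dict-or-instance parse shape, and the change flattens the nested else/if into an elif in each. A generic sketch of that shape with hypothetical names (Thing, build_from_dict, parse_thing), not the actual registry functions:

# A dict is built and type-checked, a wrong type is rejected up front,
# and an already-correct instance is passed through unchanged.
class Thing:
    pass

def build_from_dict(data: dict) -> Thing:
    # stand-in for resolving a class from a registry and instantiating it
    return Thing()

def parse_thing(data):
    if isinstance(data, dict):
        result = build_from_dict(data)
        if not isinstance(result, Thing):
            raise TypeError(f"Expected a dict representation of a Thing; got {type(result)} instead.")
    elif not isinstance(data, Thing):
        raise TypeError(f"Expected a Thing. Got {type(data)} instead.")
    else:
        result = data
    return result

print(parse_thing({"name": "example"}))  # instance built from the dict
print(parse_thing(Thing()))              # existing instance passed through
try:
    parse_thing(42)
except TypeError as exc:
    print(exc)                           # Expected a Thing. Got <class 'int'> instead.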

src/zarr/storage/_fsspec.py

Lines changed: 2 additions & 2 deletions
@@ -355,7 +355,7 @@ async def set_partial_values(
     async def list(self) -> AsyncIterator[str]:
         # docstring inherited
         allfiles = await self.fs._find(self.path, detail=False, withdirs=False)
-        for onefile in (a.removeprefix(self.path + "/") for a in allfiles):
+        for onefile in (a.removeprefix(f"{self.path}/") for a in allfiles):
             yield onefile
 
     async def list_dir(self, prefix: str) -> AsyncIterator[str]:
@@ -365,7 +365,7 @@ async def list_dir(self, prefix: str) -> AsyncIterator[str]:
             allfiles = await self.fs._ls(prefix, detail=False)
         except FileNotFoundError:
             return
-        for onefile in (a.replace(prefix + "/", "") for a in allfiles):
+        for onefile in (a.replace(f"{prefix}/", "") for a in allfiles):
             yield onefile.removeprefix(self.path).removeprefix("/")
 
     async def list_prefix(self, prefix: str) -> AsyncIterator[str]:
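Both hunks only swap string concatenation for an f-string when building the prefix to strip; the listing logic is unchanged. A tiny standalone illustration of the prefix stripping (the paths are made up):

# Stripping a store path prefix from full keys, as the listing methods do.
path = "bucket/root.zarr"
allfiles = [f"{path}/zarr.json", f"{path}/c/0/0", f"{path}/c/0/1"]

for onefile in (a.removeprefix(f"{path}/") for a in allfiles):
    print(onefile)
# zarr.json
# c/0/0
# c/0/1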

src/zarr/testing/stateful.py

Lines changed: 2 additions & 2 deletions
@@ -127,7 +127,7 @@ def add_array(
     # array_path = data.draw(st.sampled_from(self.all_arrays), label="Array move source")
     # to_group = data.draw(st.sampled_from(self.all_groups), label="Array move destination")
 
-    # # fixme renaiming to self?
+    # # fixme renaming to self?
     # array_name = os.path.basename(array_path)
     # assume(self.model.can_add(to_group, array_name))
     # new_path = f"{to_group}/{array_name}".lstrip("/")
@@ -144,7 +144,7 @@ def add_array(
 
     # from_group_name = os.path.basename(from_group)
     # assume(self.model.can_add(to_group, from_group_name))
-    # # fixme renaiming to self?
+    # # fixme renaming to self?
     # new_path = f"{to_group}/{from_group_name}".lstrip("/")
     # note(f"moving group '{from_group}' -> '{new_path}'")
     # self.model.rename(from_group, new_path)

src/zarr/testing/store.py

Lines changed: 5 additions & 5 deletions
@@ -372,8 +372,8 @@ async def test_list(self, store: S) -> None:
         prefix = "foo"
         data = self.buffer_cls.from_bytes(b"")
         store_dict = {
-            prefix + "/zarr.json": data,
-            **{prefix + f"/c/{idx}": data for idx in range(10)},
+            f"{prefix}/zarr.json": data,
+            **{f"{prefix}/c/{idx}": data for idx in range(10)},
         }
         await store._set_many(store_dict.items())
         expected_sorted = sorted(store_dict.keys())
@@ -436,8 +436,8 @@ async def test_list_empty_path(self, store: S) -> None:
     async def test_list_dir(self, store: S) -> None:
         root = "foo"
         store_dict = {
-            root + "/zarr.json": self.buffer_cls.from_bytes(b"bar"),
-            root + "/c/1": self.buffer_cls.from_bytes(b"\x01"),
+            f"{root}/zarr.json": self.buffer_cls.from_bytes(b"bar"),
+            f"{root}/c/1": self.buffer_cls.from_bytes(b"\x01"),
         }
 
         assert await _collect_aiterator(store.list_dir("")) == ()
@@ -446,7 +446,7 @@ async def test_list_dir(self, store: S) -> None:
         await store._set_many(store_dict.items())
 
         keys_observed = await _collect_aiterator(store.list_dir(root))
-        keys_expected = {k.removeprefix(root + "/").split("/")[0] for k in store_dict}
+        keys_expected = {k.removeprefix(f"{root}/").split("/")[0] for k in store_dict}
 
         assert sorted(keys_observed) == sorted(keys_expected)
 

src/zarr/testing/strategies.py

Lines changed: 1 addition & 1 deletion
@@ -289,7 +289,7 @@ def arrays(
     assert a.fill_value is not None
     assert a.name is not None
     assert a.path == normalize_path(array_path)
-    assert a.name == "/" + a.path
+    assert a.name == f"/{a.path}"
     assert isinstance(root[array_path], Array)
     assert nparray.shape == a.shape
     assert chunk_shape == a.chunks

tests/conftest.py

Lines changed: 2 additions & 2 deletions
@@ -47,7 +47,7 @@ async def parse_store(
     if store == "fsspec":
         return await FsspecStore.open(url=path)
     if store == "zip":
-        return await ZipStore.open(path + "/zarr.zip", mode="w")
+        return await ZipStore.open(f"{path}/zarr.zip", mode="w")
     raise AssertionError
 
 
@@ -101,7 +101,7 @@ async def store2(request: pytest.FixtureRequest, tmpdir: LEGACY_PATH) -> Store:
 def sync_store(request: pytest.FixtureRequest, tmp_path: LEGACY_PATH) -> Store:
     result = sync(parse_store(request.param, str(tmp_path)))
     if not isinstance(result, Store):
-        raise TypeError("Wrong store class returned by test fixture! got " + result + " instead")
+        raise TypeError(f"Wrong store class returned by test fixture! got {result} instead")
     return result
 
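The second hunk is more than style: concatenating a non-string into the old message would itself raise TypeError before the intended error could be reported, while the f-string formats any object. A standalone illustration (FakeStore is a made-up class):

# The old message construction fails when the value is not a str;
# the f-string version formats any object via str().
class FakeStore:
    def __repr__(self) -> str:
        return "FakeStore()"

result = FakeStore()

try:
    msg = "Wrong store class returned by test fixture! got " + result + " instead"
except TypeError as exc:
    print("concatenation itself failed:", exc)

print(f"Wrong store class returned by test fixture! got {result} instead")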

tests/test_api.py

Lines changed: 1 addition & 1 deletion
@@ -1174,7 +1174,7 @@ async def test_open_falls_back_to_open_group_async(zarr_format: ZarrFormat) -> N
 def test_open_modes_creates_group(tmp_path: pathlib.Path, mode: str) -> None:
     # https://github.com/zarr-developers/zarr-python/issues/2490
     zarr_dir = tmp_path / f"mode-{mode}-test.zarr"
-    if mode in ["r", "r+"]:
+    if mode in {"r", "r+"}:
         # Expect FileNotFoundError to be raised if 'r' or 'r+' mode
         with pytest.raises(FileNotFoundError):
             zarr.open(store=zarr_dir, mode=mode)

tests/test_array.py

Lines changed: 1 addition & 1 deletion
@@ -1470,7 +1470,7 @@ async def test_name(store: Store, zarr_format: ZarrFormat, path: str | None) ->
     else:
         expected_path = path
     assert arr.path == expected_path
-    assert arr.name == "/" + expected_path
+    assert arr.name == f"/{expected_path}"
 
     # test that implicit groups were created
     path_parts = expected_path.split("/")

tests/test_codec_entrypoints.py

Lines changed: 2 additions & 2 deletions
@@ -25,7 +25,7 @@ def set_path() -> Generator[None, None, None]:
 @pytest.mark.usefixtures("set_path")
 @pytest.mark.parametrize("codec_name", ["TestEntrypointCodec", "TestEntrypointGroup.Codec"])
 def test_entrypoint_codec(codec_name: str) -> None:
-    config.set({"codecs.test": "package_with_entrypoint." + codec_name})
+    config.set({"codecs.test": f"package_with_entrypoint.{codec_name}"})
     cls_test = zarr.registry.get_codec_class("test")
     assert cls_test.__qualname__ == codec_name
 
@@ -42,7 +42,7 @@ def test_entrypoint_pipeline() -> None:
 def test_entrypoint_buffer(buffer_name: str) -> None:
     config.set(
         {
-            "buffer": "package_with_entrypoint." + buffer_name,
+            "buffer": f"package_with_entrypoint.{buffer_name}",
             "ndbuffer": "package_with_entrypoint.TestEntrypointNDBuffer",
         }
     )

tests/test_indexing.py

Lines changed: 3 additions & 3 deletions
@@ -1598,7 +1598,7 @@ def test_get_selections_with_fields(store: StorePath) -> None:
     assert_array_equal(expect, actual)
 
     # basic selection with slice
-    expect = a[fields][0:2]
+    expect = a[fields][:2]
     actual = z.get_basic_selection(slice(0, 2), fields=fields)
     assert_array_equal(expect, actual)
     # alternative API
@@ -1724,8 +1724,8 @@ def test_set_selections_with_fields(store: StorePath) -> None:
     # basic selection with slice
     a[:] = ("", 0, 0)
     z[:] = ("", 0, 0)
-    a[key][0:2] = v[key][0:2]
-    z.set_basic_selection(slice(0, 2), v[key][0:2], fields=fields)
+    a[key][:2] = v[key][:2]
+    z.set_basic_selection(slice(0, 2), v[key][:2], fields=fields)
     assert_array_equal(a, z[:])
 
     # orthogonal selection
