Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/zarr/core/buffer/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -535,7 +535,7 @@ def all_equal(self, other: Any, equal_nan: bool = True) -> bool:
and self._data.dtype.kind not in ("U", "S", "T", "O", "V")
):
_data, other = np.broadcast_arrays(self._data, np.asarray(other, self._data.dtype))
void_dtype = "V" + str(_data.dtype.itemsize)
void_dtype = f"V{_data.dtype.itemsize}"
return np.array_equal(_data.view(void_dtype), other.view(void_dtype))
# use array_equal to obtain equal_nan=True functionality
# Since fill-value is a scalar, isn't there a faster path than allocating a new array for fill value
Expand Down
2 changes: 1 addition & 1 deletion src/zarr/core/group.py
Original file line number Diff line number Diff line change
Expand Up @@ -840,7 +840,7 @@ def name(self) -> str:
# follow h5py convention: add leading slash
name = self.path
if name[0] != "/":
name = "/" + name
name = f"/{name}"
return name
return "/"

Expand Down
2 changes: 1 addition & 1 deletion src/zarr/registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,7 @@ def _reload_config() -> None:

def fully_qualified_name(cls: type) -> str:
module = cls.__module__
return module + "." + cls.__qualname__
return f"{module}.{cls.__qualname__}"


def register_codec(key: str, codec_cls: type[Codec], *, qualname: str | None = None) -> None:
Expand Down
4 changes: 2 additions & 2 deletions src/zarr/storage/_fsspec.py
Original file line number Diff line number Diff line change
Expand Up @@ -408,7 +408,7 @@ async def get_partial_values(
async def list(self) -> AsyncIterator[str]:
# docstring inherited
allfiles = await self.fs._find(self.path, detail=False, withdirs=False)
for onefile in (a.removeprefix(self.path + "/") for a in allfiles):
for onefile in (a.removeprefix(f"{self.path}/") for a in allfiles):
yield onefile

async def list_dir(self, prefix: str) -> AsyncIterator[str]:
Expand All @@ -418,7 +418,7 @@ async def list_dir(self, prefix: str) -> AsyncIterator[str]:
allfiles = await self.fs._ls(prefix, detail=False)
except FileNotFoundError:
return
for onefile in (a.replace(prefix + "/", "") for a in allfiles):
for onefile in (a.replace(f"{prefix}/", "") for a in allfiles):
yield onefile.removeprefix(self.path).removeprefix("/")

async def list_prefix(self, prefix: str) -> AsyncIterator[str]:
Expand Down
8 changes: 4 additions & 4 deletions src/zarr/storage/_memory.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,9 +217,9 @@ async def list_dir(self, prefix: str) -> AsyncIterator[str]:
# a pseudo directory when there's a nested item and we're listing an
# intermediate level.
keys_unique = {
key.removeprefix(prefix + "/").split("/")[0]
key.removeprefix(f"{prefix}/").split("/")[0]
for key in self._store_dict
if key.startswith(prefix + "/") and key != prefix
if key.startswith(f"{prefix}/") and key != prefix
}

for key in keys_unique:
Expand Down Expand Up @@ -822,7 +822,7 @@ async def delete(self, key: str) -> None:

async def list(self) -> AsyncIterator[str]:
# docstring inherited
prefix = self.path + "/" if self.path else ""
prefix = f"{self.path}/" if self.path else ""
async for key in super().list():
if key.startswith(prefix):
yield key.removeprefix(prefix)
Expand All @@ -832,7 +832,7 @@ async def list_prefix(self, prefix: str) -> AsyncIterator[str]:
# Manual concatenation instead of _join_paths because we need "path/"
# as the prefix when prefix is empty (to list all keys under self.path)
full_prefix = f"{self.path}/{prefix}" if self.path else prefix
path_prefix = self.path + "/" if self.path else ""
path_prefix = f"{self.path}/" if self.path else ""
async for key in super().list_prefix(full_prefix):
yield key.removeprefix(path_prefix)

Expand Down
4 changes: 2 additions & 2 deletions src/zarr/storage/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -220,10 +220,10 @@ def _relativize_path(*, path: str, prefix: str) -> str:
if prefix == "":
return path
else:
_prefix = prefix + "/"
_prefix = f"{prefix}/"
if not path.startswith(_prefix):
raise ValueError(f"The first component of {path} does not start with {prefix}.")
return path.removeprefix(f"{prefix}/")
return path.removeprefix(_prefix)


def _normalize_paths(paths: Iterable[str]) -> tuple[str, ...]:
Expand Down
4 changes: 2 additions & 2 deletions src/zarr/storage/_zip.py
Original file line number Diff line number Diff line change
Expand Up @@ -285,8 +285,8 @@ async def list_dir(self, prefix: str) -> AsyncIterator[str]:
yield key
else:
for key in keys:
if key.startswith(prefix + "/") and key.strip("/") != prefix:
k = key.removeprefix(prefix + "/").split("/")[0]
if key.startswith(f"{prefix}/") and key.strip("/") != prefix:
k = key.removeprefix(f"{prefix}/").split("/")[0]
if k not in seen:
seen.add(k)
yield k
Expand Down
8 changes: 4 additions & 4 deletions src/zarr/testing/store.py
Original file line number Diff line number Diff line change
Expand Up @@ -449,8 +449,8 @@ async def test_list(self, store: S) -> None:
prefix = "foo"
data = self.buffer_cls.from_bytes(b"")
store_dict = {
prefix + "/zarr.json": data,
**{prefix + f"/c/{idx}": data for idx in range(10)},
f"{prefix}/zarr.json": data,
**{f"{prefix}/c/{idx}": data for idx in range(10)},
}
await store._set_many(store_dict.items())
expected_sorted = sorted(store_dict.keys())
Expand Down Expand Up @@ -536,10 +536,10 @@ async def test_list_dir(self, store: S) -> None:
await store._set_many(store_dict.items())

keys_observed = await _collect_aiterator(store.list_dir(root))
keys_expected = {k.removeprefix(root + "/").split("/")[0] for k in store_dict}
keys_expected = {k.removeprefix(f"{root}/").split("/")[0] for k in store_dict}
assert sorted(keys_observed) == sorted(keys_expected)

keys_observed = await _collect_aiterator(store.list_dir(root + "/"))
keys_observed = await _collect_aiterator(store.list_dir(f"{root}/"))
assert sorted(keys_expected) == sorted(keys_observed)

async def test_set_if_not_exists(self, store: S) -> None:
Expand Down
2 changes: 1 addition & 1 deletion src/zarr/testing/strategies.py
Original file line number Diff line number Diff line change
Expand Up @@ -323,7 +323,7 @@ def arrays(
assert a.fill_value is not None
assert a.name is not None
assert a.path == normalize_path(array_path)
assert a.name == "/" + a.path
assert a.name == f"/{a.path}"
assert isinstance(root[array_path], Array)
assert nparray.shape == a.shape

Expand Down
4 changes: 2 additions & 2 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ async def parse_store(
if store == "fsspec":
return await FsspecStore.open(url=path)
if store == "zip":
return await ZipStore.open(path + "/zarr.zip", mode="w")
return await ZipStore.open(f"{path}/zarr.zip", mode="w")
if store == "memory_get_latency":
return LatencyStore(MemoryStore(), get_latency=0.0001, set_latency=0)
raise AssertionError
Expand Down Expand Up @@ -143,7 +143,7 @@ async def store2(request: pytest.FixtureRequest, tmpdir: LEGACY_PATH) -> Store:
def sync_store(request: pytest.FixtureRequest, tmp_path: LEGACY_PATH) -> Store:
result = sync(parse_store(request.param, str(tmp_path)))
if not isinstance(result, Store):
raise TypeError("Wrong store class returned by test fixture! got " + result + " instead")
raise TypeError(f"Wrong store class returned by test fixture! got {result} instead")
return result


Expand Down
34 changes: 17 additions & 17 deletions tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -822,9 +822,9 @@ def test_tree() -> None:
# assert len(source) == len(dest)
# for key in source:
# if self._version == 3:
# dest_key = key[:10] + "new/" + key[10:]
# dest_key = f"{key[:10]}new/{key[10:]}"
# else:
# dest_key = "new/" + key
# dest_key = f"new/{key}"
# assert source[key] == dest[dest_key]

# def test_source_dest_path(self):
Expand All @@ -841,7 +841,7 @@ def test_tree() -> None:
# assert source[key] == dest[dest_key]
# else:
# assert key not in dest
# assert ("new/" + key) not in dest
# assert (f"new/{key}") not in dest

# def test_excludes_includes(self):
# source = self.source
Expand All @@ -853,26 +853,26 @@ def test_tree() -> None:
# assert len(dest) == 2

# root = ""
# assert root + "foo" not in dest
# assert f"{root}foo" not in dest

# # multiple excludes
# dest = self._get_dest_store()
# excludes = "b.z", ".*x"
# copy_store(source, dest, excludes=excludes)
# assert len(dest) == 1
# assert root + "foo" in dest
# assert root + "bar/baz" not in dest
# assert root + "bar/qux" not in dest
# assert f"{root}foo" in dest
# assert f"{root}bar/baz" not in dest
# assert f"{root}bar/qux" not in dest

# # excludes and includes
# dest = self._get_dest_store()
# excludes = "b.*"
# includes = ".*x"
# copy_store(source, dest, excludes=excludes, includes=includes)
# assert len(dest) == 2
# assert root + "foo" in dest
# assert root + "bar/baz" not in dest
# assert root + "bar/qux" in dest
# assert f"{root}foo" in dest
# assert f"{root}bar/baz" not in dest
# assert f"{root}bar/qux" in dest

# def test_dry_run(self):
# source = self.source
Expand All @@ -884,7 +884,7 @@ def test_tree() -> None:
# source = self.source
# dest = self._get_dest_store()
# root = ""
# dest[root + "bar/baz"] = b"mmm"
# dest[f"{root}bar/baz"] = b"mmm"

# # default ('raise')
# with pytest.raises(CopyError):
Expand All @@ -897,16 +897,16 @@ def test_tree() -> None:
# # skip
# copy_store(source, dest, if_exists="skip")
# assert 3 == len(dest)
# assert dest[root + "foo"] == b"xxx"
# assert dest[root + "bar/baz"] == b"mmm"
# assert dest[root + "bar/qux"] == b"zzz"
# assert dest[f"{root}foo"] == b"xxx"
# assert dest[f"{root}bar/baz"] == b"mmm"
# assert dest[f"{root}bar/qux"] == b"zzz"

# # replace
# copy_store(source, dest, if_exists="replace")
# assert 3 == len(dest)
# assert dest[root + "foo"] == b"xxx"
# assert dest[root + "bar/baz"] == b"yyy"
# assert dest[root + "bar/qux"] == b"zzz"
# assert dest[f"{root}foo"] == b"xxx"
# assert dest[f"{root}bar/baz"] == b"yyy"
# assert dest[f"{root}bar/qux"] == b"zzz"

# # invalid option
# with pytest.raises(ValueError):
Expand Down
2 changes: 1 addition & 1 deletion tests/test_array.py
Original file line number Diff line number Diff line change
Expand Up @@ -1645,7 +1645,7 @@ async def test_name(store: Store, zarr_format: ZarrFormat, path: str | None) ->
else:
expected_path = path
assert arr.path == expected_path
assert arr.name == "/" + expected_path
assert arr.name == f"/{expected_path}"

# test that implicit groups were created
path_parts = expected_path.split("/")
Expand Down
4 changes: 2 additions & 2 deletions tests/test_codec_entrypoints.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
@pytest.mark.usefixtures("set_path")
@pytest.mark.parametrize("codec_name", ["TestEntrypointCodec", "TestEntrypointGroup.Codec"])
def test_entrypoint_codec(codec_name: str) -> None:
config.set({"codecs.test": "package_with_entrypoint." + codec_name})
config.set({"codecs.test": f"package_with_entrypoint.{codec_name}"})
cls_test = zarr.registry.get_codec_class("test")
assert cls_test.__qualname__ == codec_name

Expand All @@ -24,7 +24,7 @@ def test_entrypoint_pipeline() -> None:
def test_entrypoint_buffer(buffer_name: str) -> None:
config.set(
{
"buffer": "package_with_entrypoint." + buffer_name,
"buffer": f"package_with_entrypoint.{buffer_name}",
"ndbuffer": "package_with_entrypoint.TestEntrypointNDBuffer",
}
)
Expand Down
22 changes: 11 additions & 11 deletions tests/test_group.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@
async def store(request: pytest.FixtureRequest, tmpdir: LEGACY_PATH) -> Store:
result = await parse_store(request.param, str(tmpdir))
if not isinstance(result, Store):
raise TypeError("Wrong store class returned by test fixture! got " + result + " instead")
raise TypeError(f"Wrong store class returned by test fixture! got {result} instead")
return result


Expand Down Expand Up @@ -150,15 +150,15 @@ def test_group_name_properties(
"""
root = Group.from_store(store=StorePath(store=store, path=root_name), zarr_format=zarr_format)
assert root.path == normalize_path(root_name)
assert root.name == "/" + root.path
assert root.name == f"/{root.path}"
assert root.basename == root.path

branch = root.create_group(branch_name)
if root.path == "":
assert branch.path == normalize_path(branch_name)
else:
assert branch.path == "/".join([root.path, normalize_path(branch_name)])
assert branch.name == "/" + branch.path
assert branch.name == f"/{branch.path}"
assert branch.basename == branch_name.split("/")[-1]


Expand Down Expand Up @@ -732,7 +732,7 @@ def test_group_create_array(
a[:] = data

assert array.path == normalize_path(name)
assert array.name == "/" + array.path
assert array.name == f"/{array.path}"
assert array.shape == shape
assert array.dtype == np.dtype(dtype)
assert np.array_equal(array[:], data)
Expand Down Expand Up @@ -1082,21 +1082,21 @@ async def test_asyncgroup_delitem(store: Store, zarr_format: ZarrFormat) -> None

# todo: clean up the code duplication here
if zarr_format == 2:
assert not await agroup.store_path.store.exists(array_name + "/" + ".zarray")
assert not await agroup.store_path.store.exists(array_name + "/" + ".zattrs")
assert not await agroup.store_path.store.exists(f"{array_name}/.zarray")
assert not await agroup.store_path.store.exists(f"{array_name}/.zattrs")
elif zarr_format == 3:
assert not await agroup.store_path.store.exists(array_name + "/" + "zarr.json")
assert not await agroup.store_path.store.exists(f"{array_name}/zarr.json")
else:
raise AssertionError

sub_group_path = "sub_group"
_ = await agroup.create_group(sub_group_path, attributes={"foo": 100})
await agroup.delitem(sub_group_path)
if zarr_format == 2:
assert not await agroup.store_path.store.exists(array_name + "/" + ".zgroup")
assert not await agroup.store_path.store.exists(array_name + "/" + ".zattrs")
assert not await agroup.store_path.store.exists(f"{array_name}/.zgroup")
assert not await agroup.store_path.store.exists(f"{array_name}/.zattrs")
elif zarr_format == 3:
assert not await agroup.store_path.store.exists(array_name + "/" + "zarr.json")
assert not await agroup.store_path.store.exists(f"{array_name}/zarr.json")
else:
raise AssertionError

Expand All @@ -1113,7 +1113,7 @@ async def test_asyncgroup_create_group(

assert isinstance(subgroup, AsyncGroup)
assert subgroup.path == normalize_path(name)
assert subgroup.name == "/" + subgroup.path
assert subgroup.name == f"/{subgroup.path}"
assert subgroup.attrs == attributes
assert subgroup.store_path.path == subgroup.path
assert subgroup.store_path.store == store
Expand Down
2 changes: 1 addition & 1 deletion tests/test_regression/test_v2_dtype_regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -216,7 +216,7 @@ def test_roundtrip_v2(source_array_v2: ArrayV2, tmp_path: Path, script_path: Pat
capture_output=True,
text=True,
)
assert copy_op.returncode == 0, "stdout " + copy_op.stdout + "\n stderr" + copy_op.stderr
assert copy_op.returncode == 0, f"stdout {copy_op.stdout}\n stderr{copy_op.stderr}"
out_array = zarr.open_array(store=out_path, mode="r", zarr_format=2)
assert source_array_v2.metadata.to_dict() == out_array.metadata.to_dict()
assert np.array_equal(source_array_v2[:], out_array[:])
Expand Down
2 changes: 1 addition & 1 deletion tests/test_store/test_memory.py
Original file line number Diff line number Diff line change
Expand Up @@ -380,7 +380,7 @@ def test_from_url(self, store: ManagedMemoryStore) -> None:

def test_from_url_with_path(self, store: ManagedMemoryStore) -> None:
"""Test that from_url extracts path component from URL."""
url = str(store) + "/some/path"
url = f"{store}/some/path"
store2 = ManagedMemoryStore.from_url(url)
assert store2._store_dict is store._store_dict
assert store2.path == "some/path"
Expand Down
2 changes: 1 addition & 1 deletion tests/test_store/test_zip.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ async def test_list_without_explicit_open(self, tmp_path: Path) -> None:
root = zarr.open_group(store=zarr_path, mode="w")
root["x"] = np.array([1, 2, 3])
shutil.make_archive(str(zarr_path), "zip", zarr_path)
shutil.move(str(zarr_path) + ".zip", zip_path)
shutil.move(f"{zarr_path}.zip", zip_path)

store = ZipStore(zip_path, mode="r")
assert not store._is_open
Expand Down
Loading