feat: init

2025-12-04 16:07:30 +08:00
commit 262583a57f
681 changed files with 117578 additions and 0 deletions


@@ -0,0 +1,269 @@
import os
import pytest
import numpy as np
import h5py
from navigate.tools.file_functions import delete_folder
def recurse_dtype(group):
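    """Walk an HDF5 group tree and assert the expected BigDataViewer dtypes.

    "resolutions" datasets must be float64, "subdivisions" int32 and "cells"
    uint16; any other dataset is reported but not checked.
    """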
for key, subgroup in group.items():
subgroup_type = type(subgroup)
if subgroup_type == h5py._hl.group.Group:
recurse_dtype(subgroup)
elif subgroup_type == h5py._hl.dataset.Dataset:
if key == "resolutions":
assert subgroup.dtype == "float64"
elif key == "subdivisions":
assert subgroup.dtype == "int32"
elif key == "cells":
assert subgroup.dtype == "uint16"
else:
print("Unknown how to handle:", key, subgroup_type)
def bdv_ds(fn, multiposition, per_stack, z_stack, stop_early, size):
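    """Configure a DummyModel and write random frames to a BigDataViewer file.

    Returns the still-open BigDataViewerDataSource so the caller can inspect
    and close it.
    """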
from test.model.dummy import DummyModel
from navigate.model.data_sources.bdv_data_source import BigDataViewerDataSource
print(
f"Conditions are multiposition: {multiposition} per_stack: {per_stack} "
f"z_stack: {z_stack} stop_early: {stop_early}"
)
# Set up model with a random number of z-steps to modulate the shape
model = DummyModel()
z_steps = np.random.randint(1, 3)
timepoints = np.random.randint(1, 3)
x_size, y_size = size
model.configuration["experiment"]["CameraParameters"]["x_pixels"] = x_size
model.configuration["experiment"]["CameraParameters"]["y_pixels"] = y_size
model.img_width = x_size
model.img_height = y_size
model.configuration["experiment"]["MicroscopeState"]["image_mode"] = (
"z-stack" if z_stack else "single"
)
model.configuration["experiment"]["MicroscopeState"]["number_z_steps"] = z_steps
model.configuration["experiment"]["MicroscopeState"][
"is_multiposition"
] = multiposition
model.configuration["experiment"]["MicroscopeState"]["timepoints"] = timepoints
model.configuration["experiment"]["BDVParameters"] = {
"shear": {
"shear_data": True,
"shear_dimension": "YZ",
"shear_angle": 45,
},
"rotate": {
"rotate_data": False,
"X": 0,
"Y": 0,
"Z": 0,
},
"down_sample": {
"down_sample": False,
"axial_down_sample": 1,
"lateral_down_sample": 1,
},
}
if per_stack:
model.configuration["experiment"]["MicroscopeState"][
"stack_cycling_mode"
] = "per_stack"
else:
model.configuration["experiment"]["MicroscopeState"][
"stack_cycling_mode"
] = "per_slice"
# Establish a BDV data source
ds = BigDataViewerDataSource(fn)
ds.set_metadata_from_configuration_experiment(model.configuration)
    # Populate one image per channel per z-slice per timepoint per position
n_images = ds.shape_c * ds.shape_z * ds.shape_t * ds.positions
print(
f"x: {ds.shape_x} y: {ds.shape_y} z: {ds.shape_z} c: {ds.shape_c} "
f"t: {ds.shape_t} positions: {ds.positions} per_stack: {ds.metadata.per_stack}"
)
data = (np.random.rand(n_images, ds.shape_y, ds.shape_x) * 2**16).astype("uint16")
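    # ds.shapes is assumed to hold the (z, y, x) shape of each resolution
    # level, so the expected on-disk size sums over levels and scales by
    # timepoints, channels, positions and the bytes per pixel.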
dbytes = np.sum(
ds.shapes.prod(1) * ds.shape_t * ds.shape_c * ds.positions * 2
) # 2 bytes per pixel (16-bit)
assert dbytes == ds.nbytes
data_positions = (np.random.rand(n_images, 5) * 50e3).astype(float)
for i in range(n_images):
ds.write(
data[i, ...].squeeze(),
x=data_positions[i, 0],
y=data_positions[i, 1],
z=data_positions[i, 2],
theta=data_positions[i, 3],
f=data_positions[i, 4],
)
if stop_early and np.random.rand() > 0.5:
break
return ds
def close_bdv_ds(ds, file_name=None):
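    """Close the data source and remove the HDF5/N5 output plus its BDV XML sidecar."""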
ds.close()
if file_name is None:
file_name = ds.file_name
# Delete
try:
xml_fn = os.path.splitext(file_name)[0] + ".xml"
if os.path.isdir(file_name):
# n5 is a directory
delete_folder(file_name)
else:
os.remove(file_name)
os.remove(xml_fn)
except PermissionError:
# Windows seems to think these files are still open
pass
@pytest.mark.parametrize("multiposition", [True, False])
@pytest.mark.parametrize("per_stack", [True, False])
@pytest.mark.parametrize("z_stack", [True, False])
@pytest.mark.parametrize("stop_early", [True, False])
@pytest.mark.parametrize("size", [(1024, 2048), (2048, 1024), (2048, 2048)])
@pytest.mark.parametrize("ext", ["h5", "n5"])
def test_bdv_write(multiposition, per_stack, z_stack, stop_early, size, ext):
fn = f"test.{ext}"
ds = bdv_ds(fn, multiposition, per_stack, z_stack, stop_early, size)
file_name = ds.file_name
ds.close()
# check datatypes
# todo: extend to n5
if ext == "h5":
ds = h5py.File(f"test.{ext}", "r")
for key in ds.keys():
recurse_dtype(ds[key])
close_bdv_ds(ds, file_name=file_name)
assert True
@pytest.mark.parametrize("multiposition", [True, False])
@pytest.mark.parametrize("per_stack", [True, False])
@pytest.mark.parametrize("z_stack", [True, False])
@pytest.mark.parametrize("size", [(1024, 2048), (2048, 1024), (2048, 2048)])
def test_bdv_getitem(multiposition, per_stack, z_stack, size):
ds = bdv_ds("test.h5", multiposition, per_stack, z_stack, False, size)
# Check indexing
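    # __getitem__ takes indices in (x, y, c, z, t, p) order and returns arrays
    # ordered (P, T, Z, C, Y, X); a fixed index keeps that axis with length 1.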
assert ds[0, ...].shape == (
ds.positions,
ds.shape_t,
ds.shape_z,
ds.shape_c,
ds.shape_y,
1,
)
assert ds[:, 0, ...].shape == (
ds.positions,
ds.shape_t,
ds.shape_z,
ds.shape_c,
1,
ds.shape_x,
)
assert ds[:, :, 0, ...].shape == (
ds.positions,
ds.shape_t,
ds.shape_z,
1,
ds.shape_y,
ds.shape_x,
)
assert ds[:, :, :, 0, ...].shape == (
ds.positions,
ds.shape_t,
1,
ds.shape_c,
ds.shape_y,
ds.shape_x,
)
assert ds[:, :, :, :, 0, ...].shape == (
ds.positions,
1,
ds.shape_z,
ds.shape_c,
ds.shape_y,
ds.shape_x,
)
assert ds[:, :, :, :, :, 0].shape == (
1,
ds.shape_t,
ds.shape_z,
ds.shape_c,
ds.shape_y,
ds.shape_x,
)
# Check slicing
sx = 5
assert ds[:sx, ...].shape == (
ds.positions,
ds.shape_t,
ds.shape_z,
ds.shape_c,
ds.shape_y,
min(ds.shape_x, sx),
)
assert ds[:, :sx, ...].shape == (
ds.positions,
ds.shape_t,
ds.shape_z,
ds.shape_c,
min(ds.shape_y, sx),
ds.shape_x,
)
assert ds[:, :, :sx, ...].shape == (
ds.positions,
ds.shape_t,
ds.shape_z,
min(ds.shape_c, sx),
ds.shape_y,
ds.shape_x,
)
assert ds[:, :, :, :sx, ...].shape == (
ds.positions,
ds.shape_t,
min(ds.shape_z, sx),
ds.shape_c,
ds.shape_y,
ds.shape_x,
)
assert ds[:, :, :, :, :sx, ...].shape == (
ds.positions,
min(ds.shape_t, sx),
ds.shape_z,
ds.shape_c,
ds.shape_y,
ds.shape_x,
)
assert ds[:, :, :, :, :, :sx].shape == (
min(ds.positions, sx),
ds.shape_t,
ds.shape_z,
ds.shape_c,
ds.shape_y,
ds.shape_x,
)
close_bdv_ds(ds)
assert True


@@ -0,0 +1,95 @@
import numpy as np
import pytest
def test_data_source_mode():
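    """Exercise the DataSource mode setter for read, write and unknown modes."""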
from navigate.model.data_sources.data_source import DataSource
ds = DataSource()
# set read and write
with pytest.raises(NotImplementedError):
ds.mode = "r"
assert ds.mode == "r"
ds.mode = "w"
assert ds.mode == "w"
# set unknown mode, default to read
with pytest.raises(NotImplementedError):
ds.mode = "goblin"
assert ds.mode == "r"
def test_data_source_cztp_indices():
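    """Map flat frame indices back to (c, z, t, p) indices.

    With a single z-step the ordering is unambiguous; with several z-steps the
    second argument of _cztp_indices selects whether z (per-stack) or c
    (per-slice) varies fastest.
    """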
import itertools
from navigate.model.data_sources.data_source import DataSource
MAX = 25
ds = DataSource()
ds.shape_c = np.random.randint(1, MAX)
ds.shape_z = 1
ds.shape_t = np.random.randint(1, MAX)
ds.positions = np.random.randint(1, MAX)
n_inds = ds.shape_c * ds.shape_z * ds.shape_t * ds.positions
print(f"n_inds : {n_inds}")
cztp_inds = itertools.product(
range(ds.positions), range(ds.shape_z), range(ds.shape_t), range(ds.shape_c)
)
for i, inds in zip(range(n_inds), cztp_inds):
c, z, t, p = ds._cztp_indices(i, False)
pt, zt, tt, ct = inds
assert c == ct
assert z == zt
assert t == tt
assert p == pt
print(
f"Shape (XYCZTP): {ds.shape} {ds.positions} "
f"Final (CZTP): {ds._cztp_indices(n_inds-1, False)}"
)
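    # With several z-steps and per_stack=False, channel varies fastest,
    # then z, then timepoint, then position.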
ds.shape_z = np.random.randint(2, MAX)
n_inds = ds.shape_c * ds.shape_z * ds.shape_t * ds.positions
cztp_inds = itertools.product(
range(ds.positions), range(ds.shape_t), range(ds.shape_z), range(ds.shape_c)
)
for i, inds in zip(range(n_inds), cztp_inds):
c, z, t, p = ds._cztp_indices(i, False)
pt, tt, zt, ct = inds
assert c == ct
assert z == zt
assert t == tt
assert p == pt
print(
f"Shape (XYCZTP): {ds.shape} {ds.positions} "
f"Final (CZTP): {ds._cztp_indices(n_inds-1, False)}"
)
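    # With per_stack=True, z varies fastest within each channel stack.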
cztp_inds = itertools.product(
range(ds.positions), range(ds.shape_t), range(ds.shape_c), range(ds.shape_z)
)
for i, inds in zip(range(n_inds), cztp_inds):
c, z, t, p = ds._cztp_indices(i, True)
pt, tt, ct, zt = inds
assert c == ct
assert z == zt
assert t == tt
assert p == pt
print(
f"Shape (XYCZTP): {ds.shape} {ds.positions} "
f"Final (CZTP): {ds._cztp_indices(n_inds-1, False)}"
)
# assert False


@@ -0,0 +1,108 @@
import os
import pytest
from navigate.tools.file_functions import delete_folder
@pytest.mark.parametrize("is_ome", [True, False])
@pytest.mark.parametrize("multiposition", [True, False])
@pytest.mark.parametrize("per_stack", [True, False])
@pytest.mark.parametrize("z_stack", [True, False])
@pytest.mark.parametrize("stop_early", [True, False])
def test_tiff_write_read(is_ome, multiposition, per_stack, z_stack, stop_early):
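    """Write random frames through a TiffDataSource and read each file back,
    checking per-file shape and pixel data."""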
import numpy as np
from test.model.dummy import DummyModel
from navigate.model.data_sources.tiff_data_source import TiffDataSource
print(
f"Conditions are is_ome: {is_ome} multiposition: {multiposition} "
f"per_stack: {per_stack} z_stack: {z_stack} stop_early: {stop_early}"
)
# Set up model with a random number of z-steps to modulate the shape
model = DummyModel()
z_steps = np.random.randint(1, 3)
timepoints = np.random.randint(1, 3)
model.configuration["experiment"]["MicroscopeState"]["image_mode"] = (
"z-stack" if z_stack else "single"
)
model.configuration["experiment"]["MicroscopeState"]["number_z_steps"] = z_steps
model.configuration["experiment"]["MicroscopeState"][
"is_multiposition"
] = multiposition
model.configuration["experiment"]["MicroscopeState"]["timepoints"] = timepoints
    if per_stack:
        model.configuration["experiment"]["MicroscopeState"][
            "stack_cycling_mode"
        ] = "per_stack"
    else:
        model.configuration["experiment"]["MicroscopeState"][
            "stack_cycling_mode"
        ] = "per_slice"
if not os.path.exists("test_save_dir"):
os.mkdir("test_save_dir")
# Establish a TIFF data source
if is_ome:
fn = "./test_save_dir/test.ome.tif"
else:
fn = "./test_save_dir/test.tif"
ds = TiffDataSource(fn)
ds.set_metadata_from_configuration_experiment(model.configuration)
    # Populate one image per channel per z-slice per timepoint per position
n_images = ds.shape_c * ds.shape_z * ds.shape_t * ds.positions
data = (np.random.rand(n_images, ds.shape_y, ds.shape_x) * 2**16).astype(
np.uint16
)
file_names_raw = []
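    # ds.file_name is assumed to list the TIFF file(s) written so far; collect
    # every name so each file can be re-opened and verified below.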
for i in range(n_images):
ds.write(data[i, ...].squeeze())
file_names_raw.extend(ds.file_name)
if stop_early and np.random.rand() > 0.5:
break
ds.close()
# Cannot use list(set()) trick here because ordering is important
file_names = []
for fn in file_names_raw:
if fn not in file_names:
file_names.append(fn)
# print(file_names)
try:
# For each file...
for i, fn in enumerate(file_names):
ds2 = TiffDataSource(fn, "r")
# Make sure XYZ size is correct (and C and T are each of size 1)
assert (
(ds2.shape_x == ds.shape_x)
and (ds2.shape_y == ds.shape_y)
and (ds2.shape_c == 1)
and (ds2.shape_t == 1)
and (ds2.shape_z == ds.shape_z)
)
# Make sure the data copied properly
np.testing.assert_equal(
ds2.data, data[i * ds.shape_z : (i + 1) * ds.shape_z, ...].squeeze()
)
ds2.close()
except IndexError as e:
if stop_early:
# This file was not written
pass
else:
raise e
except AssertionError as e:
if stop_early:
            # This file has an underfilled axis
pass
else:
raise e
except Exception as e:
raise e
finally:
delete_folder("test_save_dir")


@@ -0,0 +1,152 @@
import os
import pytest
import numpy as np
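# pydantic-ome-ngff is an optional dependency used to validate the written
# store against the OME-NGFF v0.4 multiscale schema; the check is skipped if
# it cannot be imported.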
try:
from pydantic import ValidationError
from pydantic_ome_ngff.v04.multiscale import Group
pydantic = True
except (ImportError, TypeError):
pydantic = False
from navigate.tools.file_functions import delete_folder
def zarr_ds(fn, multiposition, per_stack, z_stack, stop_early, size):
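    """Configure a DummyModel and write random frames to an OME-Zarr store.

    Returns the still-open OMEZarrDataSource so the caller can validate and
    close it.
    """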
from test.model.dummy import DummyModel
from navigate.model.data_sources.zarr_data_source import OMEZarrDataSource
print(
f"Conditions are multiposition: {multiposition} per_stack: {per_stack} "
f"z_stack: {z_stack} stop_early: {stop_early}"
)
# Set up model with a random number of z-steps to modulate the shape
model = DummyModel()
z_steps = np.random.randint(1, 3)
timepoints = np.random.randint(1, 3)
x_size, y_size = size
microscope_name = model.configuration["experiment"]["MicroscopeState"][
"microscope_name"
]
model.configuration["experiment"]["CameraParameters"][microscope_name][
"x_pixels"
] = x_size
model.configuration["experiment"]["CameraParameters"][microscope_name][
"y_pixels"
] = y_size
model.img_width = x_size
model.img_height = y_size
model.configuration["experiment"]["MicroscopeState"]["image_mode"] = (
"z-stack" if z_stack else "single"
)
model.configuration["experiment"]["MicroscopeState"]["number_z_steps"] = z_steps
model.configuration["experiment"]["MicroscopeState"][
"is_multiposition"
] = multiposition
model.configuration["experiment"]["MicroscopeState"]["timepoints"] = timepoints
model.configuration["experiment"]["BDVParameters"] = {
"shear": {
"shear_data": True,
"shear_dimension": "YZ",
"shear_angle": 45,
},
"rotate": {
"rotate_data": False,
"X": 0,
"Y": 0,
"Z": 0,
},
"down_sample": {
"down_sample": False,
"axial_down_sample": 1,
"lateral_down_sample": 1,
},
}
if per_stack:
model.configuration["experiment"]["MicroscopeState"][
"stack_cycling_mode"
] = "per_stack"
else:
model.configuration["experiment"]["MicroscopeState"][
"stack_cycling_mode"
] = "per_slice"
    # Establish an OME-Zarr data source
ds = OMEZarrDataSource(fn)
ds.set_metadata_from_configuration_experiment(model.configuration)
    # Populate one image per channel per z-slice per timepoint per position
n_images = ds.shape_c * ds.shape_z * ds.shape_t * ds.positions
print(
f"x: {ds.shape_x} y: {ds.shape_y} z: {ds.shape_z} c: {ds.shape_c} "
f"t: {ds.shape_t} positions: {ds.positions} per_stack: {ds.metadata.per_stack}"
)
data = (np.random.rand(n_images, ds.shape_y, ds.shape_x) * 2**16).astype("uint16")
dbytes = np.sum(
ds.shapes.prod(1) * ds.shape_t * ds.shape_c * ds.positions * 2
) # 2 bytes per pixel (16-bit)
assert dbytes == ds.nbytes
data_positions = (np.random.rand(n_images, 5) * 50e3).astype(float)
for i in range(n_images):
ds.write(
data[i, ...].squeeze(),
x=data_positions[i, 0],
y=data_positions[i, 1],
z=data_positions[i, 2],
theta=data_positions[i, 3],
f=data_positions[i, 4],
)
if stop_early and np.random.rand() > 0.5:
break
return ds
def close_zarr_ds(ds, file_name=None):
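    """Close the data source and remove the Zarr store from disk."""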
ds.close()
if file_name is None:
file_name = ds.file_name
# Delete
try:
if os.path.isdir(file_name):
# zarr is a directory
delete_folder(file_name)
else:
os.remove(file_name)
except PermissionError:
# Windows seems to think these files are still open
pass
@pytest.mark.parametrize("multiposition", [True, False])
@pytest.mark.parametrize("per_stack", [True, False])
@pytest.mark.parametrize("z_stack", [True, False])
@pytest.mark.parametrize("stop_early", [True, False])
@pytest.mark.parametrize("size", [(1024, 2048), (2048, 1024), (2048, 2048)])
def test_zarr_write(multiposition, per_stack, z_stack, stop_early, size):
fn = "test.zarr"
ds = zarr_ds(fn, multiposition, per_stack, z_stack, stop_early, size)
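    # When available, validate the written image against the OME-NGFF v0.4
    # multiscale schema.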
if pydantic:
try:
Group.from_zarr(ds.image)
except ValidationError as e:
print(e)
assert False
file_name = ds.file_name
close_zarr_ds(ds, file_name=file_name)
assert True