diff --git a/.github/workflows/test_conda.yml b/.github/workflows/test_conda.yml
index a4c2f65..127ac9a 100644
--- a/.github/workflows/test_conda.yml
+++ b/.github/workflows/test_conda.yml
@@ -21,7 +21,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu-latest, windows-latest, macos-15-intel, macos-15]
+        os: [ubuntu-latest, windows-latest, macos-15]
 
     steps:
 
@@ -57,4 +57,4 @@ jobs:
 
     - name: Run snakemake
      run: |
-        snakemake --cores 2 -p --configfile ./config_files/motta2019.yml
+        snakemake --cores 2 -p --configfile ./config_files/test.yml
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..00c20e7
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,90 @@
+[build-system] # Require setuptools version due to https://github.com/pypa/setuptools/issues/2938
+requires = ["setuptools>=61.0.0", "wheel"]
+
+[project]
+dependencies = [
+    "matplotlib",
+    "pyvista",
+    "scikit-image",
+    "meshio",
+    "dask-image",
+    "seaborn",
+    "pyacvd",
+    "pytetwild",
+    "fastremap",
+    "cloud-volume",
+    "connected-components-3d",
+    "cmocean",
+    "nbmorph",
+]
+name = "EMIMesh"
+version = "1.0.0.dev0"
+description = "Generating high-quality extracellular-membrane-intracellular meshes from imaging data"
+authors = [{ name = "Marius Causemann", email = "mariusca@simula.no" }]
+license = { file = "LICENSE" }
+readme = "readme.md"
+
+
+[project.scripts]
+emi-generate-mesh = "emimesh.clients.generate_mesh:main"
+emi-download-data = "emimesh.clients.download_data:main"
+emi-evaluate-mesh = "emimesh.clients.evaluate_mesh:main"
+emi-extract-surfaces = "emimesh.clients.extract_surfaces:main"
+emi-plot-analysis = "emimesh.clients.analysis_plot:main"
+emi-process-image-data = "emimesh.clients.process_image_data:main"
+emi-generate-screenshot = "emimesh.clients.generate_screenshot:main"
+
+[project.optional-dependencies]
+test = []
+dev = ["pdbpp", "ipython", "mypy", "ruff"]
+webknossos = ["webknossos"]
+docs = [
+    "jupyter-book<2.0.0",
+    "jupytext",
+    "ipykernel<7.0.0", # Note: Remove once https://github.com/ipython/ipykernel/issues/1450 is in a release
+    "sphinx-codeautolink",
+]
+all = ["EMIMesh[test,dev,docs]"]
+
+[tool.pytest.ini_options]
+addopts = ["--import-mode=importlib"]
+testpaths = ["tests"]
+
+[tool.mypy]
+ignore_missing_imports = true
+# Folders to exclude
+exclude = ["docs/", "build/"]
+# Folder to check with mypy
+files = ["src", "tests"]
+
+[tool.ruff]
+src = ["src", "tests", "docs"]
+line-length = 100
+indent-width = 4
+
+[tool.ruff.lint]
+select = [
+    # Pyflakes
+    "F",
+    # Pycodestyle
+    "E",
+    "W",
+    # isort
+    "I001",
+]
+
+
+[tool.ruff.lint.isort]
+known-first-party = ["EMIMesh"]
+known-third-party = ["numpy", "pytest"]
+section-order = [
+    "future",
+    "standard-library",
+    "mpi",
+    "third-party",
+    "first-party",
+    "local-folder",
+]
+
+[tool.ruff.lint.isort.sections]
+"mpi" = ["mpi4py", "petsc4py"]
diff --git a/workflow/scripts/cle_patch.py b/src/emimesh/cle_patch.py
similarity index 100%
rename from workflow/scripts/cle_patch.py
rename to src/emimesh/cle_patch.py
diff --git a/src/emimesh/clients/analysis_plot.py b/src/emimesh/clients/analysis_plot.py
new file mode 100644
index 0000000..1c43f92
--- /dev/null
+++ b/src/emimesh/clients/analysis_plot.py
@@ -0,0 +1,26 @@
+import argparse
+import yaml
+from emimesh.generate_analysis_plots import plot_cell_sizes
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--infile",
+        help="path to mesh statistics infile",
+        type=str,
+    )
+    parser.add_argument(
+        "--output",
+        help="outfile name",
+        type=str,
+    )
+    args = parser.parse_args()
+
+    with open(args.infile) as infile:
+        mesh_statistic = yaml.load(infile, Loader=yaml.FullLoader)
+
+    plot_cell_sizes(mesh_statistic, args.output)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/emimesh/clients/download_data.py b/src/emimesh/clients/download_data.py
new file mode 100644
index 0000000..3331b85
--- /dev/null
+++ b/src/emimesh/clients/download_data.py
@@ -0,0 +1,51 @@
+import argparse
+from pathlib import Path
+from emimesh.utils import np2pv
+from emimesh.download_data import download_webknossos, download_cloudvolume
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--cloudpath",
+        help="path to cloud data",
+        type=str,
+        default="precomputed://gs://iarpa_microns/minnie/minnie65/seg",
+    )
+    parser.add_argument("--mip", help="resolution (0 is highest)", type=int, default=0)
+    parser.add_argument(
+        "--position",
+        help="point position in x-y-z integer pixel position \
+            (can be copied from neuroglancer)",
+        type=str,
+    )
+    parser.add_argument(
+        "--size",
+        help="cube side length of the volume to be downloaded (in nm)",
+        type=str,
+        default=1000,
+    )
+    parser.add_argument(
+        "--output", help="output filename", type=str, default="data.xdmf"
+    )
+
+    args = parser.parse_args()
+
+    position = args.position.split("-")
+    try:
+        size = [int(args.size)] * 3
+    except ValueError:
+        size = [int(s) for s in args.size.split("-")]
+
+    try:
+        img, res = download_cloudvolume(args.cloudpath, args.mip, position, size)
+    except Exception:
+        img, res = download_webknossos(args.cloudpath, args.mip, position, size)
+
+    print(res)
+    data = np2pv(img, res)
+    Path(args.output).parent.mkdir(exist_ok=True, parents=True)
+    data.save(args.output)
+
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/workflow/scripts/evaluate_mesh.py b/src/emimesh/clients/evaluate_mesh.py
similarity index 57%
rename from workflow/scripts/evaluate_mesh.py
rename to src/emimesh/clients/evaluate_mesh.py
index c9b57f1..528d262 100644
--- a/workflow/scripts/evaluate_mesh.py
+++ b/src/emimesh/clients/evaluate_mesh.py
@@ -1,63 +1,11 @@
+import argparse
 import pyvista as pv
 import numpy as np
-import argparse
-import yaml
-import matplotlib.pyplot as plt
-import seaborn as sns
-
-lstr = "label"
-ecs_id = 1
-
-
-def compute_local_width(mesh, ecs_id, cell_ids):
-    ecs = mesh.extract_cells(np.isin(mesh.cell_data[lstr], ecs_id))
-    distances = []
-    for cid in cell_ids:
-        cell = mesh.extract_cells(np.isin(mesh.cell_data[lstr], [cid]))
-        dist = ecs.compute_implicit_distance(cell.extract_surface())
-        distances.append(abs(dist.point_data["implicit_distance"]))
-
-    dist = ecs.compute_implicit_distance(mesh.extract_surface())
-    distances.append(abs(dist.point_data["implicit_distance"]))
-
-    distances = np.array(distances).T
-    distances.sort()
-    min_dist = distances[:, 0] + distances[:, 1]
-    ecs["local_width"] = min_dist
-    ecs = ecs.point_data_to_cell_data()
-    ecs = ecs.compute_cell_sizes()
-    return ecs.cell_data["local_width"], abs(ecs.cell_data["Volume"])
-def compute_surface_volume(mesh, cell_ids):
-    mesh = mesh.compute_cell_sizes()
-    mesh["Volume"] = np.abs(mesh["Volume"])
-    assert (mesh["Volume"] > 0).all()
-    volumes, surface_areas = [], []
-    for cid in cell_ids:
-        cell = mesh.extract_cells(np.isin(mesh.cell_data[lstr], [cid]))
-        surf = cell.extract_surface()
-        surface_areas.append(float(surf.compute_cell_sizes()["Area"].sum()))
-        volumes.append(float(cell["Volume"].sum()))
-    return volumes, surface_areas
-
-
-def plot_local_width(width, volume, filename):
-    plt.figure(dpi=300)
-    sns.histplot(
-        x=width,
-        weights=volume,
-        bins=40,
-        kde=True,
-        stat="percent",
-        edgecolor="white",
-        kde_kws={"bw_adjust": 1},
-    )
-    plt.xlabel("local width (nm)")
-    plt.ylabel("relative frequency (%)")
-    plt.tight_layout()
-    plt.savefig(filename)
+import yaml
+from emimesh.evaluate_mesh import compute_surface_volume, ecs_id, lstr
 
 
-if __name__ == "__main__":
+def main():
     parser = argparse.ArgumentParser()
     parser.add_argument(
         "--infile",
@@ -108,4 +56,8 @@ def plot_local_width(width, volume, filename):
 
     with open(args.output, "w") as outfile:
         yaml.dump(results, outfile)
 
-    #plot_local_width(width, volume, Path(args.infile).parent / "local_width.png")
\ No newline at end of file
+    #plot_local_width(width, volume, Path(args.infile).parent / "local_width.png")
+
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/src/emimesh/clients/extract_surfaces.py b/src/emimesh/clients/extract_surfaces.py
new file mode 100644
index 0000000..913f21e
--- /dev/null
+++ b/src/emimesh/clients/extract_surfaces.py
@@ -0,0 +1,71 @@
+import argparse
+from pathlib import Path
+import pyvista as pv
+
+from emimesh.extract_surfaces import extract_surface, clip_closed_box, extract_cell_meshes, create_balanced_csg_tree
+import numpy as np
+import json
+import fastremap
+from emimesh.utils import get_bounding_box
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--infile",
+        help="path to image data",
+        type=str,
+    )
+    parser.add_argument(
+        "--outdir", help="directory for output", type=str, default="output"
+    )
+    parser.add_argument(
+        "--ncpus", help="number of cores, default 1", type=int, default=1
+    )
+    args = parser.parse_args()
+    outdir = Path(args.outdir)
+    print(f"reading file: {args.infile}")
+    img_grid = pv.read(args.infile)
+    dims = img_grid.dimensions
+    resolution = img_grid.spacing
+    img = img_grid["data"].reshape(dims - np.array([1, 1, 1]), order="F")
+    cell_labels, cell_counts = fastremap.unique(img, return_counts=True)
+    cell_labels = list(cell_labels[np.argsort(cell_counts)])
+    cell_labels.remove(0)
+
+    outerbox = get_bounding_box(img_grid, resolution[0]*5 + img_grid.length*0.002)
+
+    if "roimask" in img_grid.array_names:
+        roimask = img_grid["roimask"].reshape(dims - np.array([1, 1, 1]),
+                                              order="F")
+        roipadded = np.pad(roimask, 1)
+        grid = pv.ImageData(dimensions=roipadded.shape, spacing=resolution,
+                            origin=(0, 0, 0))
+        roisurf = extract_surface(roipadded, grid, mesh_reduction_factor=10,
+                                  taubin_smooth_iter=5)
+
+        clip_closed_box(roisurf, outerbox)
+    else:
+        roisurf = outerbox
+    roi_file = outdir / "roi.ply"
+
+    surfs = extract_cell_meshes(
+        img,
+        cell_labels[::-1],
+        resolution,
+        mesh_reduction_factor=10,
+        taubin_smooth_iter=5,
+        write_dir=outdir,
+        ncpus=args.ncpus
+    )
+
+    mesh_files = [outdir / f"{cid}.ply" for cid in surfs if cid]
+    roisurf.save(roi_file)
+    csg_tree = create_balanced_csg_tree([str(f) for f in mesh_files])
+    csg_tree = create_balanced_csg_tree([str(roi_file), csg_tree])
+    csg_tree = {"operation":"intersection","right":csg_tree, "left":str(roi_file)}
+    with open(outdir / "csgtree.json", "w") as outfile:
+        outfile.write(json.dumps(csg_tree))
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/workflow/scripts/generate_mesh.py b/src/emimesh/clients/generate_mesh.py
similarity index 72%
rename from workflow/scripts/generate_mesh.py
rename to src/emimesh/clients/generate_mesh.py
index 5e690db..2746230 100644
--- a/workflow/scripts/generate_mesh.py
+++ b/src/emimesh/clients/generate_mesh.py
@@ -2,7 +2,7 @@
 import pyvista as pv
 import argparse
 import numpy as np
-import time
+from emimesh.generate_mesh import mesh_surfaces
 
 def get_values(d):
     for v in d.values():
@@ -11,19 +11,8 @@ def get_values(d):
         else:
             yield v
 
-def mesh_surfaces(csg_tree_path, eps, stop_quality, max_threads):
-    from pytetwild import tetrahedralize_csg
-    start = time.time()
-    mesh = tetrahedralize_csg(csg_tree_path, epsilon=eps, edge_length_r=eps*50,
-                              coarsen=True, stop_energy=stop_quality,
-                              num_threads=max_threads).clean()
-    print("meshing finished!")
-    mesh["label"] = mesh["marker"]
-    mesh.field_data['runtime'] = time.time() - start
-    mesh.field_data['threads'] = max_threads
-    return mesh
 
-if __name__ == "__main__":
+def main():
     parser = argparse.ArgumentParser()
     parser.add_argument(
         "--csgtree",
@@ -64,3 +53,7 @@ def mesh_surfaces(csg_tree_path, eps, stop_quality, max_threads):
     pv.save_meshio(args.output, volmesh)
 
     print(volmesh.array_names)
+
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/workflow/scripts/generate_screenshot.py b/src/emimesh/clients/generate_screenshot.py
similarity index 96%
rename from workflow/scripts/generate_screenshot.py
rename to src/emimesh/clients/generate_screenshot.py
index 5f4479d..47178ac 100644
--- a/workflow/scripts/generate_screenshot.py
+++ b/src/emimesh/clients/generate_screenshot.py
@@ -1,6 +1,6 @@
 import argparse
 import pyvista as pv
-from plot_utils import get_screenshot
+from emimesh.plot_utils import get_screenshot
 import numpy as np
 import matplotlib
 import cmocean
@@ -8,7 +8,7 @@
 
 hexcolor = lambda c: int(matplotlib.colors.to_hex(c)[1:], base=16)
 
-if __name__ == "__main__":
+def main():
     parser = argparse.ArgumentParser()
     parser.add_argument(
         "--infile",
@@ -65,4 +65,7 @@
 
     #generate_screenshots([cells_k3d, ecs_k3d], args.output, fov=32)
 
-    
\ No newline at end of file
+
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/workflow/scripts/generate_surface_tags.py b/src/emimesh/clients/generate_surface_tags.py
similarity index 93%
rename from workflow/scripts/generate_surface_tags.py
rename to src/emimesh/clients/generate_surface_tags.py
index 0082bfe..79ed7a4 100644
--- a/workflow/scripts/generate_surface_tags.py
+++ b/src/emimesh/clients/generate_surface_tags.py
@@ -1,27 +1,9 @@
-from fenics import *
+
+from fenics import MeshFunction, facets, cells, Mesh, XDMFFile
 import argparse
 import numpy as np
 
-
-def mark_interfaces(mesh, subdomains, outer_offset):
-    bm = MeshFunction("size_t", mesh, 2, 0)
-    bm.rename("boundaries", "")
-    for f in facets(mesh):
-        domains = []
-        for c in cells(f):
-            domains.append(subdomains[c])
-        domains = list(set(domains))
-        domains.sort()
-        if f.exterior():
-            bm[f] = domains[0] + outer_offset
-            continue
-        if len(domains) < 2:
-            continue
-        bm[f] = domains[1]
-    return bm
-
-
-if __name__ == "__main__":
+def main():
     parser = argparse.ArgumentParser()
     parser.add_argument(
         "--infile",
@@ -46,3 +28,25 @@ def mark_interfaces(mesh, subdomains, outer_offset):
 
     with XDMFFile(args.output) as outfile:
         outfile.write(bm)
+
+
+def mark_interfaces(mesh, subdomains, outer_offset):
+    bm = MeshFunction("size_t", mesh, 2, 0)
+    bm.rename("boundaries", "")
+    for f in facets(mesh):
+        domains = []
+        for c in cells(f):
+            domains.append(subdomains[c])
+        domains = list(set(domains))
+        domains.sort()
+        if f.exterior():
+            bm[f] = domains[0] + outer_offset
+            continue
+        if len(domains) < 2:
+            continue
+        bm[f] = domains[1]
+    return bm
+
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/workflow/scripts/process_image_data.py b/src/emimesh/clients/process_image_data.py
similarity index 64%
rename from workflow/scripts/process_image_data.py
rename to src/emimesh/clients/process_image_data.py
index b9ac4ca..f4195f3 100644
--- a/workflow/scripts/process_image_data.py
+++ b/src/emimesh/clients/process_image_data.py
@@ -1,94 +1,19 @@
-import numpy as np
 import argparse
-import yaml
 import time
 import pyvista as pv
 from pathlib import Path
-from utils import np2pv
+from emimesh.utils import np2pv
 from dask_image.ndinterp import affine_transform
-import fastremap
 import dask.array as da
-import dask
 from functools import partial
-import cc3d
 import numba
-import nbmorph
-
-dask.config.set({"array.chunk-size": "1024 MiB"})
-
-def mergecells(img, labels):
-    print(f"merging cells: {labels}, ({img.shape})")
-    img = np.where(np.isin(img, labels), labels[0], img)
-    return img
-
-def ncells(img, ncells, keep_cell_labels=None):
-    cell_labels, cell_counts = fastremap.unique(img, return_counts=True)
-    cell_labels = cell_labels[np.argsort(cell_counts)]
-    if keep_cell_labels is None: cois =[]
-    cois = set(keep_cell_labels)
-    for cid in cell_labels:
-        if len(cois) >= ncells: break
-        cois.add(cid)
-    img = np.where(np.isin(img, cois), img, 0)
-    return img
-
-def dilate(img, radius, labels=None):
-    print(f"dilating cells, ({img.shape})")
-    if labels is None:
-        img = nbmorph.dilate_labels_spherical(img, radius=radius)
-    else:
-        vipimg = np.where(np.isin(img, labels), img, 0)
-        vipimg = dilate(vipimg, radius=radius)
-        img = np.where(vipimg, vipimg, img)
-    return img
-
-def erode(img, radius, labels=None):
-    print(f"eroding cells, ({img.shape})")
-    if labels is None:
-        img = nbmorph.erode_labels_spherical(img, radius=radius)
-    else:
-        vipimg = np.where(np.isin(img, labels), img, 0)
-        vipimg = erode(vipimg, radius=radius)
-        orig_wo_vips = np.where(np.isin(img, labels), 0, img)
-        img = np.where(orig_wo_vips > vipimg, orig_wo_vips, vipimg)
-    return img
-
-def smooth(img, iterations, radius, labels=None):
-    print(f"smoothing cells, ({img.shape})")
-    if labels is None:
-        img = nbmorph.smooth_labels_spherical(img, radius=radius,
-                                              iterations=iterations, dilate_radius=radius)
-    else:
-        vipimg = np.where(np.isin(img, labels), img, 0)
-        vipimg = smooth(vipimg, iterations=iterations, radius=radius)
-        # remove labelled cells from original image
-        orig_wo_vips = np.where(np.isin(img, labels), 0, img)
-        # insert smoothed labeled cells in original (overwrite original)
-        img = np.where(vipimg, vipimg, orig_wo_vips)
-    return img
-
-def removeislands(img, minsize):
-    return cc3d.dust(img, threshold=minsize, connectivity=6)
-
-opdict ={"merge": mergecells, "smooth":smooth, "dilate":dilate,
-         "erode":erode, "removeislands":removeislands, "ncells":ncells}
-
-def _parse_to_dict(values):
-    result = {}
-    for value in values:
-        k, v = value.split('=')
-        result[k.strip(" '")] = yaml.safe_load(v.strip(" '"))
-    return result
-
-def parse_operations(ops):
-    parsed = []
-    for op in ops:
-        subargs = _parse_to_dict(op[1:])
-        parsed.append((op[0], subargs))
-    return parsed
-
+import numpy as np
+import fastremap
+import dask
+import yaml
+from emimesh.process_image_data import opdict, parse_operations
 
-if __name__ == "__main__":
+def main():
     parser = argparse.ArgumentParser()
     parser.add_argument("--infile", help="input data", type=str)
     parser.add_argument(
@@ -203,3 +128,7 @@ def parse_operations(ops):
 
     with open(resdir / "imagestatistic.yml", "w") as mesh_stat_file:
         yaml.dump(mesh_statistics, mesh_stat_file)
+
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/workflow/scripts/download_data.py b/src/emimesh/download_data.py
similarity index 50%
rename from workflow/scripts/download_data.py
rename to src/emimesh/download_data.py
index 832eb7a..2309a94 100644
--- a/workflow/scripts/download_data.py
+++ b/src/emimesh/download_data.py
@@ -1,8 +1,6 @@
 import numpy as np
-import pyvista as pv
-import argparse
-from utils import np2pv
-from pathlib import Path
+
+__all__ = ["download_webknossos", "download_cloudvolume"]
 
 def download_webknossos(cloud_path, mip, pos, physical_size):
     import webknossos as wk
@@ -37,47 +35,3 @@ def download_cloudvolume(cloud_path, mip, pos, physical_size):
     img = vol.download_point(pos, mip=mip, size=size).squeeze()
 
     return img, vol.resolution
-
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "--cloudpath",
-        help="path to cloud data",
-        type=str,
-        default="precomputed://gs://iarpa_microns/minnie/minnie65/seg",
-    )
-    parser.add_argument("--mip", help="resolution (0 is highest)", type=int, default=0)
-    parser.add_argument(
-        "--position",
-        help="point position in x-y-z integer pixel position \
-            (can be copied from neuroglancer)",
-        type=str,
-    )
-    parser.add_argument(
-        "--size",
-        help="cube side length of the volume to be downloaded (in nm)",
-        type=str,
-        default=1000,
-    )
-    parser.add_argument(
-        "--output", help="output filename", type=str, default="data.xdmf"
-    )
-
-    args = parser.parse_args()
-
-    position = args.position.split("-")
-    try:
-        size = [int(args.size)] * 3
-    except ValueError:
-        size = [int(s) for s in args.size.split("-")]
-
-    try:
-        img,res = download_cloudvolume(args.cloudpath, args.mip, position, size)
-    except:
-        img,res = download_webknossos(args.cloudpath, args.mip, position, size)
-
-    print(res)
-    data = np2pv(img, res)
-    Path(args.output).parent.mkdir(exist_ok=True, parents=True)
-    data.save(args.output)
diff --git a/src/emimesh/evaluate_mesh.py b/src/emimesh/evaluate_mesh.py
new file mode 100644
index 0000000..c790f16
--- /dev/null
+++ b/src/emimesh/evaluate_mesh.py
@@ -0,0 +1,57 @@
+
+import numpy as np
+import matplotlib.pyplot as plt
+import seaborn as sns
+
+lstr = "label"
+ecs_id = 1
+
+
+def compute_local_width(mesh, ecs_id, cell_ids):
+    ecs = mesh.extract_cells(np.isin(mesh.cell_data[lstr], ecs_id))
+    distances = []
+    for cid in cell_ids:
+        cell = mesh.extract_cells(np.isin(mesh.cell_data[lstr], [cid]))
+        dist = ecs.compute_implicit_distance(cell.extract_surface())
+        distances.append(abs(dist.point_data["implicit_distance"]))
+
+    dist = ecs.compute_implicit_distance(mesh.extract_surface())
+    distances.append(abs(dist.point_data["implicit_distance"]))
+
+    distances = np.array(distances).T
+    distances.sort()
+    min_dist = distances[:, 0] + distances[:, 1]
+    ecs["local_width"] = min_dist
+    ecs = ecs.point_data_to_cell_data()
+    ecs = ecs.compute_cell_sizes()
+    return ecs.cell_data["local_width"], abs(ecs.cell_data["Volume"])
+
+def compute_surface_volume(mesh, cell_ids):
+    mesh = mesh.compute_cell_sizes()
+    mesh["Volume"] = np.abs(mesh["Volume"])
+    assert (mesh["Volume"] > 0).all()
+    volumes, surface_areas = [], []
+    for cid in cell_ids:
+        cell = mesh.extract_cells(np.isin(mesh.cell_data[lstr], [cid]))
+        surf = cell.extract_surface()
+        surface_areas.append(float(surf.compute_cell_sizes()["Area"].sum()))
+        volumes.append(float(cell["Volume"].sum()))
+    return volumes, surface_areas
+
+
+def plot_local_width(width, volume, filename):
+    plt.figure(dpi=300)
+    sns.histplot(
+        x=width,
+        weights=volume,
+        bins=40,
+        kde=True,
+        stat="percent",
+        edgecolor="white",
+        kde_kws={"bw_adjust": 1},
+    )
+    plt.xlabel("local width (nm)")
+    plt.ylabel("relative frequency (%)")
+    plt.tight_layout()
+    plt.savefig(filename)
+
diff --git a/workflow/scripts/extract_surfaces.py b/src/emimesh/extract_surfaces.py
similarity index 68%
rename from workflow/scripts/extract_surfaces.py
rename to src/emimesh/extract_surfaces.py
index b98035e..e0f4fa4 100644
--- a/workflow/scripts/extract_surfaces.py
+++ b/src/emimesh/extract_surfaces.py
@@ -1,17 +1,15 @@
-import os
+
 import numpy as np
 import pyvista as pv
-import json
+
 import pyacvd
-import argparse
+
 from pathlib import Path
-from utils import get_bounding_box
-import fastremap
 import sys
 import shutil
 import itertools
 
-from pyvista.core import _vtk_core as _vti
+from pyvista.core import _vtk_core as _vtk
 from pyvista.core.filters import _get_output, _update_alg
 from pyvista.core.utilities.helpers import generate_plane
 
@@ -21,9 +19,9 @@ def clip_closed_surface(surf, normal='x', origin=None,
                         tolerance=1e-06, inplace=False, progress_bar=False):
     plane = generate_plane(normal, origin)
-    collection = _vti.vtiPlaneCollection()
+    collection = _vtk.vtkPlaneCollection()
     collection.AddItem(plane)
-    alg = _vti.vtiClipClosedSurface()
+    alg = _vtk.vtkClipClosedSurface()
     alg.SetGenerateFaces(True)
     alg.SetInputDataObject(surf)
     alg.SetTolerance(tolerance)
@@ -156,60 +154,3 @@ def create_balanced_csg_tree(surface_files):
                , "right": create_balanced_csg_tree(surface_files[int(n/2):])}
     return surface_files[0]
 
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "--infile",
-        help="path to image data",
-        type=str,
-    )
-    parser.add_argument(
-        "--outdir", help="directory for output", type=str, default="output"
-    )
-    parser.add_argument(
-        "--ncpus", help="number oc cores, default 1", type=int, default=1
-    )
-    args = parser.parse_args()
-    outdir = Path(args.outdir)
-    print(f"reading file: {args.infile}")
-    img_grid = pv.read(args.infile)
-    dims = img_grid.dimensions
-    resolution = img_grid.spacing
-    img = img_grid["data"].reshape(dims - np.array([1, 1, 1]), order="F")
-    cell_labels, cell_counts = fastremap.unique(img, return_counts=True)
-    cell_labels = list(cell_labels[np.argsort(cell_counts)])
-    cell_labels.remove(0)
-
-    outerbox = get_bounding_box(img_grid, resolution[0]*5 + img_grid.length*0.002)
-
-    if "roimask" in img_grid.array_names:
-        roimask = img_grid["roimask"].reshape(dims - np.array([1, 1, 1]),
-                                              order="F")
-        roipadded = np.pad(roimask, 1)
-        grid = pv.ImageData(dimensions=roipadded.shape, spacing=resolution,
-                            origin=(0, 0, 0))
-        roisurf = extract_surface(roipadded, grid, mesh_reduction_factor=10,
-                                  taubin_smooth_iter=5)
-
-        clip_closed_box(roisurf, outerbox)
-    else:
-        roisurf = outerbox
-    roi_file = outdir / "roi.ply"
-
-    surfs = extract_cell_meshes(
-        img,
-        cell_labels[::-1],
-        resolution,
-        mesh_reduction_factor=10,
-        taubin_smooth_iter=5,
-        write_dir=outdir,
-        ncpus=args.ncpus
-    )
-
-    mesh_files = [outdir / f"{cid}.ply" for cid in surfs if cid]
-    roisurf.save(roi_file)
-    csg_tree = create_balanced_csg_tree([str(f) for f in mesh_files])
-    csg_tree = create_balanced_csg_tree([str(roi_file), csg_tree])
-    csg_tree = {"operation":"intersection","right":csg_tree, "left":str(roi_file)}
-    with open(outdir / "csgtree.json", "w") as outfile:
-        outfile.write(json.dumps(csg_tree))
diff --git a/workflow/scripts/generate_analysis_plots.py b/src/emimesh/generate_analysis_plots.py
similarity index 85%
rename from workflow/scripts/generate_analysis_plots.py
rename to src/emimesh/generate_analysis_plots.py
index cad93ec..489d3a4 100644
--- a/workflow/scripts/generate_analysis_plots.py
+++ b/src/emimesh/generate_analysis_plots.py
@@ -1,6 +1,5 @@
 import numpy as np
-import argparse
-import yaml
+
 import matplotlib.pyplot as plt
 from matplotlib.ticker import PercentFormatter
 import seaborn as sns
@@ -95,22 +94,3 @@ def plot_cell_sizes(mesh_statistics, filename):
     plt.tight_layout()
     plt.savefig(filename)
 
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "--infile",
-        help="path to mesh statistics infile",
-        type=str,
-    )
-    parser.add_argument(
-        "--output",
-        help="outfile name",
-        type=str,
-    )
-    args = parser.parse_args()
-
-    with open(args.infile) as infile:
-        mesh_statistic = yaml.load(infile, Loader=yaml.FullLoader)
-
-    plot_cell_sizes(mesh_statistic, args.output)
diff --git a/src/emimesh/generate_mesh.py b/src/emimesh/generate_mesh.py
new file mode 100644
index 0000000..0eaac26
--- /dev/null
+++ b/src/emimesh/generate_mesh.py
@@ -0,0 +1,16 @@
+import time
+
+from pytetwild import tetrahedralize_csg
+
+__all__ = ["mesh_surfaces"]
+
+def mesh_surfaces(csg_tree_path, eps, stop_quality, max_threads):
+    start = time.time()
+    mesh = tetrahedralize_csg(csg_tree_path, epsilon=eps, edge_length_r=eps*50,
+                              coarsen=True, stop_energy=stop_quality,
+                              num_threads=max_threads).clean()
+    print("meshing finished!")
+    mesh["label"] = mesh["marker"]
+    mesh.field_data['runtime'] = time.time() - start
+    mesh.field_data['threads'] = max_threads
+    return mesh
diff --git a/workflow/scripts/k3d_headless.py b/src/emimesh/k3d_headless.py
similarity index 100%
rename from workflow/scripts/k3d_headless.py
rename to src/emimesh/k3d_headless.py
diff --git a/workflow/scripts/overview_table.py b/src/emimesh/overview_table.py
similarity index 100%
rename from workflow/scripts/overview_table.py
rename to src/emimesh/overview_table.py
diff --git a/workflow/scripts/plot_utils.py b/src/emimesh/plot_utils.py
similarity index 100%
rename from workflow/scripts/plot_utils.py
rename to src/emimesh/plot_utils.py
diff --git a/src/emimesh/process_image_data.py b/src/emimesh/process_image_data.py
new file mode 100644
index 0000000..e7d106a
--- /dev/null
+++ b/src/emimesh/process_image_data.py
@@ -0,0 +1,80 @@
+import numpy as np
+import yaml
+import fastremap
+import dask
+import cc3d
+import nbmorph
+
+dask.config.set({"array.chunk-size": "1024 MiB"})
+
+def mergecells(img, labels):
+    print(f"merging cells: {labels}, ({img.shape})")
+    img = np.where(np.isin(img, labels), labels[0], img)
+    return img
+
+def ncells(img, ncells, keep_cell_labels=None):
+    cell_labels, cell_counts = fastremap.unique(img, return_counts=True)
+    cell_labels = cell_labels[np.argsort(cell_counts)]
+    if keep_cell_labels is None: keep_cell_labels = []
+    cois = set(keep_cell_labels)
+    for cid in cell_labels:
+        if len(cois) >= ncells: break
+        cois.add(cid)
+    img = np.where(np.isin(img, cois), img, 0)
+    return img
+
+def dilate(img, radius, labels=None):
+    print(f"dilating cells, ({img.shape})")
+    if labels is None:
+        img = nbmorph.dilate_labels_spherical(img, radius=radius)
+    else:
+        vipimg = np.where(np.isin(img, labels), img, 0)
+        vipimg = dilate(vipimg, radius=radius)
+        img = np.where(vipimg, vipimg, img)
+    return img
+
+def erode(img, radius, labels=None):
+    print(f"eroding cells, ({img.shape})")
+    if labels is None:
+        img = nbmorph.erode_labels_spherical(img, radius=radius)
+    else:
+        vipimg = np.where(np.isin(img, labels), img, 0)
+        vipimg = erode(vipimg, radius=radius)
+        orig_wo_vips = np.where(np.isin(img, labels), 0, img)
+        img = np.where(orig_wo_vips > vipimg, orig_wo_vips, vipimg)
+    return img
+
+def smooth(img, iterations, radius, labels=None):
+    print(f"smoothing cells, ({img.shape})")
+    if labels is None:
+        img = nbmorph.smooth_labels_spherical(img, radius=radius,
+                                              iterations=iterations, dilate_radius=radius)
+    else:
+        vipimg = np.where(np.isin(img, labels), img, 0)
+        vipimg = smooth(vipimg, iterations=iterations, radius=radius)
+        # remove labelled cells from original image
+        orig_wo_vips = np.where(np.isin(img, labels), 0, img)
+        # insert smoothed labeled cells in original (overwrite original)
+        img = np.where(vipimg, vipimg, orig_wo_vips)
+    return img
+
+def removeislands(img, minsize):
+    return cc3d.dust(img, threshold=minsize, connectivity=6)
+
+opdict = {"merge": mergecells, "smooth": smooth, "dilate": dilate,
+          "erode": erode, "removeislands": removeislands, "ncells": ncells}
+
+def _parse_to_dict(values):
+    result = {}
+    for value in values:
+        k, v = value.split('=')
+        result[k.strip(" '")] = yaml.safe_load(v.strip(" '"))
+    return result
+
+def parse_operations(ops):
+    parsed = []
+    for op in ops:
+        subargs = _parse_to_dict(op[1:])
+        parsed.append((op[0], subargs))
+    return parsed
+    
\ No newline at end of file
diff --git a/workflow/scripts/process_image_data_opencl.py b/src/emimesh/process_image_data_opencl.py
similarity index 100%
rename from workflow/scripts/process_image_data_opencl.py
rename to src/emimesh/process_image_data_opencl.py
diff --git a/workflow/scripts/profile_pycle.py b/src/emimesh/profile_pycle.py
similarity index 100%
rename from workflow/scripts/profile_pycle.py
rename to src/emimesh/profile_pycle.py
diff --git a/workflow/scripts/seperate_touching_cells.py b/src/emimesh/seperate_touching_cells.py
similarity index 94%
rename from workflow/scripts/seperate_touching_cells.py
rename to src/emimesh/seperate_touching_cells.py
index 34ce5c2..8674dae 100644
--- a/workflow/scripts/seperate_touching_cells.py
+++ b/src/emimesh/seperate_touching_cells.py
@@ -1,6 +1,6 @@
-from fenics import *
+from fenics import vertices, cells, Mesh, MeshFunction, XDMFFile
+
 import argparse
-import numpy as np
 
 def seperate_touching_cells(sm):
     cellids = []
diff --git a/workflow/scripts/utils.py b/src/emimesh/utils.py
similarity index 96%
rename from workflow/scripts/utils.py
rename to src/emimesh/utils.py
index dcf9953..0678b7b 100644
--- a/workflow/scripts/utils.py
+++ b/src/emimesh/utils.py
@@ -1,7 +1,5 @@
 import pyvista as pv
 import numpy as np
-import dask
-import dask.array as da
 import fastremap
 
 
diff --git a/workflow/Snakefile b/workflow/Snakefile
index 6068328..3688446 100644
--- a/workflow/Snakefile
+++ b/workflow/Snakefile
@@ -52,7 +52,7 @@ rule downloadImageData:
         options=dict_to_optionsstring(config["raw"])
     shell:
         """
-        python workflow/scripts/download_data.py \
+        emi-download-data \
        {params.options} \
        --output {output.rawdata}
        """
@@ -72,7 +72,7 @@ rule processImageData:
         options=dict_to_optionsstring(config["processing"])
     shell:
         """
-        python workflow/scripts/process_image_data.py --infile {input.rawdata} \
+        emi-process-image-data --infile {input.rawdata} \
        {params.options} \
        --output {output.outfile} \
        --nworkers {resources.ntasks}
@@ -91,7 +91,7 @@ rule extractSurfaces:
 
         conda_env
     shell:
        """
-        python workflow/scripts/extract_surfaces.py --infile {input.processeddata} --ncpus {resources.ntasks} \
+        emi-extract-surfaces --infile {input.processeddata} --ncpus {resources.ntasks} \
        --outdir {output.outdir}
        """
 
@@ -110,7 +110,7 @@ rule generateMesh:
         options=dict_to_optionsstring(config["meshing"])
     shell:
        """
-        python workflow/scripts/generate_mesh.py \
+        emi-generate-mesh \
        --csgtree {input.csgtree} \
        {params.options} \
        --output {output.outfile} \
@@ -130,7 +130,7 @@ rule evaluateMesh:
     threads: 1
     shell:
        """
-        python workflow/scripts/evaluate_mesh.py \
+        emi-evaluate-mesh \
        --infile {input} --output {output}
        """
 
@@ -144,7 +144,7 @@ rule takeScreenshot:
     threads: 1
     shell:
        """
-        python workflow/scripts/generate_screenshot.py \
+        emi-generate-screenshot \
        --infile {input} --output {output}
        """
 
@@ -157,7 +157,7 @@ rule generateAnalysisPlot:
         conda_env
     shell:
        """
-        python workflow/scripts/generate_analysis_plots.py \
+        emi-plot-analysis \
        --infile {input.infile} --output {output.plotfile}
        """
 
diff --git a/workflow/envs/environment.yml b/workflow/envs/environment.yml
index b540a9f..964ec23 100644
--- a/workflow/envs/environment.yml
+++ b/workflow/envs/environment.yml
@@ -11,10 +11,11 @@ dependencies:
   - pyacvd
   - pip
   - pip:
-    - pytetwild
-    - fastremap
-    - cloud-volume
-    - webknossos
-    - connected-components-3d
-    - cmocean
-    - nbmorph
+      - pytetwild
+      - fastremap
+      - cloud-volume
+      # - webknossos  # Needs to unpin its zipp versioning, ref: https://github.com/scalableminds/webknossos-libs/issues/1421
+      - connected-components-3d
+      - cmocean
+      - nbmorph
+      - -e ../..