fixes for prestitched data, and revert back to chunksize z=1
akhanf committed Sep 8, 2024
1 parent 7e9e602 commit ce66e0e
Showing 5 changed files with 34 additions and 20 deletions.
14 changes: 9 additions & 5 deletions config/config.yml
@@ -57,26 +57,30 @@ bigstitcher:
   downsampling: 1
   block_size_x: 256 # for storage
   block_size_y: 256
-  block_size_z: 256
+  block_size_z: 1
   block_size_factor_x: 1 #e.g. 2 will use 2*block_size for computation
   block_size_factor_y: 1
-  block_size_factor_z: 1
+  block_size_factor_z: 256

 ome_zarr:
   desc: stitchedflatcorr
   max_downsampling_layers: 5 # e.g. 4 levels: { 0: orig, 1: ds2, 2: ds4, 3: ds8, 4: ds16}
   rechunk_size: #z, y, x
-    - 256
-    - 256
-    - 256
+    - 1
+    - 4096
+    - 4096
   scaling_method: 'local_mean' #can be nearest, gaussian, local_mean, zoom (zoom uses spline interp)

   omero_metadata:
     channels:
       default_color: 'FFFFFF'
       color_mapping:
         autof: 'FFFFFF'
+        AutoF: 'FFFFFF'
         abeta: '00FF00'
+        Abeta: '00FF00'
+        PI: 'FFFFFF'
+        AlphaSynuclein: '00FF00'
       defaults:
         active: True
         coefficient: 1.0
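With block_size_z reverted to 1 for storage, block_size_factor_z: 256 restores a 256-slice block for computation (per the comment above, the factor multiplies block_size), and rechunk_size does the analogous thing for the OME-Zarr output: thin z slabs that are large in-plane. A rough dask sketch of that storage-vs-computation chunking, with a hypothetical volume shape (not from this repo):

    import dask.array as da

    # hypothetical fused volume (z, y, x)
    darr = da.zeros((2048, 8192, 8192), chunks=(256, 256, 256))

    # computation blocks: block_size * block_size_factor per axis
    compute = darr.rechunk((1 * 256, 256 * 1, 256 * 1))  # -> (256, 256, 256)

    # storage chunks, as in rechunk_size (z, y, x): thin slabs, large tiles
    storage = compute.rechunk((1, 4096, 4096))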
11 changes: 11 additions & 0 deletions spimprep_run
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+if ! command -v singularity >/dev/null 2>&1; then
+    echo "Error: 'singularity' is not installed or not in the PATH." >&2
+    exit 1
+fi
+
+container='docker://khanlab/spimprep-deps:main'
+unset SNAKEMAKE_PROFILE
+singularity exec "${container}" snakemake --set-resources bigstitcher:mem_mb=30000 fuse_dataset:mem_mb=30000 -pr "$@"
+
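Arguments are forwarded straight through to snakemake via "$@", so a typical invocation might look like (hypothetical flags):

    ./spimprep_run --cores all -n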
2 changes: 1 addition & 1 deletion workflow/Snakefile
@@ -33,7 +33,7 @@ rule all:
     input:
         get_all_targets(),
         get_bids_toplevel_targets(),
-        get_qc_targets(),
+        # get_qc_targets(), #need to skip this if using prestitched
     localrule: True


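An alternative to commenting the line out, sketched here only (it assumes a hypothetical use_prestitched boolean in the config, which this repo may not define), is to gate the QC targets; Snakemake flattens nested lists in input, so an empty list simply drops them:

    rule all:
        input:
            get_all_targets(),
            get_bids_toplevel_targets(),
            [] if config.get("use_prestitched", False) else get_qc_targets(),
        localrule: True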
4 changes: 2 additions & 2 deletions workflow/rules/ome_zarr.smk
@@ -77,10 +77,10 @@ rule tif_stacks_to_ome_zarr:
         config["containers"]["spimprep"]
     group:
         "preproc"
-    threads: 8
+    threads: config["cores_per_rule"]
     resources:
         runtime=360,
-        mem_mb=32000,
+        mem_mb=35000,
     script:
         "../scripts/tif_stacks_to_ome_zarr.py"

23 changes: 11 additions & 12 deletions workflow/scripts/tif_stacks_to_ome_zarr.py
@@ -1,17 +1,16 @@
 import json
 import zarr
 import dask.array as da
-from dask.array.image import imread as dask_imread
+from dask.array.image import imread as dask_imread
 from ome_zarr.io import parse_url
 from ome_zarr.writer import write_image
 from ome_zarr.format import format_from_version
 from ome_zarr.scale import Scaler
 from dask.diagnostics import ProgressBar
 from upath import UPath as Path
 from lib.cloud_io import get_fsspec, is_remote
+from dask.distributed import Client, LocalCluster

-in_tif_glob = snakemake.params.in_tif_glob
-
 metadata_json=snakemake.input.metadata_json
 downsampling=snakemake.params.downsampling
 max_layer=snakemake.params.max_downsampling_layers #number of downsamplings by 2 to include in zarr
@@ -21,6 +20,9 @@
 scaling_method=snakemake.params.scaling_method
 uri = snakemake.params.uri

+cluster = LocalCluster(processes=False)
+client = Client(cluster)
+print(client.dashboard_link)

 # prepare metadata for ome-zarr
 with open(metadata_json) as fp:
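LocalCluster(processes=False) starts a threads-only scheduler inside the current Python process, so the workers share memory with the arrays built here instead of serializing them between worker processes, and client.dashboard_link prints the URL of the live diagnostics page. A minimal sketch of the same setup with explicit teardown (an alternative pattern, not what this commit does), since both objects are context managers:

    from dask.distributed import Client, LocalCluster

    with LocalCluster(processes=False) as cluster, Client(cluster) as client:
        print(client.dashboard_link)  # task/memory diagnostics while writing
        # ... build dask arrays and write the OME-Zarr here ...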
@@ -48,6 +50,8 @@
 omero={key:val for key,val in snakemake.config['ome_zarr']['omero_metadata']['defaults'].items()}
 omero['channels']=[]

+
+
 darr_list=[]
 for i,stain in enumerate(stains):

@@ -63,7 +67,6 @@
     channel_metadata['color'] = color
     omero['channels'].append(channel_metadata)

-
 darr_channels = da.stack(darr_list)


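da.stack turns the per-stain list into a single array with a new leading channel axis. A toy sketch with hypothetical shapes (not the pipeline's real sizes): N volumes of shape (z, y, x) become one (c, z, y, x) array, matching the channel-first layout handed to write_image below.

    import dask.array as da

    # two hypothetical single-stain volumes (z, y, x)
    chan0 = da.zeros((100, 4096, 4096), chunks=(1, 4096, 4096))
    chan1 = da.zeros((100, 4096, 4096), chunks=(1, 4096, 4096))

    darr_channels = da.stack([chan0, chan1])  # shape (2, 100, 4096, 4096)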
@@ -72,23 +75,19 @@
     fs = get_fsspec(uri,**fs_args)
     store = zarr.storage.FSStore(Path(uri).path,fs=fs,dimension_separator='/',mode='w')
 else:
-    store = zarr.DirectoryStore(out_zarr)
-
-
-
-
-
+    store = zarr.DirectoryStore(out_zarr,dimension_separator='/')

 group = zarr.group(store,overwrite=True)
 scaler = Scaler(max_layer=max_layer,method=scaling_method)


 with ProgressBar():
-    write_image(image=darr_channels,
+    delayed = write_image(image=darr_channels,
                         group=group,
                         scaler=scaler,
                         coordinate_transformations=coordinate_transformations,
                         axes=axes,
-                        metadata={'omero':omero}
+                        metadata={'omero':omero},
+                        compute=True
                         )

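With compute=True, write_image performs the write eagerly, so capturing the return value in delayed is vestigial here. For contrast, a sketch of the deferred variant (an assumption about how ome_zarr's compute=False return is consumed, not part of this commit), where the returned dask delayed objects are computed explicitly:

    import dask

    with ProgressBar():
        delayed = write_image(image=darr_channels,
                              group=group,
                              scaler=scaler,
                              coordinate_transformations=coordinate_transformations,
                              axes=axes,
                              metadata={'omero': omero},
                              compute=False)
        dask.compute(*delayed)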