Replace most instances of starfish.image.Filter.Reduce with imagestack.reduce (#1548)

Unless the reduction is happening multiple times, it's simpler to represent a stack reduction as imagestack.reduce

Test plan: travis thanks!
Tony Tung authored Oct 10, 2019
1 parent 6bf1d96 commit 3cd0df6
Showing 13 changed files with 43 additions and 77 deletions.
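Before the per-file diffs, here is the pattern swap in one place: a minimal sketch assembled from the hunks below, not code copied from any single file. It assumes an existing starfish.ImageStack named stack and the usual starfish imports (Axes and FunctionSource from starfish.types, Filter from starfish.image); the function names max_project_old/max_project_new are illustrative only.

import starfish
from starfish.image import Filter
from starfish.types import Axes, FunctionSource


def max_project_old(stack: starfish.ImageStack) -> starfish.ImageStack:
    # Pattern removed by this commit: build a Filter.Reduce instance,
    # then call its run() method on the stack.
    max_projector = Filter.Reduce(
        (Axes.CH, Axes.ZPLANE), func="max", module=FunctionSource.np)
    return max_projector.run(stack)


def max_project_new(stack: starfish.ImageStack) -> starfish.ImageStack:
    # Pattern introduced by this commit: reduce the stack directly;
    # func="max" is applied over the listed axes.
    return stack.reduce({Axes.CH, Axes.ZPLANE}, func="max")

Per the commit message, Filter.Reduce is kept for pipelines that apply the same reduction several times; the one-off call sites below are the ones rewritten.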
8 changes: 3 additions & 5 deletions docs/source/_static/data_processing_examples/iss_pipeline.py
@@ -19,9 +19,8 @@ def iss_pipeline(fov, codebook):
# register the raw image
learn_translation = LearnTransform.Translation(reference_stack=fov.get_image('dots'),
axes=Axes.ROUND, upsampling=100)
-max_projector = Filter.Reduce(
-    (Axes.CH, Axes.ZPLANE), func="max", module=FunctionSource.np)
-transforms_list = learn_translation.run(max_projector.run(primary_image))
+transforms_list = learn_translation.run(
+    primary_image.reduce({Axes.CH, Axes.ZPLANE}, func="max"))
warp = ApplyTransform.Warp()
registered = warp.run(primary_image, transforms_list=transforms_list, in_place=False, verbose=True)

@@ -39,8 +38,7 @@ def iss_pipeline(fov, codebook):
)

# detect spots using laplacian of gaussians approach
-dots_max_projector = Filter.Reduce((Axes.ROUND, Axes.ZPLANE), func="max", module=FunctionSource.np)
-dots_max = dots_max_projector.run(fov.get_image('dots'))
+dots_max = fov.get_image('dots').reduce((Axes.ROUND, Axes.ZPLANE), func="max", module=FunctionSource.np)
# locate spots in a reference image
spots = bd.run(reference_image=dots_max, image_stack=filtered)

5 changes: 2 additions & 3 deletions docs/source/_static/tutorials/exec_image_manipulations.py
@@ -73,8 +73,7 @@
#
from starfish.image import Filter

-max_projector = Filter.Reduce((Axes.CH,), func="max", module=FunctionSource.np)
-projected_image: starfish.ImageStack = max_projector.run(image)
+projected_image: starfish.ImageStack = image.reduce({Axes.CH}, func="max")

###################################################################################################
# To demonstrate the effect, the below figure displays each channel of round :code:`1` in the
@@ -108,4 +107,4 @@
ax6.set_axis_off()

# fix matplotlib whitespace
-f.tight_layout()
+f.tight_layout()
13 changes: 6 additions & 7 deletions docs/source/getting_started/loading_data/index.rst
@@ -108,9 +108,8 @@ we'll collapse all the spots across channels in each round, mimicing a "dots" im
In[11]: from starfish.image import Filter
In[12]: from starfish.types import FunctionSource
-In[13]: max_projector = Filter.Reduce((Axes.CH,), func="max", module=FunctionSource.np)
-In[14]: max_projector.run(image)
-Out[14]: <starfish.ImageStack (r: 4, c: 1, z: 1, y: 1000, x: 1000)>
+In[13]: image.reduce({Axes.CH}, func="max")
+Out[13]: <starfish.ImageStack (r: 4, c: 1, z: 1, y: 1000, x: 1000)>
Visualizing Data
----------------
@@ -125,10 +124,10 @@ to enable the :code:`qt` environment in IPython:

.. code-block:: python
-In[15]: ipython = get_ipython()
-In[16]: ipython.magic("gui qt5")
-In[17]: starfish.display(image)
-Out[17]: <napari.components._viewer.model.Viewer at 0x15f7b44e0>
+In[14]: ipython = get_ipython()
+In[15]: ipython.magic("gui qt5")
+In[16]: starfish.display(image)
+Out[16]: <napari.components._viewer.model.Viewer at 0x15f7b44e0>
Typing the above code should display an image viewer that looks something like this:,

24 changes: 8 additions & 16 deletions notebooks/ISS.ipynb
@@ -137,13 +137,8 @@
"source": [
"from starfish.image import Filter\n",
"\n",
"rcz_max_projector = Filter.Reduce(\n",
" (Axes.ROUND, Axes.CH, Axes.ZPLANE,), func=\"max\", module=FunctionSource.np)\n",
"per_round_max_projector = Filter.Reduce(\n",
" (Axes.CH, Axes.ZPLANE,), func=\"max\", module=FunctionSource.np)\n",
"\n",
"dots = fov.get_image(\"dots\")\n",
"dots_single_plane = rcz_max_projector.run(dots)\n",
"dots_single_plane = dots.reduce({Axes.ROUND, Axes.CH, Axes.ZPLANE}, func=\"max\")\n",
"imshow_plane(dots_single_plane, title=\"Anchor channel, all RNA molecules\")"
]
},
@@ -161,7 +156,7 @@
"outputs": [],
"source": [
"nuclei = fov.get_image(\"nuclei\")\n",
"nuclei_single_plane = rcz_max_projector.run(nuclei)\n",
"nuclei_single_plane = nuclei.reduce({Axes.ROUND, Axes.CH, Axes.ZPLANE}, func=\"max\")\n",
"imshow_plane(nuclei_single_plane, title=\"Nuclei (DAPI) channel\")"
]
},
@@ -180,8 +175,6 @@
"metadata": {},
"outputs": [],
"source": [
"from starfish.image import Filter\n",
"\n",
"# filter raw data\n",
"masking_radius = 15\n",
"filt = Filter.WhiteTophat(masking_radius, is_volume=False)\n",
@@ -235,7 +228,7 @@
"from starfish.image import ApplyTransform, LearnTransform\n",
"\n",
"learn_translation = LearnTransform.Translation(reference_stack=dots, axes=Axes.ROUND, upsampling=1000)\n",
"transforms_list = learn_translation.run(per_round_max_projector.run(imgs))\n",
"transforms_list = learn_translation.run(imgs.reduce({Axes.CH, Axes.ZPLANE}, func=\"max\"))\n",
"warp = ApplyTransform.Warp()\n",
"registered_imgs = warp.run(filtered_imgs, transforms_list=transforms_list, in_place=False, verbose=True)"
]
@@ -266,8 +259,7 @@
" measurement_type='mean',\n",
")\n",
"\n",
"dots_max_projector = Filter.Reduce((Axes.ROUND, Axes.ZPLANE), func=\"max\", module=FunctionSource.np)\n",
"dots_max = dots_max_projector.run(dots)\n",
"dots_max = dots.reduce((Axes.ROUND, Axes.ZPLANE), func=\"max\", module=FunctionSource.np)\n",
"spots = bd.run(image_stack=registered_imgs, reference_image=dots_max)\n",
"\n",
"decoder = DecodeSpots.PerRoundMaxChannel(codebook=experiment.codebook)\n",
@@ -300,10 +292,10 @@
"stain_thresh = .22 # binary mask for overall cells // binarization of stain\n",
"min_dist = 57\n",
"\n",
"registered_mp = per_round_max_projector.run(registered_imgs).xarray.squeeze()\n",
"registered_mp = registered_imgs.reduce({Axes.CH, Axes.ZPLANE}, func=\"max\").xarray.squeeze()\n",
"stain = np.mean(registered_mp, axis=0)\n",
"stain = stain/stain.max()\n",
"nuclei = rcz_max_projector.run(nuclei)\n",
"nuclei = nuclei.reduce({Axes.ROUND, Axes.CH, Axes.ZPLANE}, func=\"max\")\n",
"\n",
"\n",
"seg = Segment.Watershed(\n",
@@ -363,9 +355,9 @@
"GENE2 = 'VIM'\n",
"\n",
"rgb = np.zeros(registered_imgs.tile_shape + (3,))\n",
"nuclei_numpy = rcz_max_projector.run(nuclei)._squeezed_numpy(Axes.ROUND, Axes.CH, Axes.ZPLANE)\n",
"nuclei_numpy = nuclei.reduce({Axes.ROUND, Axes.CH, Axes.ZPLANE}, func=\"max\")._squeezed_numpy(Axes.ROUND, Axes.CH, Axes.ZPLANE)\n",
"rgb[:,:,0] = nuclei_numpy\n",
"dots_numpy = rcz_max_projector.run(dots)._squeezed_numpy(Axes.ROUND, Axes.CH, Axes.ZPLANE)\n",
"dots_numpy = dots.reduce({Axes.ROUND, Axes.CH, Axes.ZPLANE}, func=\"max\")._squeezed_numpy(Axes.ROUND, Axes.CH, Axes.ZPLANE)\n",
"rgb[:,:,1] = dots_numpy\n",
"do = rgb2gray(rgb)\n",
"do = do/(do.max())\n",
5 changes: 1 addition & 4 deletions notebooks/MERFISH.ipynb
@@ -324,9 +324,6 @@
"from scipy.stats import scoreatpercentile\n",
"import warnings\n",
"\n",
"all_max_projector = Filter.Reduce(\n",
" (Axes.ROUND, Axes.CH, Axes.ZPLANE,), func=\"max\", module=FunctionSource.np)\n",
"\n",
"f, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 5))\n",
"\n",
"with warnings.catch_warnings():\n",
@@ -337,7 +334,7 @@
" show_image(np.squeeze(prop_results.decoded_image)*(mask > 2), cmap='nipy_spectral', ax=ax1)\n",
" ax1.axes.set_axis_off()\n",
"\n",
" mp_numpy = all_max_projector.run(filtered_imgs)._squeezed_numpy(\n",
" mp_numpy = filtered_imgs.reduce({Axes.ROUND, Axes.CH, Axes.ZPLANE}, func=\"max\")._squeezed_numpy(\n",
" Axes.ROUND, Axes.CH, Axes.ZPLANE)\n",
" clim = scoreatpercentile(mp_numpy, [0.5, 99.5])\n",
" show_image(mp_numpy, clim=clim, ax=ax2)\n",
10 changes: 4 additions & 6 deletions notebooks/STARmap.ipynb
@@ -78,8 +78,7 @@
"metadata": {},
"outputs": [],
"source": [
"ch_r_max_projector = starfish.image.Filter.Reduce((Axes.CH, Axes.ROUND), func=\"max\")\n",
"ch_r_max_projection = ch_r_max_projector.run(stack)\n",
"ch_r_max_projection = stack.reduce({Axes.CH, Axes.ROUND}, func=\"max\")\n",
"\n",
"f = plt.figure(dpi=150)\n",
"imshow_plane(ch_r_max_projection, sel={Axes.ZPLANE: 15})"
@@ -134,8 +133,7 @@
"metadata": {},
"outputs": [],
"source": [
"ch_z_max_projector = starfish.image.Filter.Reduce((Axes.CH, Axes.ZPLANE), func=\"max\")\n",
"projection = ch_r_max_projector.run(stack)\n",
"projection = stack.reduce({Axes.CH, Axes.ZPLANE}, func=\"max\")\n",
"reference_image = projection.sel({Axes.ROUND: 0})\n",
"\n",
"ltt = starfish.image.LearnTransform.Translation(\n",
@@ -195,7 +193,7 @@
"metadata": {},
"outputs": [],
"source": [
"post_projection = ch_z_max_projector.run(stack)"
"post_projection = stack.reduce({Axes.CH, Axes.ZPLANE}, func=\"max\")"
]
},
{
@@ -374,4 +372,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
-}
+}
5 changes: 2 additions & 3 deletions notebooks/osmFISH.ipynb
@@ -108,8 +108,7 @@
"metadata": {},
"outputs": [],
"source": [
"z_max_projector = Filter.Reduce((Axes.ZPLANE,))\n",
"mp = z_max_projector.run(imgs_ghp_laplace)"
"mp = imgs_ghp_laplace.reduce({Axes.ZPLANE}, func=\"max\")"
]
},
{
@@ -125,7 +124,7 @@
"metadata": {},
"outputs": [],
"source": [
"single_plane = imgs.max_proj(Axes.ZPLANE).xarray.sel({Axes.CH:0}).squeeze()\n",
"single_plane = imgs.reduce({Axes.ZPLANE}, func=\"max\").xarray.sel({Axes.CH:0}).squeeze()\n",
"single_plane_filtered = mp.xarray.sel({Axes.CH: 0}).squeeze()\n",
"\n",
"plt.figure(figsize=(10,10))\n",
24 changes: 8 additions & 16 deletions notebooks/py/ISS.py
@@ -90,13 +90,8 @@
# EPY: START code
from starfish.image import Filter

-rcz_max_projector = Filter.Reduce(
-    (Axes.ROUND, Axes.CH, Axes.ZPLANE,), func="max", module=FunctionSource.np)
-per_round_max_projector = Filter.Reduce(
-    (Axes.CH, Axes.ZPLANE,), func="max", module=FunctionSource.np)
-
dots = fov.get_image("dots")
-dots_single_plane = rcz_max_projector.run(dots)
+dots_single_plane = dots.reduce({Axes.ROUND, Axes.CH, Axes.ZPLANE}, func="max")
imshow_plane(dots_single_plane, title="Anchor channel, all RNA molecules")
# EPY: END code

@@ -106,7 +101,7 @@

# EPY: START code
nuclei = fov.get_image("nuclei")
-nuclei_single_plane = rcz_max_projector.run(nuclei)
+nuclei_single_plane = nuclei.reduce({Axes.ROUND, Axes.CH, Axes.ZPLANE}, func="max")
imshow_plane(nuclei_single_plane, title="Nuclei (DAPI) channel")
# EPY: END code

@@ -117,8 +112,6 @@
# EPY: END markdown

# EPY: START code
-from starfish.image import Filter
-
# filter raw data
masking_radius = 15
filt = Filter.WhiteTophat(masking_radius, is_volume=False)
@@ -156,7 +149,7 @@
from starfish.image import ApplyTransform, LearnTransform

learn_translation = LearnTransform.Translation(reference_stack=dots, axes=Axes.ROUND, upsampling=1000)
-transforms_list = learn_translation.run(per_round_max_projector.run(imgs))
+transforms_list = learn_translation.run(imgs.reduce({Axes.CH, Axes.ZPLANE}, func="max"))
warp = ApplyTransform.Warp()
registered_imgs = warp.run(filtered_imgs, transforms_list=transforms_list, in_place=False, verbose=True)
# EPY: END code
@@ -179,8 +172,7 @@
measurement_type='mean',
)

-dots_max_projector = Filter.Reduce((Axes.ROUND, Axes.ZPLANE), func="max", module=FunctionSource.np)
-dots_max = dots_max_projector.run(dots)
+dots_max = dots.reduce((Axes.ROUND, Axes.ZPLANE), func="max", module=FunctionSource.np)
spots = bd.run(image_stack=registered_imgs, reference_image=dots_max)

decoder = DecodeSpots.PerRoundMaxChannel(codebook=experiment.codebook)
@@ -205,10 +197,10 @@
stain_thresh = .22 # binary mask for overall cells // binarization of stain
min_dist = 57

-registered_mp = per_round_max_projector.run(registered_imgs).xarray.squeeze()
+registered_mp = registered_imgs.reduce({Axes.CH, Axes.ZPLANE}, func="max").xarray.squeeze()
stain = np.mean(registered_mp, axis=0)
stain = stain/stain.max()
-nuclei = rcz_max_projector.run(nuclei)
+nuclei = nuclei.reduce({Axes.ROUND, Axes.CH, Axes.ZPLANE}, func="max")


seg = Segment.Watershed(
@@ -249,9 +241,9 @@
GENE2 = 'VIM'

rgb = np.zeros(registered_imgs.tile_shape + (3,))
-nuclei_numpy = rcz_max_projector.run(nuclei)._squeezed_numpy(Axes.ROUND, Axes.CH, Axes.ZPLANE)
+nuclei_numpy = nuclei.reduce({Axes.ROUND, Axes.CH, Axes.ZPLANE}, func="max")._squeezed_numpy(Axes.ROUND, Axes.CH, Axes.ZPLANE)
rgb[:,:,0] = nuclei_numpy
-dots_numpy = rcz_max_projector.run(dots)._squeezed_numpy(Axes.ROUND, Axes.CH, Axes.ZPLANE)
+dots_numpy = dots.reduce({Axes.ROUND, Axes.CH, Axes.ZPLANE}, func="max")._squeezed_numpy(Axes.ROUND, Axes.CH, Axes.ZPLANE)
rgb[:,:,1] = dots_numpy
do = rgb2gray(rgb)
do = do/(do.max())
5 changes: 1 addition & 4 deletions notebooks/py/MERFISH.py
@@ -223,9 +223,6 @@
from scipy.stats import scoreatpercentile
import warnings

-all_max_projector = Filter.Reduce(
-    (Axes.ROUND, Axes.CH, Axes.ZPLANE,), func="max", module=FunctionSource.np)
-
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 5))

with warnings.catch_warnings():
@@ -236,7 +233,7 @@
show_image(np.squeeze(prop_results.decoded_image)*(mask > 2), cmap='nipy_spectral', ax=ax1)
ax1.axes.set_axis_off()

-    mp_numpy = all_max_projector.run(filtered_imgs)._squeezed_numpy(
+    mp_numpy = filtered_imgs.reduce({Axes.ROUND, Axes.CH, Axes.ZPLANE}, func="max")._squeezed_numpy(
Axes.ROUND, Axes.CH, Axes.ZPLANE)
clim = scoreatpercentile(mp_numpy, [0.5, 99.5])
show_image(mp_numpy, clim=clim, ax=ax2)
8 changes: 3 additions & 5 deletions notebooks/py/STARmap.py
@@ -55,8 +55,7 @@
# EPY: END code

# EPY: START code
-ch_r_max_projector = starfish.image.Filter.Reduce((Axes.CH, Axes.ROUND), func="max")
-ch_r_max_projection = ch_r_max_projector.run(stack)
+ch_r_max_projection = stack.reduce({Axes.CH, Axes.ROUND}, func="max")

f = plt.figure(dpi=150)
imshow_plane(ch_r_max_projection, sel={Axes.ZPLANE: 15})
@@ -95,8 +94,7 @@
# EPY: END markdown

# EPY: START code
-ch_z_max_projector = starfish.image.Filter.Reduce((Axes.CH, Axes.ZPLANE), func="max")
-projection = ch_r_max_projector.run(stack)
+projection = stack.reduce({Axes.CH, Axes.ZPLANE}, func="max")
reference_image = projection.sel({Axes.ROUND: 0})

ltt = starfish.image.LearnTransform.Translation(
@@ -132,7 +130,7 @@
# EPY: END markdown

# EPY: START code
-post_projection = ch_z_max_projector.run(stack)
+post_projection = stack.reduce({Axes.CH, Axes.ZPLANE}, func="max")
# EPY: END code

# EPY: START code
5 changes: 2 additions & 3 deletions notebooks/py/osmFISH.py
@@ -68,16 +68,15 @@
# EPY: END markdown

# EPY: START code
-z_max_projector = Filter.Reduce((Axes.ZPLANE,))
-mp = z_max_projector.run(imgs_ghp_laplace)
+mp = imgs_ghp_laplace.reduce({Axes.ZPLANE}, func="max")
# EPY: END code

# EPY: START markdown
#We can now visualize our data before and after filtering
# EPY: END markdown

# EPY: START code
-single_plane = imgs.max_proj(Axes.ZPLANE).xarray.sel({Axes.CH:0}).squeeze()
+single_plane = imgs.reduce({Axes.ZPLANE}, func="max").xarray.sel({Axes.CH:0}).squeeze()
single_plane_filtered = mp.xarray.sel({Axes.CH: 0}).squeeze()

plt.figure(figsize=(10,10))
3 changes: 1 addition & 2 deletions workflows/wdl/iss_published/recipe.py
@@ -36,8 +36,7 @@ def process_fov(field_num: int, experiment_str: str):

print("Learning Transform")
learn_translation = LearnTransform.Translation(reference_stack=dots, axes=Axes.ROUND, upsampling=1000)
-max_projector = Filter.Reduce((Axes.CH, Axes.ZPLANE))
-transforms_list = learn_translation.run(max_projector.run(imgs))
+transforms_list = learn_translation.run(imgs.reduce({Axes.CH, Axes.ZPLANE}, func="max"))

print("Applying transform")
warp = ApplyTransform.Warp()
5 changes: 2 additions & 3 deletions workflows/wdl/iss_spaceTX/recipe.py
@@ -24,8 +24,7 @@ def process_fov(field_num: int, experiment_str: str):

fov = experiment[fov_str]
imgs = fov.get_image(starfish.FieldOfView.PRIMARY_IMAGES)
-max_projector = starfish.image.Filter.Reduce((Axes.CH, Axes.ZPLANE))
-dots = max_projector.run(imgs)
+dots = imgs.reduce({Axes.CH, Axes.ZPLANE}, func="max")

# filter
filt = starfish.image.Filter.WhiteTophat(masking_radius=15, is_volume=False)
@@ -53,4 +52,4 @@ def process_fov(field_num: int, experiment_str: str):

# save results
df = decoded.to_decoded_dataframe()
-return df
+return df
