diff --git a/.travis.yml b/.travis.yml index 634f5962a..3e8a5e769 100644 --- a/.travis.yml +++ b/.travis.yml @@ -57,9 +57,6 @@ jobs: - name: 3D smFISH data processing example if: type = push and branch =~ /^(master|merge)/ script: make install-dev 3d_smFISH.py - - name: iss_cli.sh data processing example - if: type = push and branch =~ /^(master|merge)/ - script: make install-dev iss_cli.sh - name: iss_pipeline.py data processing example if: type = push and branch =~ /^(master|merge)/ script: make TESTING=1 install-dev iss_pipeline.py diff --git a/docs/source/_static/data_processing_examples/iss_cli.sh b/docs/source/_static/data_processing_examples/iss_cli.sh deleted file mode 100644 index c3c41a8ea..000000000 --- a/docs/source/_static/data_processing_examples/iss_cli.sh +++ /dev/null @@ -1,79 +0,0 @@ -mkdir -p /tmp/starfish/max_projected -mkdir -p /tmp/starfish/transforms -mkdir -p /tmp/starfish/registered -mkdir -p /tmp/starfish/filtered -mkdir -p /tmp/starfish/results - -URL=https://d2nhj9g34unfro.cloudfront.net/20181005/ISS-TEST/experiment.json - -starfish validate experiment $URL - -starfish Filter \ - -i @${URL}'[fov_001][primary]' \ - -o /tmp/starfish/max_projected/primary_images.json \ - MaxProject \ - --dims c --dims z - -starfish LearnTransform \ - -i /tmp/starfish/max_projected/primary_images.json \ - -o /tmp/starfish/transforms/transforms.json \ - Translation \ - --reference-stack @$URL'[fov_001][dots]' \ - --upsampling 1000 \ - --axes r - -starfish ApplyTransform \ - -i @$URL'[fov_001][primary]' \ - -o /tmp/starfish/registered/primary_images.json \ - --transformation-list /tmp/starfish/transforms/transforms.json \ - Warp - -starfish Filter \ - -i /tmp/starfish/registered/primary_images.json \ - -o /tmp/starfish/filtered/primary_images.json \ - WhiteTophat \ - --masking-radius 15 - -starfish Filter \ - -i @$URL'[fov_001][nuclei]' \ - -o /tmp/starfish/filtered/nuclei.json \ - WhiteTophat \ - --masking-radius 15 - -starfish Filter \ - -i 
@$URL'[fov_001][dots]' \ - -o /tmp/starfish/filtered/dots.json \ - WhiteTophat \ - --masking-radius 15 - -starfish DetectSpots \ - --input /tmp/starfish/filtered/primary_images.json \ - --output /tmp/starfish/results/spots.nc \ - --blobs-stack /tmp/starfish/filtered/dots.json \ - --blobs-axis r --blobs-axis c \ - BlobDetector \ - --min-sigma 4 \ - --max-sigma 6 \ - --num-sigma 20 \ - --threshold 0.01 - -starfish Segment \ - --primary-images /tmp/starfish/filtered/primary_images.json \ - --nuclei /tmp/starfish/filtered/nuclei.json \ - -o /tmp/starfish/results/label_image.png \ - Watershed \ - --nuclei-threshold .16 \ - --input-threshold .22 \ - --min-distance 57 - -starfish AssignTargets \ - --label-image /tmp/starfish/results/label_image.png \ - --intensities /tmp/starfish/results/spots.nc \ - --output /tmp/starfish/results/targeted-spots.nc \ - Label - -starfish Decode \ - -i /tmp/starfish/results/targeted-spots.nc \ - --codebook @$URL \ - -o /tmp/starfish/results/decoded-spots.nc \ - PerRoundMaxChannel diff --git a/docs/source/_static/tutorials/exec_running_a_pipeline.py b/docs/source/_static/tutorials/exec_running_a_pipeline.py deleted file mode 100644 index 03b54ba36..000000000 --- a/docs/source/_static/tutorials/exec_running_a_pipeline.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -Running a Pipeline -================== -This example loads one of the pipeline recipes used in starfish's unit tests and executes it from -the recipe API. Subsequently, we execute the same recipe using the CLI. -""" - -################################################################################################### -# This is a hack to load the recipe from the existing code. 
-import os -import pathlib -from starfish.test.full_pipelines import recipe - -recipe_tests_folder = pathlib.Path(recipe.__file__).parent -iss_recipe_file = recipe_tests_folder / "iss_recipe.txt" - -with open(os.fspath(iss_recipe_file), "r") as fh: - recipe_contents = fh.read() - -print(recipe_contents) - -################################################################################################### -# Providing data -# -------------- -# -# The recipe references `file_input[0]`...`file_input[3]`. We provide values for these variables to -# the recipe, and when the recipe executes, the `file_input[n]` references are replaced by the -# values. -# -# Recipes support referencing an image or codebook within an experiment through a special syntax: -# ``@[fov_name][img_name]`` will load the experiment.json from and return -# the ``img_name`` image form the ``fov_name`` fov. - -experiment_url = "https://d2nhj9g34unfro.cloudfront.net/20181005/ISS-TEST/experiment.json" -primary_image = f"@{experiment_url}[fov_001][primary]" -dots_image = f"@{experiment_url}[fov_001][dots]" -nuclei_image = f"@{experiment_url}[fov_001][nuclei]" -codebook = f"@{experiment_url}" -print( - f"primary_image: {primary_image}\n" - f"dots_image: {dots_image}\n" - f"nuclei_image: {nuclei_image}\n" - f"codebook: {codebook}\n") - -################################################################################################### -# Writing out data -# ---------------- -# -# Any steps that assign a value to ``file_output`` will require an output file. Let's set up a -# directory to write our outputs to. - -import tempfile -tempdir = tempfile.TemporaryDirectory() -output_path_api = pathlib.Path(tempdir.name) / "decoded_spots_api.nc" -output_path_cli = pathlib.Path(tempdir.name) / "decoded_spots_cli.nc" - -################################################################################################### -# Execute the recipe -# ------------------ -# -# Let's execute the recipe! 
- -from starfish.core.recipe import Recipe -recipe = Recipe( - recipe_contents, - [primary_image, dots_image, nuclei_image, codebook], - [os.fspath(output_path_api)] -) -recipe.run_and_save() - -################################################################################################### -# Load up results -# --------------- -# -# We can now load up the results. -import numpy as np -import pandas as pd -from starfish import IntensityTable -from starfish.types import Features - -intensity_table = IntensityTable.open_netcdf(os.fspath(output_path_api)) - -genes, counts = np.unique( - intensity_table.coords[Features.TARGET], return_counts=True) -gene_counts = pd.Series(counts, genes) -print(gene_counts) - -################################################################################################### -# Execute the recipe (from the command line) -# ------------------------------------------ -# -# We can also execute the recipe from the command line. The command line will look like -cmdline = [ - "starfish", - "recipe", - "--recipe", - os.fspath(iss_recipe_file), -] -for input_file in (primary_image, dots_image, nuclei_image, codebook): - cmdline.append("--input") - cmdline.append(input_file) -cmdline.append("--output") -cmdline.append(os.fspath(output_path_cli)) - -cmdline_str = " ".join(cmdline) - -print(f"% {cmdline_str}") - -import subprocess -subprocess.check_output(cmdline) - -################################################################################################### -# Load up results from the command line invocation -# ------------------------------------------------ -# -# This should produce identical results to the API invocation. 
-intensity_table = IntensityTable.open_netcdf(os.fspath(output_path_cli)) - -genes, counts = np.unique( - intensity_table.coords[Features.TARGET], return_counts=True) -gene_counts = pd.Series(counts, genes) -print(gene_counts) - diff --git a/docs/source/api/data_structures/index.rst b/docs/source/api/data_structures/index.rst index b337ea224..4f7aa8edb 100644 --- a/docs/source/api/data_structures/index.rst +++ b/docs/source/api/data_structures/index.rst @@ -56,6 +56,3 @@ serialization for use in single-cell analysis environments such as Seurat_ and S .. toctree:: intensity_table.rst - -.. toctree:: - pipeline_component.rst diff --git a/docs/source/api/data_structures/pipeline_component.rst b/docs/source/api/data_structures/pipeline_component.rst deleted file mode 100644 index 93fbb5672..000000000 --- a/docs/source/api/data_structures/pipeline_component.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. _pipeline_component: - -Pipeline Component -================== - -.. automodule:: starfish.core.pipeline.pipelinecomponent - :members: diff --git a/docs/source/api/index.rst b/docs/source/api/index.rst index d524c7c8e..5d5e2183a 100644 --- a/docs/source/api/index.rst +++ b/docs/source/api/index.rst @@ -18,9 +18,6 @@ API .. toctree:: spots/index.rst -.. toctree:: - recipe/index.rst - .. toctree:: types/index.rst diff --git a/docs/source/api/recipe/index.rst b/docs/source/api/recipe/index.rst deleted file mode 100644 index 6ea3a8cbc..000000000 --- a/docs/source/api/recipe/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. _Recipe: - -Recipe -====== - -.. autoclass:: starfish.core.recipe.Recipe - :members: diff --git a/docs/source/creating_an_image_processing_pipeline/index.rst b/docs/source/creating_an_image_processing_pipeline/index.rst index 5e3a00330..723758746 100644 --- a/docs/source/creating_an_image_processing_pipeline/index.rst +++ b/docs/source/creating_an_image_processing_pipeline/index.rst @@ -75,20 +75,6 @@ origin. At this point, it's trivial to create a cell x gene matrix. 
tutorials/exec_feature_identification_and_annotation.rst -Putting Together a Pipeline Recipe and Running it -------------------------------------------------- - -Pipeline recipes should describe the computational steps to get from the input files to the output files. Pipeline -recipes should manage file input and output through the injected ``file_inputs`` and ``file_outputs`` variables. When a -recipe is executed, the user should provide a list of file paths or URLs, which are loaded as starfish objects (i.e., -:ref:`ImageStack`, :ref:`Codebook`, :ref:`IntensityTable`, or :ref:`ExpressionMatrix`), and provided to the recipe in -the ``file_inputs`` array. Correspondingly, any results saved to the ``file_outputs`` array will be written to the -file paths users provide. - -.. toctree:: - :maxdepth: 1 - - tutorials/exec_running_a_pipeline.rst Old Content not to be deleted yet. ---------------------------------- diff --git a/docs/source/getting_started/example_workflow/index.rst b/docs/source/getting_started/example_workflow/index.rst index 97551eb35..ea1f595e4 100644 --- a/docs/source/getting_started/example_workflow/index.rst +++ b/docs/source/getting_started/example_workflow/index.rst @@ -40,13 +40,3 @@ The above steps can be recapitulated using starfish as follows: .. literalinclude:: ../../_static/data_processing_examples/iss_pipeline.py -Using the CLI -------------- - -Starfish has a fully-featured CLI that mimics the API such that one does not need to write python -code to use starfish. Running the commands below will re-produce the notebook_ results using -starfish's CLI: - -.. _notebook: ../../_static/data_processing_examples/iss_pipeline.py - -.. literalinclude:: ../../_static/data_processing_examples/iss_cli.sh diff --git a/docs/source/index.rst b/docs/source/index.rst index fb119fc89..a987f138a 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -30,7 +30,7 @@ see the :ref:`Getting Started ` section. 
Starfish's documentation also contains instructions on :ref:`creating image-based transcriptomics processing pipelines `, tutorials on running -starfish using the CLI, API, and workflow runner, and a worked example of contributing code to +starfish using the API, and a worked example of contributing code to starfish. Finally, advanced users can examine the :ref:`Data Structures ` and :ref:`Help & Reference ` sections to learn more details about _starfish_ and its object models. @@ -66,7 +66,6 @@ To see the code or report a bug, please visit the `github repository * Spot-Finding: :ref:`API ` * Decoding: :ref:`API ` * Segmenting: :ref:`API ` -* Pipelining: :ref:`API ` .. raw:: html diff --git a/docs/source/usage/fov-builder/fov-builder.rst b/docs/source/usage/fov-builder/fov-builder.rst deleted file mode 100644 index 28711417f..000000000 --- a/docs/source/usage/fov-builder/fov-builder.rst +++ /dev/null @@ -1,35 +0,0 @@ -.. _cli_build: - -Synthetic experiments -===================== - -Building synthetic SpaceTx-specification compliant experiments --------------------------------------------------------------- - -starfish provides a tool to construct example datasets that can be used to test software for use with our formats. -This tool generates spaceTx-specification experiments with tunable sizes and shapes, but the images are randomly generated and do not contain biologically meaningful data. - -Usage ------ - -starfish build --help will provide instructions on how to use the tool: - -.. program-output:: env MPLBACKEND=Agg starfish build --help - -Examples --------- - -Build a 3-field of view experiment with 2 channels and 8 hybridization rounds per primary image stack that samples z 30 times. -The experiment has both a dots image and a nuclei image, but these have only one channel and round each. -The size of the (x,y) tiles cannot be modified at this time. 
- -:: - - mkdir tmp - OUTPUT_DIR=tmp - starfish build \ - --fov-count 3 \ - --primary-image-dimensions '{"r": 8, "c": 2, "z": 30}' \ - --dots-dimensions '{"r": 1, "c": 1, "z": 30}' \ - --nuclei-dimensions '{"r": 1, "c": 1, "z": 30}' \ - ${OUTPUT_DIR} diff --git a/docs/source/usage/index.rst b/docs/source/usage/index.rst index d94a22101..868b32bde 100644 --- a/docs/source/usage/index.rst +++ b/docs/source/usage/index.rst @@ -182,29 +182,15 @@ analysis with *starfish*. Background subtraction, for example, is handled by Starfish. If there is confusion about what types of image pre-processing should be applied, please open an issue. - -Getting started with the CLI ----------------------------- - -The simplest way to get started with starfish for most users will be to try out the -command-line interface (CLI). After following the :ref:`installation ` -instructions, a ``starfish`` command will be available. Running ``starfish --help`` -will print out the subcommands that are available. - -.. program-output:: env MPLBACKEND=Agg starfish --help - -.. toctree:: - :maxdepth: 3 - :caption: CLI: - -.. toctree:: - fov-builder/fov-builder.rst - +Configuration +-------------- .. toctree:: - validation/index.rst + configuration/index.rst +Validation +----------- .. toctree:: - configuration/index.rst + validation/index.rst Data Formatting Examples diff --git a/docs/source/usage/validation/index.rst b/docs/source/usage/validation/index.rst index fa12b5fd5..a4517a0ba 100644 --- a/docs/source/usage/validation/index.rst +++ b/docs/source/usage/validation/index.rst @@ -17,47 +17,11 @@ starfish validate --help will provide instructions on how to use the tool: Examples -------- -As a simple example, we can show that the :ref:`synthetic experiment ` -created in the previous section is valid: - :: starfish validate experiment tmp/experiment.json > /dev/null && echo ok -Building a :ref:`synthetic experiment ` can provide you with a template that -you can use to model your own data. 
If you then modify that experiment incorrectly, you -might see the following validation warning: - -:: - - $ starfish validate experiment tmp/experiment.json - - _ __ _ _ - | | / _(_) | | - ___| |_ __ _ _ __| |_ _ ___| |__ - / __| __/ _` | '__| _| / __| '_ ` - \__ \ || (_| | | | | | \__ \ | | | - |___/\__\__,_|_| |_| |_|___/_| |_| - - - /scratch/repos/starfish/sptx_format/util.py:82: UserWarning: - Additional properties are not allowed ('nuclei_dimensions', 'dots_dimensions' were unexpected) - Schema: unknown - Subschema level: 0 - Path to error: properties/images/additionalProperties - Filename: experiment.json - - warnings.warn(message) - /scratch/repos/starfish/sptx_format/util.py:82: UserWarning: - 'codeword' is a required property - Schema: unknown - Subschema level: 0 - Path to error: properties/mappings/items/required - Filename: codebook.json - - warnings.warn(message) - Validating the experiment, validates all of the included files. These files can also be individually validated: :: diff --git a/starfish/__init__.py b/starfish/__init__.py index 01685fa8f..83b33bef6 100644 --- a/starfish/__init__.py +++ b/starfish/__init__.py @@ -18,5 +18,4 @@ from .core.expression_matrix.expression_matrix import ExpressionMatrix from .core.imagestack.imagestack import ImageStack from .core.intensity_table.intensity_table import IntensityTable -from .core.recipe import cli from .core.starfish import starfish diff --git a/starfish/core/experiment/builder/cli.py b/starfish/core/experiment/builder/cli.py deleted file mode 100644 index 82bf71bf9..000000000 --- a/starfish/core/experiment/builder/cli.py +++ /dev/null @@ -1,74 +0,0 @@ -import json - -from slicedimage import ImageFormat - -from starfish.core.types import Axes -from starfish.core.util import click -from . 
import write_experiment_json - - -AUX_IMAGE_NAMES = { - 'nuclei', - 'dots', -} - - -class StarfishIndex(click.ParamType): - - name = "starfish-index" - - def convert(self, spec_json, param, ctx): - try: - spec = json.loads(spec_json) - except json.decoder.JSONDecodeError: - self.fail( - "Could not parse {} into a valid index specification.".format(spec_json)) - - return { - Axes.ROUND: spec.get(Axes.ROUND, 1), - Axes.CH: spec.get(Axes.CH, 1), - Axes.ZPLANE: spec.get(Axes.ZPLANE, 1), - } - - -def dimensions_option(name, required): - return click.option( - "--{}-dimensions".format(name), - type=StarfishIndex(), required=required, - help="Dimensions for the {} images. Should be a json dict, with {}, {}, " - "and {} as the possible keys. The value should be the shape along that " - "dimension. If a key is not present, the value is assumed to be 0." - .format( - name, - Axes.ROUND.value, - Axes.CH.value, - Axes.ZPLANE.value)) - - -decorators = [ - click.command(), - click.argument("output_dir", type=click.Path(exists=True, file_okay=False, writable=True)), - click.option("--fov-count", type=int, required=True, help="Number of FOVs in this experiment."), - dimensions_option("primary-image", True), -] -for image_name in AUX_IMAGE_NAMES: - decorators.append(dimensions_option(image_name, False)) - - -def build(output_dir, fov_count, primary_image_dimensions, **kwargs): - """generate synthetic experiments""" - - aux_names_to_dimension_cardinality = { - aux_name: size - for aux_name, size in kwargs.items() - if size is not None - } - write_experiment_json( - output_dir, fov_count, ImageFormat.TIFF, - primary_image_dimensions=primary_image_dimensions, - aux_name_to_dimensions=aux_names_to_dimension_cardinality, - ) - - -for decorator in reversed(decorators): - build = decorator(build) diff --git a/starfish/core/experiment/builder/test/test_build.py b/starfish/core/experiment/builder/test/test_build.py deleted file mode 100644 index 0d8a56ef0..000000000 --- 
a/starfish/core/experiment/builder/test/test_build.py +++ /dev/null @@ -1,47 +0,0 @@ -import os -import unittest - -from starfish.core.util import exec - - -class TestWithBuildData(unittest.TestCase): - - STAGES = ( - [ - "starfish", "build", - "--fov-count=2", '--primary-image-dimensions={"z": 3}', - lambda tempdir: tempdir - ], - # Old-style - [ - "starfish", "validate", "--experiment-json", - lambda tempdir: os.sep.join([tempdir, "experiment.json"]) - ], - # New-style - [ - "starfish", "validate", "experiment", - lambda tempdir: os.sep.join([tempdir, "experiment.json"]) - ], - # Validate other input files - [ - "starfish", "validate", "experiment", - lambda tempdir, *args, **kwargs: os.sep.join([tempdir, "experiment.json"]) - ], - [ - "starfish", "validate", "codebook", - lambda tempdir, *args, **kwargs: os.sep.join([tempdir, "codebook.json"]) - ], - [ - "starfish", "validate", "manifest", - lambda tempdir, *args, **kwargs: os.sep.join([tempdir, "primary.json"]) - ], - [ - "starfish", "validate", "fov", - lambda tempdir, *args, **kwargs: os.sep.join([tempdir, "primary-fov_000.json"]) - ], - ) - - def test_run_build(self): - exec.stages( - TestWithBuildData.STAGES, - keep_data=("TEST_BUILD_KEEP_DATA" in os.environ)) diff --git a/starfish/core/image/Filter/__init__.py b/starfish/core/image/Filter/__init__.py new file mode 100644 index 000000000..b3eab6209 --- /dev/null +++ b/starfish/core/image/Filter/__init__.py @@ -0,0 +1,16 @@ +from .bandpass import Bandpass +from .clip import Clip +from .clip_percentile_to_zero import ClipPercentileToZero +from .clip_value_to_zero import ClipValueToZero +from .element_wise_mult import ElementWiseMultiply +from .gaussian_high_pass import GaussianHighPass +from .gaussian_low_pass import GaussianLowPass +from .laplace import Laplace +from .linear_unmixing import LinearUnmixing +from .match_histograms import MatchHistograms +from .max_proj import MaxProject +from .mean_high_pass import MeanHighPass +from .reduce import Reduce 
+from .richardson_lucy_deconvolution import DeconvolvePSF +from .white_tophat import WhiteTophat +from .zero_by_channel_magnitude import ZeroByChannelMagnitude diff --git a/starfish/core/image/Filter/_base.py b/starfish/core/image/Filter/_base.py new file mode 100644 index 000000000..82b6d1bb9 --- /dev/null +++ b/starfish/core/image/Filter/_base.py @@ -0,0 +1,12 @@ +from abc import abstractmethod + +from starfish.core.imagestack.imagestack import ImageStack +from starfish.core.pipeline.algorithmbase import AlgorithmBase + + +class FilterAlgorithmBase(metaclass=AlgorithmBase): + + @abstractmethod + def run(self, stack: ImageStack, *args) -> ImageStack: + """Perform filtering of an image stack""" + raise NotImplementedError() diff --git a/starfish/core/image/_filter/bandpass.py b/starfish/core/image/Filter/bandpass.py similarity index 83% rename from starfish/core/image/_filter/bandpass.py rename to starfish/core/image/Filter/bandpass.py index 91af06d1d..d40e3e43c 100644 --- a/starfish/core/image/_filter/bandpass.py +++ b/starfish/core/image/Filter/bandpass.py @@ -7,7 +7,6 @@ from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Clip, Number -from starfish.core.util import click from ._base import FilterAlgorithmBase from .util import determine_axes_to_group_by @@ -140,25 +139,3 @@ def run( verbose=verbose, ) return result - - @staticmethod - @click.command("Bandpass") - @click.option( - "--lshort", type=float, help="filter signals below this frequency") - @click.option( - "--llong", type=int, help="filter signals above this frequency") - @click.option( - "--threshold", default=0, type=float, help="zero pixels below this intensity value") - @click.option( - "--truncate", default=4, type=float, - help="truncate the filter at this many standard deviations") - @click.option( - "--clip-method", default=Clip.CLIP, type=Clip, - help="method to constrain data to [0,1]. 
options: 'clip', 'scale_by_image', " - "'scale_by_chunk'") - @click.pass_context - def _cli(ctx, lshort, llong, threshold, truncate, clip_method): - ctx.obj["component"]._cli_run( - ctx, - Bandpass(lshort, llong, threshold, truncate, clip_method) - ) diff --git a/starfish/core/image/_filter/call_bases.py b/starfish/core/image/Filter/call_bases.py similarity index 90% rename from starfish/core/image/_filter/call_bases.py rename to starfish/core/image/Filter/call_bases.py index d0f569478..8144c1925 100644 --- a/starfish/core/image/_filter/call_bases.py +++ b/starfish/core/image/Filter/call_bases.py @@ -5,7 +5,6 @@ import xarray as xr from starfish.core.imagestack.imagestack import ImageStack -from starfish.core.util import click from starfish.types import Axes from ._base import FilterAlgorithmBase @@ -158,15 +157,3 @@ def run( group_by=group_by, verbose=verbose, in_place=in_place, n_processes=n_processes ) return result - - @staticmethod - @click.command("CallBases") - @click.option( - "--intensity-threshold", default=0, type=float, help="Intensity threshold for a base call") - @click.option( - "--quality-threshold", default=0, type=float, help="Quality threshold for a base call") - @click.pass_context - def _cli(ctx, intensity_threshold, quality_threshold, clip_method): - ctx.obj["component"]._cli_run( - ctx, CallBases(intensity_threshold, quality_threshold, clip_method) - ) diff --git a/starfish/core/image/_filter/clip.py b/starfish/core/image/Filter/clip.py similarity index 82% rename from starfish/core/image/_filter/clip.py rename to starfish/core/image/Filter/clip.py index 1d4f0a26b..b7be73f8e 100644 --- a/starfish/core/image/_filter/clip.py +++ b/starfish/core/image/Filter/clip.py @@ -5,7 +5,6 @@ import xarray as xr from starfish.core.imagestack.imagestack import ImageStack -from starfish.core.util import click from ._base import FilterAlgorithmBase from .util import determine_axes_to_group_by @@ -98,19 +97,3 @@ def run( group_by=group_by, verbose=verbose, 
in_place=in_place, n_processes=n_processes ) return result - - @staticmethod - @click.command("Clip") - @click.option( - "--p-min", default=0, type=int, help="clip intensities below this percentile") - @click.option( - "--p-max", default=100, type=int, help="clip intensities above this percentile") - @click.option( - "--is-volume", is_flag=True, help="filter 3D volumes") - @click.option( - "--expand-dynamic-range", is_flag=True, - help="linearly scale data to fill [0, 1] after clipping." - ) - @click.pass_context - def _cli(ctx, p_min, p_max, is_volume, expand_dynamic_range): - ctx.obj["component"]._cli_run(ctx, Clip(p_min, p_max, is_volume, expand_dynamic_range)) diff --git a/starfish/core/image/_filter/clip_percentile_to_zero.py b/starfish/core/image/Filter/clip_percentile_to_zero.py similarity index 75% rename from starfish/core/image/_filter/clip_percentile_to_zero.py rename to starfish/core/image/Filter/clip_percentile_to_zero.py index 05b50d22d..8e84bf808 100644 --- a/starfish/core/image/_filter/clip_percentile_to_zero.py +++ b/starfish/core/image/Filter/clip_percentile_to_zero.py @@ -5,7 +5,6 @@ import xarray as xr from starfish.core.imagestack.imagestack import ImageStack -from starfish.core.util import click from ._base import FilterAlgorithmBase from .util import determine_axes_to_group_by @@ -99,28 +98,3 @@ def run(self, stack: ImageStack, in_place: bool = False, in_place=in_place, n_processes=n_processes ) return result - - @staticmethod - @click.command("ClipPercentileToZero") - @click.option( - "--p-min", default=0, type=int, - help=("clip intensities below this percentile and subtract the " - "percentile value from the image")) - @click.option( - "--p-max", default=100, type=int, - help="clip intensities above this percentile") - @click.option( - "--min-coeff", default=1.0, type=float, - help="apply coefficient to minimum percentile value") - @click.option( - "--max-coeff", default=1.0, type=float, - help="apply coefficient to maximum percentile 
value") - @click.option( - "--is-volume", is_flag=True, help="filter 3D volumes") - @click.pass_context - def _cli(ctx, p_min, p_max, min_coeff, max_coeff, is_volume): - ctx.obj["component"]._cli_run(ctx, - ClipPercentileToZero(p_min, p_max, - min_coeff, - max_coeff, - is_volume)) diff --git a/starfish/core/image/_filter/clip_value_to_zero.py b/starfish/core/image/Filter/clip_value_to_zero.py similarity index 81% rename from starfish/core/image/_filter/clip_value_to_zero.py rename to starfish/core/image/Filter/clip_value_to_zero.py index 80fab02cc..9772ed3d4 100644 --- a/starfish/core/image/_filter/clip_value_to_zero.py +++ b/starfish/core/image/Filter/clip_value_to_zero.py @@ -6,7 +6,6 @@ from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Number -from starfish.core.util import click from ._base import FilterAlgorithmBase from .util import determine_axes_to_group_by @@ -88,19 +87,3 @@ def run(self, in_place=in_place, n_processes=n_processes ) return result - - @staticmethod - @click.command("ClipValueToZero") - @click.option( - "--v-min", default=0.0, type=float, - help=("clip intensities below this value and subtract this value " - "from the image")) - @click.option( - "--v-max", default=None, type=float, - help="clip intensities above this value") - @click.option( - "--is-volume", is_flag=True, help="filter 3D volumes") - @click.pass_context - def _cli(ctx, v_min, v_max, is_volume): - ctx.obj["component"]._cli_run(ctx, - ClipValueToZero(v_min, v_max, is_volume)) diff --git a/starfish/core/image/_filter/element_wise_mult.py b/starfish/core/image/Filter/element_wise_mult.py similarity index 85% rename from starfish/core/image/_filter/element_wise_mult.py rename to starfish/core/image/Filter/element_wise_mult.py index ff5ba1689..ebb43bb00 100644 --- a/starfish/core/image/_filter/element_wise_mult.py +++ b/starfish/core/image/Filter/element_wise_mult.py @@ -6,7 +6,6 @@ from starfish.core.imagestack.imagestack import ImageStack 
from starfish.core.types import Clip -from starfish.core.util import click from starfish.core.util.dtype import preserve_float_range from ._base import FilterAlgorithmBase @@ -84,13 +83,3 @@ def run( else: stack._data.data.values = preserve_float_range(stack._data.data.values, rescale=True) return stack - - @staticmethod - @click.command("ElementWiseMultiply") - @click.option( - "--mult-array", required=True, type=np.ndarray, help="matrix to multiply with the image") - @click.option( - "--clip-method", default=Clip.CLIP, type=Clip, - help="method to constrain data to [0,1]. options: 'clip', 'scale_by_image'") - def _cli(ctx, mult_array, clip_method): - ctx.obj["component"]._cli_run(ctx, ElementWiseMultiply(mult_array), clip_method) diff --git a/starfish/core/image/_filter/gaussian_high_pass.py b/starfish/core/image/Filter/gaussian_high_pass.py similarity index 85% rename from starfish/core/image/_filter/gaussian_high_pass.py rename to starfish/core/image/Filter/gaussian_high_pass.py index 416609f0c..0157e55d4 100644 --- a/starfish/core/image/_filter/gaussian_high_pass.py +++ b/starfish/core/image/Filter/gaussian_high_pass.py @@ -4,10 +4,9 @@ import numpy as np import xarray as xr -from starfish.core.image._filter.gaussian_low_pass import GaussianLowPass +from starfish.core.image.Filter.gaussian_low_pass import GaussianLowPass from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Clip, Number -from starfish.core.util import click from starfish.core.util.dtype import preserve_float_range from ._base import FilterAlgorithmBase from .util import ( @@ -121,16 +120,3 @@ def run( clip_method=self.clip_method ) return result - - @staticmethod - @click.command("GaussianHighPass") - @click.option("--sigma", type=float, help="standard deviation of gaussian kernel") - @click.option("--is-volume", is_flag=True, - help="indicates that the image stack should be filtered in 3d") - @click.option( - "--clip-method", default=Clip.CLIP, type=Clip, 
- help="method to constrain data to [0,1]. options: 'clip', 'scale_by_image', " - "'scale_by_chunk'") - @click.pass_context - def _cli(ctx, sigma, is_volume, clip_method): - ctx.obj["component"]._cli_run(ctx, GaussianHighPass(sigma, is_volume, clip_method)) diff --git a/starfish/core/image/_filter/gaussian_low_pass.py b/starfish/core/image/Filter/gaussian_low_pass.py similarity index 87% rename from starfish/core/image/_filter/gaussian_low_pass.py rename to starfish/core/image/Filter/gaussian_low_pass.py index a24b77d65..b96543883 100644 --- a/starfish/core/image/_filter/gaussian_low_pass.py +++ b/starfish/core/image/Filter/gaussian_low_pass.py @@ -7,7 +7,6 @@ from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Clip, Number -from starfish.core.util import click from starfish.core.util.dtype import preserve_float_range from ._base import FilterAlgorithmBase from .util import ( @@ -125,16 +124,3 @@ def run( clip_method=self.clip_method ) return result - - @staticmethod - @click.command("GaussianLowPass") - @click.option("--sigma", type=float, help="standard deviation of gaussian kernel") - @click.option("--is-volume", is_flag=True, - help="indicates that the image stack should be filtered in 3d") - @click.option( - "--clip-method", default=Clip.CLIP, type=Clip, - help="method to constrain data to [0,1]. 
options: 'clip', 'scale_by_image', " - "'scale_by_chunk'") - @click.pass_context - def _cli(ctx, sigma, is_volume, clip_method): - ctx.obj["component"]._cli_run(ctx, GaussianLowPass(sigma, is_volume, clip_method)) diff --git a/starfish/core/image/_filter/laplace.py b/starfish/core/image/Filter/laplace.py similarity index 81% rename from starfish/core/image/_filter/laplace.py rename to starfish/core/image/Filter/laplace.py index 6bcbbfc76..f9ab583bd 100644 --- a/starfish/core/image/_filter/laplace.py +++ b/starfish/core/image/Filter/laplace.py @@ -6,14 +6,13 @@ import xarray as xr from scipy.ndimage import gaussian_laplace -from starfish.core.image._filter._base import FilterAlgorithmBase -from starfish.core.image._filter.util import ( +from starfish.core.image.Filter._base import FilterAlgorithmBase +from starfish.core.image.Filter.util import ( determine_axes_to_group_by, validate_and_broadcast_kernel_size, ) from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Clip, Number -from starfish.core.util import click class Laplace(FilterAlgorithmBase): @@ -126,25 +125,3 @@ def run( group_by=group_by, verbose=verbose, in_place=in_place, n_processes=n_processes, clip_method=self.clip_method ) - - @staticmethod - @click.command("Laplace") - @click.option( - "--sigma", type=float, - help="Standard deviation of gaussian kernel for spot enhancement") - @click.option( - "--mode", default="reflect", - help="How the input array is extended when the filter overlaps a border") - @click.option( - "--cval", default=0.0, - help="Value to fill past edges of input if mode is ‘constant") - @click.option( - "--is-volume", is_flag=True, - help="indicates that the image stack should be filtered in 3d") - @click.option( - "--clip-method", default=Clip.CLIP, type=Clip, - help="method to constrain data to [0,1]. 
options: 'clip', 'scale_by_image', " - "'scale_by_chunk'") - @click.pass_context - def _cli(ctx, sigma, mode, cval, is_volume, clip_method): - ctx.obj["component"]._cli_run(ctx, Laplace(sigma, mode, cval, is_volume, clip_method)) diff --git a/starfish/core/image/_filter/linear_unmixing.py b/starfish/core/image/Filter/linear_unmixing.py similarity index 89% rename from starfish/core/image/_filter/linear_unmixing.py rename to starfish/core/image/Filter/linear_unmixing.py index 180592cb5..407615622 100644 --- a/starfish/core/image/_filter/linear_unmixing.py +++ b/starfish/core/image/Filter/linear_unmixing.py @@ -6,7 +6,6 @@ from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Axes, Clip -from starfish.core.util import click from ._base import FilterAlgorithmBase @@ -135,15 +134,3 @@ def run( clip_method=self.clip_method, ) return result - - @staticmethod - @click.command("LinearUnmixing") - @click.option( - "--coeff_mat", required=True, type=np.ndarray, help="linear unmixing coefficients") - @click.option( - "--clip-method", default='scale_by_image', - type=click.Choice(['clip', 'scale_by_image', 'scale_by_chunk']), - help="method to constrain data to [0,1]") - @click.pass_context - def _cli(ctx, coeff_mat, clip_method): - ctx.obj["component"]._cli_run(ctx, LinearUnmixing(coeff_mat, clip_method)) diff --git a/starfish/core/image/_filter/match_histograms.py b/starfish/core/image/Filter/match_histograms.py similarity index 88% rename from starfish/core/image/_filter/match_histograms.py rename to starfish/core/image/Filter/match_histograms.py index f0f1d1a2b..6b47f4097 100644 --- a/starfish/core/image/_filter/match_histograms.py +++ b/starfish/core/image/Filter/match_histograms.py @@ -7,7 +7,7 @@ from starfish.core.compat import match_histograms from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Axes -from starfish.core.util import click, enum +from starfish.core.util import enum from ._base 
import FilterAlgorithmBase @@ -114,15 +114,3 @@ def run( group_by=self.group_by, verbose=verbose, in_place=in_place, n_processes=n_processes ) return result - - @staticmethod - @click.command("MatchHistograms") - @click.option( - "--group-by", type=set, required=True, - help=("set that specifies the grouping over which to match the image intensity " - "e.g. {'c', 'r'} would equalize each volume, whereas {'c',} would equalize all " - "volumes within a channel.") - ) - @click.pass_context - def _cli(ctx, group_by): - ctx.obj["component"]._cli_run(ctx, MatchHistograms(group_by)) diff --git a/starfish/core/image/_filter/max_proj.py b/starfish/core/image/Filter/max_proj.py similarity index 75% rename from starfish/core/image/_filter/max_proj.py rename to starfish/core/image/Filter/max_proj.py index 30ad81253..28060e2d1 100644 --- a/starfish/core/image/_filter/max_proj.py +++ b/starfish/core/image/Filter/max_proj.py @@ -3,7 +3,6 @@ from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Axes -from starfish.core.util import click from ._base import FilterAlgorithmBase @@ -61,18 +60,3 @@ def run( """ return stack.max_proj(*tuple(Axes(dim) for dim in self.dims)) - - @staticmethod - @click.command("MaxProject") - @click.option( - "--dims", - type=click.Choice( - [Axes.ROUND.value, Axes.CH.value, Axes.ZPLANE.value, Axes.X.value, Axes.Y.value] - ), - multiple=True, - help="The dimensions the Imagestack should max project over." - "For multiple dimensions add multiple --dims. Ex." 
- "--dims r --dims c") - @click.pass_context - def _cli(ctx, dims): - ctx.obj["component"]._cli_run(ctx, MaxProject(dims)) diff --git a/starfish/core/image/_filter/mean_high_pass.py b/starfish/core/image/Filter/mean_high_pass.py similarity index 87% rename from starfish/core/image/_filter/mean_high_pass.py rename to starfish/core/image/Filter/mean_high_pass.py index 08c947440..22a18c243 100644 --- a/starfish/core/image/_filter/mean_high_pass.py +++ b/starfish/core/image/Filter/mean_high_pass.py @@ -7,7 +7,6 @@ from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Clip, Number -from starfish.core.util import click from starfish.core.util.dtype import preserve_float_range from ._base import FilterAlgorithmBase from .util import ( @@ -122,18 +121,3 @@ def run( clip_method=self.clip_method ) return result - - @staticmethod - @click.command("MeanHighPass") - @click.option( - "--size", type=float, help="width of the kernel") - @click.option( - "--is-volume", is_flag=True, - help="indicates that the image stack should be filtered in 3d") - @click.option( - "--clip-method", default=Clip.CLIP, type=Clip, - help="method to constrain data to [0,1]. 
options: 'clip', 'scale_by_image', " - "'scale_by_chunk'") - @click.pass_context - def _cli(ctx, size, is_volume, clip_method): - ctx.obj["component"]._cli_run(ctx, MeanHighPass(size, is_volume, clip_method)) diff --git a/starfish/core/image/_filter/reduce.py b/starfish/core/image/Filter/reduce.py similarity index 87% rename from starfish/core/image/_filter/reduce.py rename to starfish/core/image/Filter/reduce.py index f440043aa..4a3f4af5e 100644 --- a/starfish/core/image/_filter/reduce.py +++ b/starfish/core/image/Filter/reduce.py @@ -15,7 +15,6 @@ from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Axes, Clip, Coordinates, Number -from starfish.core.util import click from starfish.core.util.dtype import preserve_float_range from ._base import FilterAlgorithmBase @@ -208,34 +207,3 @@ def run( reduced_stack = ImageStack.from_numpy(reduced.values, coordinates=physical_coords) return reduced_stack - - @staticmethod - @click.command("Reduce") - @click.option( - "--dims", - type=click.Choice( - [Axes.ROUND.value, Axes.CH.value, Axes.ZPLANE.value, Axes.X.value, Axes.Y.value] - ), - multiple=True, - help="The dimensions the Imagestack should max project over." - "For multiple dimensions add multiple --dims. Ex." - "--dims r --dims c") - @click.option( - "--func", - type=str, - help="The function to apply across dims." - ) - @click.option( - "--module", - type=click.Choice([member.name for member in list(FunctionSource)]), - multiple=False, - help="Module to source the function from.", - default=FunctionSource.np.name, - ) - @click.option( - "--clip-method", default=Clip.CLIP, type=Clip, - help="method to constrain data to [0,1]. 
options: 'clip', 'scale_by_image'") - @click.pass_context - def _cli(ctx, dims, func, module, clip_method): - ctx.obj["component"]._cli_run( - ctx, Reduce(dims, func, Reduce.FunctionSource[module], clip_method)) diff --git a/starfish/core/image/_filter/richardson_lucy_deconvolution.py b/starfish/core/image/Filter/richardson_lucy_deconvolution.py similarity index 89% rename from starfish/core/image/_filter/richardson_lucy_deconvolution.py rename to starfish/core/image/Filter/richardson_lucy_deconvolution.py index cf448c009..ae18c04a3 100644 --- a/starfish/core/image/_filter/richardson_lucy_deconvolution.py +++ b/starfish/core/image/Filter/richardson_lucy_deconvolution.py @@ -7,7 +7,6 @@ from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Clip, Number -from starfish.core.util import click from ._base import FilterAlgorithmBase from .util import ( determine_axes_to_group_by, @@ -185,19 +184,3 @@ def run( in_place=in_place, ) return result - - @staticmethod - @click.command("DeconvolvePSF") - @click.option( - '--num-iter', type=int, help='number of iterations to run') - @click.option( - '--sigma', type=float, help='standard deviation of gaussian kernel') - @click.option("--is-volume", is_flag=True, - help="indicates that the image stack should be filtered in 3d") - @click.option( - "--clip-method", default=Clip.CLIP, type=Clip, - help="method to constrain data to [0,1]. 
 options: 'clip', 'scale_by_image', " -                "'scale_by_chunk'") -    @click.pass_context -    def _cli(ctx, num_iter, sigma, is_volume, clip_method): -        ctx.obj["component"]._cli_run(ctx, DeconvolvePSF(num_iter, sigma, is_volume, clip_method)) diff --git a/starfish/core/image/_filter/test/__init__.py b/starfish/core/image/Filter/test/__init__.py similarity index 100% rename from starfish/core/image/_filter/test/__init__.py rename to starfish/core/image/Filter/test/__init__.py diff --git a/starfish/core/image/_filter/test/test_api_contract.py b/starfish/core/image/Filter/test/test_api_contract.py similarity index 83% rename from starfish/core/image/_filter/test/test_api_contract.py rename to starfish/core/image/Filter/test/test_api_contract.py index 792349b9a..9ebd97da2 100644 --- a/starfish/core/image/_filter/test/test_api_contract.py +++ b/starfish/core/image/Filter/test/test_api_contract.py @@ -24,10 +24,26 @@ from starfish import ImageStack from starfish.core.image import Filter -from starfish.core.image._filter.max_proj import MaxProject -from starfish.core.image._filter.reduce import Reduce - -methods: Mapping[str, Type] = Filter._algorithm_to_class_map() +from starfish.core.image.Filter.max_proj import MaxProject +from starfish.core.image.Filter.reduce import Reduce + + +methods: Mapping[str, Type] = { +    'clip': Filter.Clip, +    'bandpass': Filter.Bandpass, +    'clip_percentile_to_zero': Filter.ClipPercentileToZero, +    'clip_value_to_zero': Filter.ClipValueToZero, +    'element_wise_mult': Filter.ElementWiseMultiply, +    'gaussian_high_pass': Filter.GaussianHighPass, +    'gaussian_low_pass': Filter.GaussianLowPass, +    'laplace': Filter.Laplace, +    'max_proj': Filter.MaxProject, +    'mean_high_pass': Filter.MeanHighPass, +    'reduce': Filter.Reduce, +    'deconvolve': Filter.DeconvolvePSF, +    'white_top_hat': Filter.WhiteTophat, +    'zero_by_channel': Filter.ZeroByChannelMagnitude +} def generate_default_data(): diff --git a/starfish/core/image/_filter/test/test_call_bases.py 
b/starfish/core/image/Filter/test/test_call_bases.py similarity index 98% rename from starfish/core/image/_filter/test/test_call_bases.py rename to starfish/core/image/Filter/test/test_call_bases.py index 9a0e0b49c..bd857830b 100644 --- a/starfish/core/image/_filter/test/test_call_bases.py +++ b/starfish/core/image/Filter/test/test_call_bases.py @@ -3,7 +3,7 @@ import xarray as xr from starfish import ImageStack -from starfish.core.image._filter.call_bases import CallBases +from starfish.core.image.Filter.call_bases import CallBases from starfish.types import Axes diff --git a/starfish/core/image/_filter/test/test_filter.py b/starfish/core/image/Filter/test/test_filter.py similarity index 95% rename from starfish/core/image/_filter/test/test_filter.py rename to starfish/core/image/Filter/test/test_filter.py index 90f7f3f16..e2a92a7cd 100644 --- a/starfish/core/image/_filter/test/test_filter.py +++ b/starfish/core/image/Filter/test/test_filter.py @@ -4,7 +4,7 @@ import pytest import xarray as xr -from starfish.core.image._filter import element_wise_mult, gaussian_high_pass, mean_high_pass +from starfish.core.image.Filter import element_wise_mult, gaussian_high_pass, mean_high_pass from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Clip, Number diff --git a/starfish/core/image/_filter/test/test_histogram_matching.py b/starfish/core/image/Filter/test/test_histogram_matching.py similarity index 94% rename from starfish/core/image/_filter/test/test_histogram_matching.py rename to starfish/core/image/Filter/test/test_histogram_matching.py index 813a54cc1..55e922741 100644 --- a/starfish/core/image/_filter/test/test_histogram_matching.py +++ b/starfish/core/image/Filter/test/test_histogram_matching.py @@ -1,6 +1,6 @@ import numpy as np -from starfish.core.image._filter.match_histograms import MatchHistograms +from starfish.core.image.Filter.match_histograms import MatchHistograms from starfish.core.imagestack.imagestack import 
ImageStack from starfish.core.types import Axes diff --git a/starfish/core/image/_filter/test/test_linear_unmixing.py b/starfish/core/image/Filter/test/test_linear_unmixing.py similarity index 95% rename from starfish/core/image/_filter/test/test_linear_unmixing.py rename to starfish/core/image/Filter/test/test_linear_unmixing.py index 70b5c5666..4c4b55ae5 100644 --- a/starfish/core/image/_filter/test/test_linear_unmixing.py +++ b/starfish/core/image/Filter/test/test_linear_unmixing.py @@ -1,7 +1,7 @@ import numpy as np from starfish import ImageStack -from starfish.core.image._filter.linear_unmixing import LinearUnmixing +from starfish.core.image.Filter.linear_unmixing import LinearUnmixing from starfish.core.types import Clip def setup_linear_unmixing_test(): diff --git a/starfish/core/image/_filter/test/test_reduce.py b/starfish/core/image/Filter/test/test_reduce.py similarity index 98% rename from starfish/core/image/_filter/test/test_reduce.py rename to starfish/core/image/Filter/test/test_reduce.py index 128ac5058..82a9bb6bc 100644 --- a/starfish/core/image/_filter/test/test_reduce.py +++ b/starfish/core/image/Filter/test/test_reduce.py @@ -5,7 +5,7 @@ from starfish import data from starfish import ImageStack -from starfish.core.image._filter.reduce import Reduce +from starfish.core.image.Filter.reduce import Reduce from starfish.core.imagestack.test.factories import imagestack_with_coords_factory from starfish.core.imagestack.test.imagestack_test_utils import verify_physical_coordinates from starfish.types import Axes, PhysicalCoordinateTypes diff --git a/starfish/core/image/_filter/test/test_white_tophat.py b/starfish/core/image/Filter/test/test_white_tophat.py similarity index 96% rename from starfish/core/image/_filter/test/test_white_tophat.py rename to starfish/core/image/Filter/test/test_white_tophat.py index 665387d6a..2737f0a35 100644 --- a/starfish/core/image/_filter/test/test_white_tophat.py +++ 
b/starfish/core/image/Filter/test/test_white_tophat.py @@ -2,7 +2,7 @@ import pytest from skimage.filters import gaussian -from starfish.core.image._filter.white_tophat import WhiteTophat +from starfish.core.image.Filter.white_tophat import WhiteTophat def simple_spot_3d(): diff --git a/starfish/core/image/_filter/test/test_zero_by_channel_magnitude.py b/starfish/core/image/Filter/test/test_zero_by_channel_magnitude.py similarity index 89% rename from starfish/core/image/_filter/test/test_zero_by_channel_magnitude.py rename to starfish/core/image/Filter/test/test_zero_by_channel_magnitude.py index c5745b99b..6acdb583e 100644 --- a/starfish/core/image/_filter/test/test_zero_by_channel_magnitude.py +++ b/starfish/core/image/Filter/test/test_zero_by_channel_magnitude.py @@ -1,7 +1,7 @@ import numpy as np from starfish import ImageStack -from starfish.core.image._filter.zero_by_channel_magnitude import ZeroByChannelMagnitude +from starfish.core.image.Filter.zero_by_channel_magnitude import ZeroByChannelMagnitude def create_imagestack_with_magnitude_scale(): """create an imagestack with increasing magnitudes""" diff --git a/starfish/core/image/_filter/util.py b/starfish/core/image/Filter/util.py similarity index 100% rename from starfish/core/image/_filter/util.py rename to starfish/core/image/Filter/util.py diff --git a/starfish/core/image/_filter/white_tophat.py b/starfish/core/image/Filter/white_tophat.py similarity index 82% rename from starfish/core/image/_filter/white_tophat.py rename to starfish/core/image/Filter/white_tophat.py index 674800d70..ccf3fcab8 100644 --- a/starfish/core/image/_filter/white_tophat.py +++ b/starfish/core/image/Filter/white_tophat.py @@ -6,7 +6,6 @@ from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Clip -from starfish.core.util import click from ._base import FilterAlgorithmBase from .util import determine_axes_to_group_by @@ -95,18 +94,3 @@ def run( clip_method=self.clip_method ) return result - 
- @staticmethod - @click.command("WhiteTophat") - @click.option( - "--masking-radius", default=15, type=int, - help="diameter of morphological masking disk in pixels") - @click.option( # FIXME: was this intentionally missed? - "--is-volume", is_flag=True, help="filter 3D volumes") - @click.option( - "--clip-method", default=Clip.CLIP, type=Clip, - help="method to constrain data to [0,1]. options: 'clip', 'scale_by_image', " - "'scale_by_chunk'") - @click.pass_context - def _cli(ctx, masking_radius, is_volume, clip_method): - ctx.obj["component"]._cli_run(ctx, WhiteTophat(masking_radius, is_volume, clip_method)) diff --git a/starfish/core/image/_filter/zero_by_channel_magnitude.py b/starfish/core/image/Filter/zero_by_channel_magnitude.py similarity index 87% rename from starfish/core/image/_filter/zero_by_channel_magnitude.py rename to starfish/core/image/Filter/zero_by_channel_magnitude.py index 74c8d2b20..d5ffe8d07 100644 --- a/starfish/core/image/_filter/zero_by_channel_magnitude.py +++ b/starfish/core/image/Filter/zero_by_channel_magnitude.py @@ -7,7 +7,6 @@ from starfish.core.config import StarfishConfig from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Axes -from starfish.core.util import click from ._base import FilterAlgorithmBase @@ -92,15 +91,3 @@ def run( where=magnitude_mask ) return stack - - @staticmethod - @click.command("ZeroByChannelMagnitude") - @click.option( - '--thresh', type=float, - help='minimum magnitude threshold for pixels across channels') - @click.option( - '--normalize', is_flag=True, - help='Scales all rounds to have unit L2 norm across channels') - @click.pass_context - def _cli(ctx, thresh, normalize): - ctx.obj["component"]._cli_run(ctx, ZeroByChannelMagnitude(thresh, normalize)) diff --git a/starfish/core/image/Segment/__init__.py b/starfish/core/image/Segment/__init__.py new file mode 100644 index 000000000..641ba4060 --- /dev/null +++ b/starfish/core/image/Segment/__init__.py @@ -0,0 +1 @@ 
+from .watershed import Watershed diff --git a/starfish/core/image/Segment/_base.py b/starfish/core/image/Segment/_base.py new file mode 100644 index 000000000..354e29855 --- /dev/null +++ b/starfish/core/image/Segment/_base.py @@ -0,0 +1,18 @@ +from abc import abstractmethod + +from starfish.core.imagestack.imagestack import ImageStack +from starfish.core.pipeline.algorithmbase import AlgorithmBase +from starfish.core.segmentation_mask import SegmentationMaskCollection + + +class SegmentAlgorithmBase(metaclass=AlgorithmBase): + + @abstractmethod + def run( + self, + primary_image_stack: ImageStack, + nuclei_stack: ImageStack, + *args + ) -> SegmentationMaskCollection: + """Performs segmentation on the stack provided.""" + raise NotImplementedError() diff --git a/starfish/core/image/_segment/watershed.py b/starfish/core/image/Segment/watershed.py similarity index 95% rename from starfish/core/image/_segment/watershed.py rename to starfish/core/image/Segment/watershed.py index ac7654f4a..c4eb64676 100644 --- a/starfish/core/image/_segment/watershed.py +++ b/starfish/core/image/Segment/watershed.py @@ -7,11 +7,10 @@ from skimage.feature import peak_local_max from skimage.morphology import watershed -from starfish.core.image._filter.util import bin_open, bin_thresh +from starfish.core.image.Filter.util import bin_open, bin_thresh from starfish.core.imagestack.imagestack import ImageStack from starfish.core.segmentation_mask import SegmentationMaskCollection from starfish.core.types import Axes, Coordinates, Number -from starfish.core.util import click from ._base import SegmentAlgorithmBase @@ -108,19 +107,6 @@ def show(self, figsize: Tuple[int, int]=(10, 10)) -> None: else: raise RuntimeError('Run segmentation before attempting to show results.') - @staticmethod - @click.command("Watershed") - @click.option( - "--nuclei-threshold", default=.16, type=float, help="Nuclei threshold") - @click.option( - "--input-threshold", default=.22, type=float, help="Input 
threshold") - @click.option( - "--min-distance", default=57, type=int, help="Minimum distance between cells") - @click.pass_context - def _cli(ctx, nuclei_threshold, input_threshold, min_distance): - ctx.obj["component"]._cli_run( - ctx, Watershed(nuclei_threshold, input_threshold, min_distance)) - class _WatershedSegmenter: def __init__(self, nuclei_img: np.ndarray, stain_img: np.ndarray) -> None: diff --git a/starfish/core/image/__init__.py b/starfish/core/image/__init__.py index c37ec80d7..282ff3e6d 100644 --- a/starfish/core/image/__init__.py +++ b/starfish/core/image/__init__.py @@ -1,4 +1,2 @@ -from ._filter import Filter -from ._registration._apply_transform import ApplyTransform -from ._registration._learn_transform import LearnTransform -from ._segment import Segment +from ._registration import ApplyTransform +from ._registration import LearnTransform diff --git a/starfish/core/image/_filter/__init__.py b/starfish/core/image/_filter/__init__.py deleted file mode 100644 index da0e306a2..000000000 --- a/starfish/core/image/_filter/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from starfish.core.pipeline import import_all_submodules -from ._base import Filter -import_all_submodules(__file__, __package__) diff --git a/starfish/core/image/_filter/_base.py b/starfish/core/image/_filter/_base.py deleted file mode 100644 index 1f24aba3a..000000000 --- a/starfish/core/image/_filter/_base.py +++ /dev/null @@ -1,43 +0,0 @@ -from abc import abstractmethod -from typing import Type - -from starfish.core.imagestack.imagestack import ImageStack -from starfish.core.pipeline.algorithmbase import AlgorithmBase -from starfish.core.pipeline.pipelinecomponent import PipelineComponent -from starfish.core.util import click -from starfish.core.util.click.indirectparams import ImageStackParamType - - -class Filter(PipelineComponent): - @classmethod - def _cli_run(cls, ctx, instance): - output = ctx.obj["output"] - stack = ctx.obj["stack"] - filtered = instance.run(stack) - 
filtered.export(output) - - @staticmethod - @click.group("Filter") - @click.option("-i", "--input", type=ImageStackParamType) - @click.option("-o", "--output", required=True) - @click.pass_context - def _cli(ctx, input, output): - """smooth, sharpen, denoise, etc""" - print("Filtering images...") - ctx.obj = dict( - component=Filter, - input=input, - output=output, - stack=input, - ) - - -class FilterAlgorithmBase(AlgorithmBase): - @classmethod - def get_pipeline_component_class(cls) -> Type[PipelineComponent]: - return Filter - - @abstractmethod - def run(self, stack: ImageStack, *args) -> ImageStack: - """Perform filtering of an image stack""" - raise NotImplementedError() diff --git a/starfish/core/image/_registration/ApplyTransform/__init__.py b/starfish/core/image/_registration/ApplyTransform/__init__.py new file mode 100644 index 000000000..3f4afd1b9 --- /dev/null +++ b/starfish/core/image/_registration/ApplyTransform/__init__.py @@ -0,0 +1 @@ +from .warp import Warp diff --git a/starfish/core/image/_registration/ApplyTransform/_base.py b/starfish/core/image/_registration/ApplyTransform/_base.py new file mode 100644 index 000000000..9afde3090 --- /dev/null +++ b/starfish/core/image/_registration/ApplyTransform/_base.py @@ -0,0 +1,12 @@ +from abc import abstractmethod + +from starfish.core.imagestack.imagestack import ImageStack +from starfish.core.pipeline.algorithmbase import AlgorithmBase + + +class ApplyTransformBase(metaclass=AlgorithmBase): + + @abstractmethod + def run(self, stack, transforms_list, *args) -> ImageStack: + """Performs registration on the stack provided.""" + raise NotImplementedError() diff --git a/starfish/core/image/_registration/_apply_transform/test/__init__.py b/starfish/core/image/_registration/ApplyTransform/test/__init__.py similarity index 100% rename from starfish/core/image/_registration/_apply_transform/test/__init__.py rename to starfish/core/image/_registration/ApplyTransform/test/__init__.py diff --git 
a/starfish/core/image/_registration/_apply_transform/test/test_warp.py b/starfish/core/image/_registration/ApplyTransform/test/test_warp.py similarity index 92% rename from starfish/core/image/_registration/_apply_transform/test/test_warp.py rename to starfish/core/image/_registration/ApplyTransform/test/test_warp.py index 03ec80aa2..caf78bf46 100644 --- a/starfish/core/image/_registration/_apply_transform/test/test_warp.py +++ b/starfish/core/image/_registration/ApplyTransform/test/test_warp.py @@ -1,8 +1,8 @@ import numpy as np from starfish import data -from starfish.core.image._registration._apply_transform.warp import Warp -from starfish.core.image._registration._learn_transform.translation import Translation +from starfish.core.image._registration.ApplyTransform.warp import Warp +from starfish.core.image._registration.LearnTransform.translation import Translation from starfish.core.types import Axes diff --git a/starfish/core/image/_registration/_apply_transform/warp.py b/starfish/core/image/_registration/ApplyTransform/warp.py similarity index 92% rename from starfish/core/image/_registration/_apply_transform/warp.py rename to starfish/core/image/_registration/ApplyTransform/warp.py index 61272298d..325cf751a 100644 --- a/starfish/core/image/_registration/_apply_transform/warp.py +++ b/starfish/core/image/_registration/ApplyTransform/warp.py @@ -8,11 +8,10 @@ from tqdm import tqdm from starfish.core.config import StarfishConfig -from starfish.core.image._registration._apply_transform._base import ApplyTransformBase +from starfish.core.image._registration.ApplyTransform._base import ApplyTransformBase from starfish.core.image._registration.transforms_list import TransformsList from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Axes -from starfish.core.util import click class Warp(ApplyTransformBase): @@ -61,12 +60,6 @@ def run(self, stack: ImageStack, transforms_list: TransformsList, stack.set_slice(selector, 
warped_image) return stack - @staticmethod - @click.command("Warp") - @click.pass_context - def _cli(ctx): - ctx.obj["component"]._cli_run(ctx, Warp()) - def warp(image: Union[xr.DataArray, np.ndarray], transformation_object: GeometricTransform, diff --git a/starfish/core/image/_registration/LearnTransform/__init__.py b/starfish/core/image/_registration/LearnTransform/__init__.py new file mode 100644 index 000000000..1e96e7250 --- /dev/null +++ b/starfish/core/image/_registration/LearnTransform/__init__.py @@ -0,0 +1 @@ +from .translation import Translation diff --git a/starfish/core/image/_registration/LearnTransform/_base.py b/starfish/core/image/_registration/LearnTransform/_base.py new file mode 100644 index 000000000..733e58496 --- /dev/null +++ b/starfish/core/image/_registration/LearnTransform/_base.py @@ -0,0 +1,12 @@ +from abc import abstractmethod + +from starfish.core.image._registration.transforms_list import TransformsList +from starfish.core.pipeline.algorithmbase import AlgorithmBase + + +class LearnTransformBase(metaclass=AlgorithmBase): + + @abstractmethod + def run(self, stack, *args) -> TransformsList: + """Learns Transforms for a given stack.""" + raise NotImplementedError() diff --git a/starfish/core/image/_registration/_learn_transform/test/__init__.py b/starfish/core/image/_registration/LearnTransform/test/__init__.py similarity index 100% rename from starfish/core/image/_registration/_learn_transform/test/__init__.py rename to starfish/core/image/_registration/LearnTransform/test/__init__.py diff --git a/starfish/core/image/_registration/_learn_transform/test/test_translation.py b/starfish/core/image/_registration/LearnTransform/test/test_translation.py similarity index 94% rename from starfish/core/image/_registration/_learn_transform/test/test_translation.py rename to starfish/core/image/_registration/LearnTransform/test/test_translation.py index bb11840cb..2bda85aff 100644 --- 
a/starfish/core/image/_registration/_learn_transform/test/test_translation.py +++ b/starfish/core/image/_registration/LearnTransform/test/test_translation.py @@ -1,7 +1,7 @@ import numpy as np from starfish import data -from starfish.core.image._registration._learn_transform.translation import Translation +from starfish.core.image._registration.LearnTransform.translation import Translation from starfish.core.types import Axes diff --git a/starfish/core/image/_registration/_learn_transform/translation.py b/starfish/core/image/_registration/LearnTransform/translation.py similarity index 80% rename from starfish/core/image/_registration/_learn_transform/translation.py rename to starfish/core/image/_registration/LearnTransform/translation.py index a8a921fa7..be83cc0b1 100644 --- a/starfish/core/image/_registration/_learn_transform/translation.py +++ b/starfish/core/image/_registration/LearnTransform/translation.py @@ -5,8 +5,6 @@ from starfish.core.image._registration.transforms_list import TransformsList from starfish.core.imagestack.imagestack import ImageStack from starfish.core.types import Axes, TransformType -from starfish.core.util import click -from starfish.core.util.click.indirectparams import ImageStackParamType from ._base import LearnTransformBase @@ -77,17 +75,3 @@ def run(self, stack: ImageStack, verbose: bool=False, *args) -> TransformsList: SimilarityTransform(translation=shift)) return transforms - - @staticmethod - @click.command("Translation") - @click.option("--reference-stack", required=True, type=ImageStackParamType, - help="The image to align the input ImageStack to.") - @click.option("--axes", default="r", type=str, help="The axes to iterate over.") - @click.option("--upsampling", default=1, type=int, help="Upsampling factor.") - @click.pass_context - def _cli(ctx, reference_stack, axes, upsampling): - ctx.obj["component"]._cli_run( - ctx, - Translation( - reference_stack=reference_stack, - axes=Axes(axes), upsampling=upsampling)) diff --git 
a/starfish/core/image/_registration/_apply_transform/__init__.py b/starfish/core/image/_registration/_apply_transform/__init__.py deleted file mode 100644 index db6186683..000000000 --- a/starfish/core/image/_registration/_apply_transform/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from starfish.core.pipeline import import_all_submodules -from ._base import ApplyTransform -import_all_submodules(__file__, __package__) diff --git a/starfish/core/image/_registration/_apply_transform/_base.py b/starfish/core/image/_registration/_apply_transform/_base.py deleted file mode 100644 index b2f9e91b2..000000000 --- a/starfish/core/image/_registration/_apply_transform/_base.py +++ /dev/null @@ -1,50 +0,0 @@ -from abc import abstractmethod -from typing import Type - -from starfish.core.image._registration.transforms_list import TransformsList -from starfish.core.imagestack.imagestack import ImageStack -from starfish.core.pipeline import PipelineComponent -from starfish.core.pipeline.algorithmbase import AlgorithmBase -from starfish.core.util import click -from starfish.core.util.click.indirectparams import ImageStackParamType - - -class ApplyTransform(PipelineComponent): - """ - ApplyTransform exposes methods to align image data by transforming (and re-sampling if sub-pixel - shifts are passed) input data according to a provided transformation. 
- """ - @classmethod - def _cli_run(cls, ctx, instance): - output = ctx.obj["output"] - stack = ctx.obj["stack"] - transformation_list = ctx.obj["transformation_list"] - transformed = instance.run(stack, transformation_list) - transformed.export(output) - - @staticmethod - @click.group("ApplyTransform") - @click.option("-i", "--input", type=ImageStackParamType) - @click.option("-o", "--output", required=True) - @click.option("--transformation-list", required=True, type=click.Path(exists=True), - help="The list of transformations to apply to the ImageStack.") - @click.pass_context - def _cli(ctx, input, output, transformation_list): - print("Applying Transform to images...") - ctx.obj = dict( - component=ApplyTransform, - output=output, - stack=input, - transformation_list=TransformsList.from_json(transformation_list) - ) - - -class ApplyTransformBase(AlgorithmBase): - @classmethod - def get_pipeline_component_class(cls) -> Type[PipelineComponent]: - return ApplyTransform - - @abstractmethod - def run(self, stack, transforms_list, *args) -> ImageStack: - """Performs registration on the stack provided.""" - raise NotImplementedError() diff --git a/starfish/core/image/_registration/_learn_transform/__init__.py b/starfish/core/image/_registration/_learn_transform/__init__.py deleted file mode 100644 index 6a83151a4..000000000 --- a/starfish/core/image/_registration/_learn_transform/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from starfish.core.pipeline import import_all_submodules -from ._base import LearnTransform -import_all_submodules(__file__, __package__) diff --git a/starfish/core/image/_registration/_learn_transform/_base.py b/starfish/core/image/_registration/_learn_transform/_base.py deleted file mode 100644 index ed31c771b..000000000 --- a/starfish/core/image/_registration/_learn_transform/_base.py +++ /dev/null @@ -1,46 +0,0 @@ -from abc import abstractmethod -from typing import Type - - -from starfish.core.image._registration.transforms_list import 
TransformsList -from starfish.core.pipeline.algorithmbase import AlgorithmBase -from starfish.core.pipeline.pipelinecomponent import PipelineComponent -from starfish.core.util import click -from starfish.core.util.click.indirectparams import ImageStackParamType - - -class LearnTransform(PipelineComponent): - """ - LearnTransform exposes methods to learn transformations that align ImageStacks. - """ - @classmethod - def _cli_run(cls, ctx, instance, *args, **kwargs): - output = ctx.obj["output"] - stack = ctx.obj["stack"] - transformation_list = instance.run(stack) - transformation_list.to_json(output) - - @staticmethod - @click.group("LearnTransform") - @click.option("-i", "--input", type=ImageStackParamType) - @click.option("-o", "--output", required=True) - @click.pass_context - def _cli(ctx, input, output): - """Learn a set of transforms for an ImageStack.""" - print("Learning Transforms for images...") - ctx.obj = dict( - component=LearnTransform, - output=output, - stack=input, - ) - - -class LearnTransformBase(AlgorithmBase): - @classmethod - def get_pipeline_component_class(cls) -> Type[PipelineComponent]: - return LearnTransform - - @abstractmethod - def run(self, stack, *args) -> TransformsList: - """Learns Transforms for a given stack.""" - raise NotImplementedError() diff --git a/starfish/core/image/_registration/test/test_transforms_list.py b/starfish/core/image/_registration/test/test_transforms_list.py index 318a7d591..fe505d54e 100644 --- a/starfish/core/image/_registration/test/test_transforms_list.py +++ b/starfish/core/image/_registration/test/test_transforms_list.py @@ -3,7 +3,7 @@ import numpy as np from starfish import data -from starfish.core.image._registration._learn_transform.translation import Translation +from starfish.core.image._registration.LearnTransform.translation import Translation from starfish.core.image._registration.transforms_list import TransformsList from starfish.core.types import Axes, TransformType diff --git 
a/starfish/core/image/_segment/__init__.py b/starfish/core/image/_segment/__init__.py deleted file mode 100644 index e6cc1ce05..000000000 --- a/starfish/core/image/_segment/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from starfish.core.pipeline import import_all_submodules -from ._base import Segment -import_all_submodules(__file__, __package__) diff --git a/starfish/core/image/_segment/_base.py b/starfish/core/image/_segment/_base.py deleted file mode 100644 index 075cb68c1..000000000 --- a/starfish/core/image/_segment/_base.py +++ /dev/null @@ -1,57 +0,0 @@ -from abc import abstractmethod -from typing import Type - -from starfish.core.imagestack.imagestack import ImageStack -from starfish.core.pipeline import PipelineComponent -from starfish.core.pipeline.algorithmbase import AlgorithmBase -from starfish.core.segmentation_mask import SegmentationMaskCollection -from starfish.core.util import click -from starfish.core.util.click.indirectparams import ImageStackParamType - - -class Segment(PipelineComponent): - """ - Starfish class implementing segmentation approaches. 
- """ - @classmethod - def _cli_run(cls, ctx, instance): - output = ctx.obj["output"] - pri_stack = ctx.obj["primary_images"] - nuc_stack = ctx.obj["nuclei"] - - masks = instance.run(pri_stack, nuc_stack) - - print(f"Writing masks to {output}") - masks.save(output) - - @staticmethod - @click.group("Segment") - @click.option("--primary-images", required=True, type=ImageStackParamType) - @click.option("--nuclei", required=True, type=ImageStackParamType) - @click.option("-o", "--output", required=True) - @click.pass_context - def _cli(ctx, primary_images, nuclei, output): - """define masks for cell boundaries and assign spots""" - print('Segmenting ...') - ctx.obj = dict( - component=Segment, - output=output, - primary_images=primary_images, - nuclei=nuclei, - ) - - -class SegmentAlgorithmBase(AlgorithmBase): - @classmethod - def get_pipeline_component_class(cls) -> Type[PipelineComponent]: - return Segment - - @abstractmethod - def run( - self, - primary_image_stack: ImageStack, - nuclei_stack: ImageStack, - *args - ) -> SegmentationMaskCollection: - """Performs segmentation on the stack provided.""" - raise NotImplementedError() diff --git a/starfish/core/pipeline/__init__.py b/starfish/core/pipeline/__init__.py index 0f0680f09..e69de29bb 100644 --- a/starfish/core/pipeline/__init__.py +++ b/starfish/core/pipeline/__init__.py @@ -1,2 +0,0 @@ -from .algorithmbase import AlgorithmBase -from .pipelinecomponent import import_all_submodules, PipelineComponent diff --git a/starfish/core/pipeline/algorithmbase.py b/starfish/core/pipeline/algorithmbase.py index 7690b22ed..322a45f40 100644 --- a/starfish/core/pipeline/algorithmbase.py +++ b/starfish/core/pipeline/algorithmbase.py @@ -1,32 +1,19 @@ import functools import inspect -from abc import ABCMeta, abstractmethod -from typing import Type +from abc import ABCMeta from starfish.core.imagestack.imagestack import ImageStack from starfish.core.intensity_table.intensity_table import IntensityTable from starfish.core.types 
import LOG from starfish.core.types._constants import STARFISH_EXTRAS_KEY from starfish.core.util.logging import LogEncoder -from .pipelinecomponent import PipelineComponent -class AlgorithmBaseType(ABCMeta): +class AlgorithmBase(ABCMeta): def __init__(cls, name, bases, namespace): super().__init__(name, bases, namespace) if not inspect.isabstract(cls): - AlgorithmBaseType.register_with_pipeline_component(cls) - cls.run = AlgorithmBaseType.run_with_logging(cls.run) - - @staticmethod - def register_with_pipeline_component(algorithm_cls): - pipeline_component_cls = algorithm_cls.get_pipeline_component_class() - if pipeline_component_cls._algorithm_to_class_map_int is None: - pipeline_component_cls._algorithm_to_class_map_int = {} - pipeline_component_cls._algorithm_to_class_map_int[algorithm_cls.__name__] = algorithm_cls - setattr(pipeline_component_cls, algorithm_cls._get_algorithm_name(), algorithm_cls) - - pipeline_component_cls._cli.add_command(algorithm_cls._cli) + cls.run = AlgorithmBase.run_with_logging(cls.run) @staticmethod def run_with_logging(func): @@ -60,58 +47,3 @@ def helper(*args, **kwargs): it.attrs[STARFISH_EXTRAS_KEY] = LogEncoder().encode({LOG: stack.log}) return result return helper - - -class AlgorithmBase(metaclass=AlgorithmBaseType): - - """ - This is the base class of any algorithm that starfish exposes. - - Subclasses of this base class are paired with subclasses of PipelineComponent. The subclasses of - PipelineComponent retrieve subclasses of the paired AlgorithmBase. Together, the two classes - enable starfish to expose a paired API and CLI. 
- - Examples - -------- - - PipelineComponent: `starfish.image._segment.Segmentation(PipelineComponent)` - - AlgorithmBase: `starfish.image._segment._base.SegmentationAlgorithmBase(AlgorithmBase)` - - Implementing Algorithms: - - `starfish.image._segment.watershed.Watershed(SegmentationAlgorithmBase)` - - This pattern exposes the API as follows: - - `starfish.image.Segmentation.` - - and the CLI as: - - `$> starfish segmentation watershed` - - To create an entirely new group of related algorithms, like `Segmentation`, a new subclass of - both `AlgorithmBase` and `PipelineComponent` must be created. - - To add to an existing group of algorithms like "Segmentation", an algorithm implementation must - subclass the corresponding subclass of `AlgorithmBase`. In this case, - `SegmentationAlgorithmBase`. - - See Also - -------- - starfish.pipeline.pipelinecomponent.py - """ - @classmethod - def _get_algorithm_name(cls): - """ - Returns the name of the algorithm. This should be a valid python identifier, i.e., - https://docs.python.org/3/reference/lexical_analysis.html#identifiers - """ - return cls.__name__ - - @classmethod - @abstractmethod - def get_pipeline_component_class(cls) -> Type[PipelineComponent]: - """ - Returns the class of PipelineComponent this algorithm implements. - """ - raise NotImplementedError() diff --git a/starfish/core/pipeline/pipelinecomponent.py b/starfish/core/pipeline/pipelinecomponent.py deleted file mode 100644 index 401e29c3e..000000000 --- a/starfish/core/pipeline/pipelinecomponent.py +++ /dev/null @@ -1,69 +0,0 @@ -import importlib -from abc import abstractmethod -from pathlib import Path -from typing import Mapping, Optional, Set, Type - - -class PipelineComponent: - """ - This is the base class for any method executed by the CLI. 
- - PipelineComponent is an Abstract Class that exposes two private methods to link any subclassing - method to the CLI, _algorithm_to_class_map, which fetches all the algorithms that extend this - base class at run time, and _cli_register, which registers those methods to the CLI. It exposes - two additional abstract private methods that must be extended by subclasses: - - Methods - ------- - _get_algorithm_base_class() - should simply return an instance of the AlgorithmBase. See, e.g. - starfish.image.segmentation.Segmentation - _cli_run(ctx, instance, *args, **kwargs) - implements the behavior of the pipeline component that must occur when this component is - evoked from the CLI. This often includes loading serialized objects into memory and - passing them to the API's run command. - - See Also - -------- - starfish.pipeline.algorithmbase.py - """ - - _algorithm_to_class_map_int: Optional[Mapping[str, Type]] = None - - @classmethod - def _algorithm_to_class_map(cls) -> Mapping[str, Type]: - """Returns a mapping from algorithm names to the classes that implement them.""" - assert cls._algorithm_to_class_map_int is not None - return cls._algorithm_to_class_map_int - - @classmethod - @abstractmethod - def _cli_run(cls, ctx, instance): - raise NotImplementedError() - - -def import_all_submodules(path_str: str, package: str, excluded: Optional[Set[str]]=None) -> None: - """ - Given a path of a __init__.py file, find all the .py files in that directory and import them - relatively to a package. - - Parameters - ---------- - path_str : str - The path of a __init__.py file. - package : str - The package name that the modules should be imported relative to. - excluded : Optional[Set[str]] - A set of files not to include. If this is not provided, it defaults to set("__init__.py"). 
- """ - if excluded is None: - excluded = {"__init__.py"} - - path: Path = Path(path_str).parent - for entry in path.iterdir(): - if not entry.suffix.lower().endswith(".py"): - continue - if entry.name.lower() in excluded: - continue - - importlib.import_module(f".{entry.stem}", package) diff --git a/starfish/core/recipe/__init__.py b/starfish/core/recipe/__init__.py deleted file mode 100644 index d536fc101..000000000 --- a/starfish/core/recipe/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from .errors import ( - ConstructorError, - ConstructorExtraParameterWarning, - ExecutionError, - RecipeError, - RunInsufficientParametersError, - TypeInferenceError, -) -from .recipe import Recipe -from .runnable import Runnable diff --git a/starfish/core/recipe/cli.py b/starfish/core/recipe/cli.py deleted file mode 100644 index 3adb311d3..000000000 --- a/starfish/core/recipe/cli.py +++ /dev/null @@ -1,28 +0,0 @@ -from slicedimage.io import resolve_path_or_url - -from starfish.core.config import StarfishConfig -from starfish.core.starfish import starfish -from starfish.core.util import click -from .recipe import Recipe - - -@starfish.command("recipe") -@click.option("--recipe", required=True, type=str, metavar="RECIPE_PATH_OR_URL") -@click.option( - "--input", type=str, multiple=True, metavar="INPUT_FILE_PATH_OR_URL", - help="input file paths or urls to map to the recipe input parameters") -@click.option( - "--output", type=str, multiple=True, metavar="OUTPUT_FILE_PATH", - help="output file paths to write recipe outputs to") -@click.pass_context -def run_recipe(ctx, recipe, input, output): - """Runs a recipe with a given set of inputs and outputs.""" - config = StarfishConfig() - - backend, relativeurl, _ = resolve_path_or_url( - recipe, backend_config=config.slicedimage) - with backend.read_contextmanager(relativeurl) as fh: - recipe_str = fh.read() - - recipe_obj = Recipe(recipe_str, input, output) - recipe_obj.run_and_save() diff --git a/starfish/core/recipe/errors.py 
b/starfish/core/recipe/errors.py deleted file mode 100644 index 82f8a1ef3..000000000 --- a/starfish/core/recipe/errors.py +++ /dev/null @@ -1,30 +0,0 @@ -class RecipeWarning(RuntimeWarning): - pass - - -class RecipeError(Exception): - pass - - -class ConstructorExtraParameterWarning(RecipeWarning): - """Raised when a recipe contains parameters that an algorithms constructor does not expect.""" - - -class TypeInferenceError(RecipeError): - """Raised when we cannot infer the type of object an algorithm expects in its constructor or - its run method. This can be fixed by ensuring all the parameters to the constructor and the run - method have type hints.""" - - -class ConstructorError(RecipeError): - """Raised when there is an error raised during the construction of an algorithm class.""" - pass - - -class RunInsufficientParametersError(RecipeError): - """Raised when the recipe does not provide sufficient parameters for the run method.""" - - -class ExecutionError(RecipeError): - """Raised when there is an error raised during the execution of an algorithm.""" - pass diff --git a/starfish/core/recipe/filesystem.py b/starfish/core/recipe/filesystem.py deleted file mode 100644 index a1ac6ec35..000000000 --- a/starfish/core/recipe/filesystem.py +++ /dev/null @@ -1,108 +0,0 @@ -import enum -from typing import Any, Callable, Type - -from starfish.core.codebook.codebook import Codebook -from starfish.core.expression_matrix.expression_matrix import ExpressionMatrix -from starfish.core.imagestack.imagestack import ImageStack -from starfish.core.intensity_table.intensity_table import IntensityTable -from starfish.core.util.indirectfile import ( - convert, - GetCodebook, - GetCodebookFromExperiment, - GetImageStack, - GetImageStackFromExperiment, -) - - -def imagestack_convert(indirect_path_or_url: str) -> ImageStack: - """Converts a path or URL to an ImageStack. This supports the indirect syntax, where a user - provides a string like @[fov_name][image_name]. 
If the indirect - syntax is used, the experiment.json is automatically fetched and traversed to find the specified - image in the specified field of view.""" - return convert( - indirect_path_or_url, - [ - GetImageStack(), - GetImageStackFromExperiment(), - ], - ) - - -def codebook_convert(indirect_path_or_url: str) -> Codebook: - """Converts a path or URL to a Codebook. This supports the indirect syntax, where a user - provides a string like @. If the indirect syntax is used, the - experiment.json is automatically fetched to find the codebook.""" - return convert( - indirect_path_or_url, - [ - GetCodebook(), - GetCodebookFromExperiment(), - ], - ) - - -class FileTypes(enum.Enum): - """These are the filetypes supported as inputs and outputs for recipes. Each filetype is - associated with the implementing class, the method to invoke to load such a filetype, and the - method to invoke to save back to the filetype. - - The load method is expected to be called with a string, which is the file or url to load from, - and is expected to return an instantiated object. - - The save method is expected to be called with the object and a string, which is the path to - write the object to. 
- """ - IMAGESTACK = (ImageStack, imagestack_convert, ImageStack.export) - INTENSITYTABLE = (IntensityTable, IntensityTable.open_netcdf, IntensityTable.to_netcdf) - EXPRESSIONMATRIX = (ExpressionMatrix, ExpressionMatrix.load, ExpressionMatrix.save) - CODEBOOK = (Codebook, codebook_convert, Codebook.to_json) - - def __init__(self, cls: Type, loader: Callable[[str], Any], saver: Callable[[Any, str], None]): - self._cls = cls - self._load = loader - self._save = saver - - @property - def load(self) -> Callable[[str], Any]: - return self._load - - @property - def save(self) -> Callable[[Any, str], None]: - return self._save - - @staticmethod - def resolve_by_class(cls: Type) -> "FileTypes": - for member in FileTypes.__members__.values(): - if cls == member.value[0]: - return member - raise TypeError(f"filetype {cls} not supported.") - - @staticmethod - def resolve_by_instance(instance) -> "FileTypes": - for member in FileTypes.__members__.values(): - if isinstance(instance, member.value[0]): - return member - raise TypeError(f"filetype of {instance.__class__} not supported.") - - -class FileProvider: - """This is used to wrap paths or URLs that are passed into Runnables via the `file_inputs` magic - variable. This is so we can differentiate between strings and `file_inputs` values, which must - be first constructed into a starfish object via its loader.""" - def __init__(self, path_or_url: str) -> None: - self.path_or_uri = path_or_url - - def __str__(self): - return f"FileProvider(\"{self.path_or_uri}\")" - - -class TypedFileProvider: - """Like :py:class:`FileProvider`, this is used to wrap paths or URLs that are passed into - Runnables via the `file_inputs` magic variable. 
In this case, the object type has been - resolved by examining the type annotation.""" - def __init__(self, backing_file_provider: FileProvider, object_class: Type) -> None: - self.backing_file_provider = backing_file_provider - self.type = FileTypes.resolve_by_class(object_class) - - def load(self) -> Any: - return self.type.load(self.backing_file_provider.path_or_uri) diff --git a/starfish/core/recipe/recipe.py b/starfish/core/recipe/recipe.py deleted file mode 100644 index df81ed505..000000000 --- a/starfish/core/recipe/recipe.py +++ /dev/null @@ -1,234 +0,0 @@ -from typing import ( - AbstractSet, - Any, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Set, -) - -import numpy as np - -from starfish import image, spots -from starfish.types import Axes, Coordinates -from .filesystem import FileProvider, FileTypes -from .runnable import Runnable - - -class ExecutionComplete(Exception): - """Raised by :py:class:`Execution` when it is complete. We don't rely on catching StopIteration - because some underlying library may have raised that instead.""" - pass - - -class _Execution: - """Encompasses the state of a single execution of a recipe.""" - def __init__( - self, - runnable_sequence: Sequence[Runnable], - output_runnables: Sequence[Runnable], - output_paths: Sequence[str], - ) -> None: - self._runnable_sequence = iter(runnable_sequence) - self._output_runnables = output_runnables - self._output_paths = output_paths - - # build a map between each runnable and its dependents. each time a runnable completes, we - # go through each of its dependencies to see if its results are still needed. 
- runnable_dependents: MutableMapping[Runnable, Set[Runnable]] = dict() - for output_runnable in self._output_runnables: - _Execution._build_graph(output_runnable, runnable_dependents) - self.runnable_dependents: Mapping[Runnable, AbstractSet[Runnable]] = runnable_dependents - - # completed results - self._completed_runnables: Set[Runnable] = set() - self._completed_results: MutableMapping[Runnable, Any] = dict() - - def _run_one_tick(self) -> None: - """Run one tick of the execution graph. Raises StopIteration if it's done.""" - try: - runnable = next(self._runnable_sequence) - except StopIteration as ex: - raise ExecutionComplete from ex - - result = runnable.run(self._completed_results) - - # record what's been done. - self._completed_runnables.add(runnable) - self._completed_results[runnable] = result - - # examine all the dependencies, and discard the results if no one else needs it. - for dependency in runnable.runnable_dependencies: - if dependency in self._output_runnables: - # it's required by the outputs, so preserve this. - continue - - for dependent in self.runnable_dependents[dependency]: - if dependent not in self._completed_runnables: - # someone still needs this runnable's result. - break - else: - # every dependent is complete. drop the result. 
- del self._completed_results[dependency] - - def _save(self) -> None: - for runnable, output_path in zip(self._output_runnables, self._output_paths): - # get the result - result = self._completed_results[runnable] - - filetype = FileTypes.resolve_by_instance(result) - filetype.save(result, output_path) - - def run_and_save(self) -> None: - """Run execution graph to completion and save the results to disk.""" - while True: - try: - self._run_one_tick() - except ExecutionComplete: - break - - self._save() - - @staticmethod - def _build_graph( - runnable: Runnable, - runnable_dependents: MutableMapping[Runnable, Set[Runnable]], - seen_runnables: Optional[Set[Runnable]]=None, - ) -> None: - if seen_runnables is None: - seen_runnables = set() - - if runnable in seen_runnables: - return - seen_runnables.add(runnable) - - for dependency in runnable.runnable_dependencies: - # mark ourselves a dependent of each of our dependencies. - if dependency not in runnable_dependents: - runnable_dependents[dependency] = set() - runnable_dependents[dependency].add(runnable) - _Execution._build_graph(dependency, runnable_dependents, seen_runnables) - - -class OrderedSequence: - def __init__(self) -> None: - self._sequence: MutableSequence[Runnable] = list() - - def __call__(self, *args, **kwargs): - result = Runnable(*args, **kwargs) - self._sequence.append(result) - return result - - @property - def sequence(self) -> Sequence[Runnable]: - return self._sequence - - -class Recipe: - """Recipes are snippets of python code that describe an image processing pipeline. Executing a - recipe requires pairing the recipe code with locations of input files and the locations to write - outputs to. 
- - A few additional variables are injected into the recipe's scope: - - ==================== ========================================================================== - Variable name Description - -------------------- -------------------------------------------------------------------------- - file_inputs A 0-based array with the locations of input files provided for a recipe's - execution. - file_outputs Any objects written to this 0-based array will be written out to disk at - the end of a recipe's execution. - np A reference to the numpy library. - Axes A reference to the :py:class:`~starfish.types.Axes` enum. - Coordinates A reference to the :py:class:`~starfish.types.Coordinates` enum. - ApplyTransform A reference to :py:class:`~starfish.image.ApplyTransform`. - Filter A reference to :py:class:`~starfish.image.Filter`. - LearnTransform A reference to :py:class:`~starfish.image.LearnTransform`. - Segment A reference to :py:class:`~starfish.image.Segment`. - Decode A reference to :py:class:`~starfish.spots.Decode`. - DetectPixels A reference to :py:class:`~starfish.spots.DetectPixels`. - DetectSpots A reference to :py:class:`~starfish.spots.DetectSpots`. - AssignTargets A reference to :py:class:`~starfish.spots.AssignTargets`. - ==================== ========================================================================== - - Recipes also have access to a ``compute`` method: - :code:`compute(algorithm_class, parameters_to_run_method..., - constructor_argument_name=constructor_argument_value....)` - - The first parameter is the algorithm class that implements a pipeline algorithm. It should - subclass :py:class:`~starfish.pipeline.AlgorithmBase`. - - The subsequent positional parameters are the parameters passed to the ``run()`` method of the - algorithm class. - - The keyword parameters (parameters where a name and a value are provided) are the parameters - passed to the algorithm class's constructor. 
- - Parameters can include input files that are passed in as a member of the ``file_inputs`` array. - It can also include the output of previous ``compute()`` method calls. - - The return value of ``compute()`` method calls is a future, i.e., a promise for the data. It - should not be assumed that the resulting data will be available during the recipe's scope. - """ - def __init__( - self, - recipe_str: str, - input_paths_or_urls: Sequence[str], - output_paths: Sequence[str], - ): - ordered_sequence = OrderedSequence() - file_outputs: MutableMapping[int, Runnable] = {} - recipe_scope = { - "file_inputs": [ - FileProvider(input_path_or_url) - for input_path_or_url in input_paths_or_urls - ], - "compute": ordered_sequence, - "file_outputs": file_outputs, - } - - # inject numpy. - recipe_scope['np'] = np - - component: Any - - # inject some useful starfish classes. - for component in (Axes, Coordinates): - recipe_scope[component.__name__] = component - - # inject all the pipeline components. - for component in ( - image.ApplyTransform, image.Filter, image.LearnTransform, image.Segment): - recipe_scope[component.__name__] = component - for component in ( - spots.Decode, spots.DetectPixels, spots.DetectSpots, spots.AssignTargets): - recipe_scope[component.__name__] = component - - ast = compile(recipe_str, "", "exec") - exec(ast, recipe_scope) - - assert len(file_outputs) == len(output_paths), \ - "Recipe generates more outputs than output paths provided!" - - # verify that the outputs are sequential. 
- ordered_outputs: MutableSequence[Runnable] = list() - for ix in range(len(file_outputs)): - assert ix in file_outputs, \ - f"file_outputs[{ix}] is not set" - assert isinstance(file_outputs[ix], Runnable), \ - f"file_outputs[{ix}] is not the result of a compute(..)" - ordered_outputs.append(file_outputs[ix]) - - self._runnable_order = ordered_sequence.sequence - self._outputs: Sequence[Runnable] = ordered_outputs - self._output_paths = output_paths - - def _execution(self) -> _Execution: - return _Execution(self._runnable_order, self._outputs, self._output_paths) - - def run_and_save(self): - """Run recipe to completion and save the results to disk.""" - execution = self._execution() - execution.run_and_save() diff --git a/starfish/core/recipe/runnable.py b/starfish/core/recipe/runnable.py deleted file mode 100644 index e0417a2c9..000000000 --- a/starfish/core/recipe/runnable.py +++ /dev/null @@ -1,247 +0,0 @@ -import inspect -import warnings -from typing import ( - Any, - Callable, - cast, - Mapping, - MutableMapping, - MutableSequence, - Sequence, - Set, - Type, -) - -from starfish.core.pipeline.algorithmbase import AlgorithmBase -from .errors import ( - ConstructorError, - ConstructorExtraParameterWarning, - ExecutionError, - RunInsufficientParametersError, - TypeInferenceError, -) -from .filesystem import FileProvider, TypedFileProvider - - -class Runnable: - """Runnable represents a single invocation of a pipeline component, with a specific algorithm - implementation. For arguments to the algorithm's constructor and run method, it can accept - :py:class:`~starfish.recipe.filesystem.FileProvider` objects, which represent a file path or - url. For arguments to the algorithm's run method, it can accept the results of other Runnables. - - One can compose any starfish pipeline run using a directed acyclic graph of Runnables objects. 
- """ - def __init__( - self, - algorithm_cls: Type, - *inputs, - **algorithm_options - ) -> None: - self._pipeline_component_cls: Type = \ - cast(AlgorithmBase, algorithm_cls).get_pipeline_component_class() - self._algorithm_cls: Type = algorithm_cls - self._raw_inputs = inputs - self._raw_algorithm_options = algorithm_options - - # retrieve the actual __init__ method - signature = Runnable._get_actual_method_signature( - self._algorithm_cls.__init__) - formatted_algorithm_options = self._format_algorithm_constructor_arguments( - signature, self._raw_algorithm_options, self.__str__) - - try: - self._algorithm_instance: AlgorithmBase = self._algorithm_cls( - **formatted_algorithm_options) - except Exception as ex: - raise ConstructorError(f"Error instantiating the algorithm for {str(self)}") from ex - - # retrieve the actual run method - signature = Runnable._get_actual_method_signature( - self._algorithm_instance.run) # type: ignore - self._inputs = self._format_run_arguments(signature, self._raw_inputs, self.__str__) - - @staticmethod - def _get_actual_method_signature(run_method: Callable) -> inspect.Signature: - if hasattr(run_method, "__closure__"): - # it's a closure, probably because of AlgorithmBaseType.run_with_logging. Unwrap to - # find the underlying method. - closure = run_method.__closure__ # type: ignore - if closure is not None: - run_method = closure[0].cell_contents - - return inspect.signature(run_method) - - @staticmethod - def _format_algorithm_constructor_arguments( - constructor_signature: inspect.Signature, - algorithm_options: Mapping[str, Any], - str_callable: Callable[[], str], - ) -> Mapping[str, Any]: - """Given the constructor's signature and a mapping of keyword argument names to values, - format them such that the constructor can be invoked. - - Some parameters may be :py:class:`starfish.recipe.filesystem.FileProvider` instances. 
Use - the type hints in the constructor's signature to identify the expected file type, and load - them into memory accordingly. - - Parameters - ---------- - constructor_signature : inspect.Signature - The signature for the constructor. - algorithm_options : Mapping[str, Any] - The parameters for the constructor, as provided to the Runnable. - str_callable : Callable[[], str] - A callable that can be invoked to provide a user-friendly representation of the - Runnable, in case any errors or warnings are generated. - - Returns - ------- - Mapping[str, Any] : The parameters for the constructor, ready to be passed into the - constructor. - """ - parameters = constructor_signature.parameters - - formatted_algorithm_options: MutableMapping[str, Any] = {} - for algorithm_option_name, algorithm_option_value in algorithm_options.items(): - if isinstance(algorithm_option_value, Runnable): - raise RuntimeError("Runnable's constructors cannot depend on another runnable") - - try: - option_class = parameters[algorithm_option_name].annotation - except KeyError: - warnings.warn( - f"Constructor for {str_callable()} does not have an explicitly typed " - + f"parameter {algorithm_option_name}.", - category=ConstructorExtraParameterWarning, - ) - continue - - if isinstance(algorithm_option_value, FileProvider): - try: - provider = TypedFileProvider(algorithm_option_value, option_class) - except TypeError as ex: - raise TypeInferenceError( - f"Error inferring the types for the parameters to the algorithm's" - + f" constructor for {str_callable()}") from ex - formatted_algorithm_options[algorithm_option_name] = provider.load() - else: - formatted_algorithm_options[algorithm_option_name] = algorithm_option_value - - return formatted_algorithm_options - - @staticmethod - def _format_run_arguments( - run_signature: inspect.Signature, - inputs: Sequence, - str_callable: Callable[[], str], - ) -> Sequence: - """Given the run method's signature and a sequence of parameters, format them such 
that the - run method can be invoked. - - Some parameters may be :py:class:`starfish.recipe.filesystem.FileProvider` instances. Use - the type hints in the run method's signature to identify the expected file type, and load - them into memory accordingly. - - Parameters that are the outputs of other Runnables are not resolved to their values until - the run method is invoked. Therefore, the sequence of parameters returned may include - the dependent Runnable objects. - - Parameters - ---------- - run_signature : inspect.Signature - The signature for the run method. - inputs : Sequence - The parameters for the run method, as provided to the Runnable. - str_callable : Callable[[], str] - A callable that can be invoked to provide a user-friendly representation of the - Runnable, in case any errors or warnings are generated. - - Returns - ------- - Sequence : The parameters for the run method, ready to be passed into the constructor, - except for dependent Runnables, which are resolved later. - """ - formatted_inputs: MutableSequence = [] - - keys_iter = iter(run_signature.parameters.keys()) - inputs_iter = iter(inputs) - - # first parameter to the run method should be "self" - assert next(keys_iter) == "self" - - # match up the parameters as best as we can. - for _input, key in zip(inputs_iter, keys_iter): - if isinstance(_input, FileProvider): - annotation = run_signature.parameters[key].annotation - try: - provider = TypedFileProvider(_input, annotation) - except TypeError as ex: - raise TypeInferenceError( - f"Error inferring the types for the parameters to the algorithm's" - + f" run method for {str_callable()}") from ex - formatted_inputs.append(provider) - else: - formatted_inputs.append(_input) - - # are there any parameters left in the signature? if so, they must have default values - # because we don't have values. 
- no_default = inspect._empty # type: ignore - - for key in keys_iter: - if (run_signature.parameters[key].default == no_default - and run_signature.parameters[key].kind not in ( - inspect.Parameter.VAR_POSITIONAL, - inspect.Parameter.VAR_KEYWORD, - )): - raise RunInsufficientParametersError(f"No value for parameter {key}") - - return formatted_inputs - - @property - def runnable_dependencies(self) -> Set["Runnable"]: - """Retrieves a set of Runnables that this Runnable depends on.""" - return set(runnable for runnable in self._inputs if isinstance(runnable, Runnable)) - - def run(self, previous_results: Mapping["Runnable", Any]) -> Any: - """Invoke the run method. Results for dependent Runnables are retrieved from the - `previous_results` mapping. - - Parameters - ---------- - previous_results : Mapping[Runnable, Any] - The results calculated thus far in an execution run. - - Returns - ------- - The result from invoking the run method. - """ - inputs = list() - for _input in self._inputs: - if isinstance(_input, Runnable): - inputs.append(previous_results[_input]) - elif isinstance(_input, TypedFileProvider): - inputs.append(_input.load()) - else: - inputs.append(_input) - try: - return self._algorithm_instance.run(*inputs) # type: ignore - except Exception as ex: - raise ExecutionError(f"Error running the algorithm for {str(self)}") from ex - - def __str__(self): - inputs_arr = [""] - inputs_arr.extend([str(raw_input) for raw_input in self._raw_inputs]) - algorithm_options_arr = [""] - algorithm_options_arr.extend([ - f"{algorithm_option_name}={str(algorithm_option_value)}" - for algorithm_option_name, algorithm_option_value in - self._raw_algorithm_options.items() - ]) - - inputs_str = ", ".join(inputs_arr) - algorithm_options_str = ", ".join(algorithm_options_arr) - - return (f"compute(" - + f"{self._pipeline_component_cls.__name__}.{self._algorithm_cls.__name__}" - + f"{inputs_str}" - + f"{algorithm_options_str})") diff --git 
a/starfish/core/recipe/test/fakefilter.py b/starfish/core/recipe/test/fakefilter.py deleted file mode 100644 index 69eb6f017..000000000 --- a/starfish/core/recipe/test/fakefilter.py +++ /dev/null @@ -1,81 +0,0 @@ -from starfish import ImageStack -from starfish.core.image._filter._base import FilterAlgorithmBase -from starfish.core.util import click - - -class SimpleFilterAlgorithm(FilterAlgorithmBase): - def __init__(self, multiplicand: float): - self.multiplicand = multiplicand - - def run(self, stack: ImageStack, *args) -> ImageStack: - numpy_array = stack.xarray - numpy_array = numpy_array * self.multiplicand - return ImageStack.from_numpy(numpy_array) - - @staticmethod - @click.command("SimpleFilterAlgorithm") - @click.option( - "--multiplicand", default=1.0, type=float) - @click.pass_context - def _cli(ctx, multiplicand): - ctx.obj["component"]._cli_run(ctx, SimpleFilterAlgorithm(multiplicand=multiplicand)) - - -class AdditiveFilterAlgorithm(FilterAlgorithmBase): - def __init__(self, additive: ImageStack): - self.additive = additive - - def run(self, stack: ImageStack, *args) -> ImageStack: - numpy_array = stack.xarray - numpy_array = numpy_array + stack.xarray - return ImageStack.from_numpy(numpy_array) - - @staticmethod - @click.command("AdditiveFilterAlgorithm") - @click.option( - "--imagestack", type=click.Path(exists=True)) - @click.pass_context - def _cli(ctx, imagestack): - ctx.obj["component"]._cli_run( - ctx, - AdditiveFilterAlgorithm(additive=ImageStack.from_path_or_url(imagestack))) - - -class FilterAlgorithmWithMissingConstructorTyping(FilterAlgorithmBase): - def __init__(self, additive): - self.additive = additive - - def run(self, stack: ImageStack, *args) -> ImageStack: - numpy_array = stack.xarray - numpy_array = numpy_array + stack.xarray - return ImageStack.from_numpy(numpy_array) - - @staticmethod - @click.command("FilterAlgorithmWithMissingConstructorTyping") - @click.option( - "--imagestack", type=click.Path(exists=True)) - 
@click.pass_context - def _cli(ctx, imagestack): - ctx.obj["component"]._cli_run( - ctx, - FilterAlgorithmWithMissingConstructorTyping( - additive=ImageStack.from_path_or_url(imagestack))) - - -class FilterAlgorithmWithMissingRunTyping(FilterAlgorithmBase): - def __init__(self, multiplicand: float): - self.multiplicand = multiplicand - - def run(self, stack, *args) -> ImageStack: - numpy_array = stack.xarray - numpy_array = numpy_array * self.multiplicand - return ImageStack.from_numpy(numpy_array) - - @staticmethod - @click.command("FilterAlgorithmWithMissingRunTyping") - @click.option( - "--multiplicand", default=1.0, type=float) - @click.pass_context - def _cli(ctx, multiplicand): - ctx.obj["component"]._cli_run( - ctx, FilterAlgorithmWithMissingRunTyping(multiplicand=multiplicand)) diff --git a/starfish/core/recipe/test/test_recipe.py b/starfish/core/recipe/test/test_recipe.py deleted file mode 100644 index 0cb8c6616..000000000 --- a/starfish/core/recipe/test/test_recipe.py +++ /dev/null @@ -1,147 +0,0 @@ -import os -import tempfile -import warnings - -import numpy as np -import pytest - -from starfish import ImageStack -from . 
import fakefilter # noqa: F401 -from ..errors import ConstructorError, ConstructorExtraParameterWarning -from ..recipe import Recipe - - -BASE_EXPECTED = np.array([ - [0.227543, 0.223117, 0.217014, 0.221241, 0.212863, 0.211963, 0.210575, - 0.198611, 0.194827, 0.181964], - [0.216617, 0.214710, 0.212467, 0.218158, 0.211429, 0.210361, 0.205737, - 0.190814, 0.182010, 0.165667], - [0.206744, 0.204685, 0.208774, 0.212909, 0.215274, 0.206180, 0.196674, - 0.179080, 0.169207, 0.157549], - [0.190845, 0.197131, 0.188540, 0.195361, 0.196765, 0.200153, 0.183627, - 0.167590, 0.159930, 0.150805], - [0.181231, 0.187457, 0.182910, 0.179416, 0.175357, 0.172137, 0.165072, - 0.156344, 0.153735, 0.150378], - [0.169924, 0.184604, 0.182422, 0.174441, 0.159823, 0.157229, 0.157259, - 0.151690, 0.147265, 0.139940], - [0.164874, 0.169467, 0.178012, 0.173129, 0.161425, 0.155978, 0.152712, - 0.150286, 0.145159, 0.140658], - [0.164508, 0.165042, 0.171420, 0.174990, 0.162951, 0.152422, 0.149325, - 0.151675, 0.141588, 0.139010], - [0.162448, 0.156451, 0.158419, 0.162722, 0.160388, 0.152865, 0.142885, - 0.142123, 0.140093, 0.135836], - [0.150072, 0.147295, 0.145495, 0.153216, 0.156085, 0.149981, 0.145571, - 0.141878, 0.138857, 0.136965]], - dtype=np.float32) -URL = "https://d2nhj9g34unfro.cloudfront.net/20181005/ISS-TEST/fov_001/hybridization.json" - - -def test_simple_recipe(): - """Test that a simple recipe can execute correctly.""" - recipe_str = """ -file_outputs[0] = compute(Filter.SimpleFilterAlgorithm, file_inputs[0], multiplicand=0.5) - """ - - with tempfile.TemporaryDirectory() as tempdir: - output_path = os.path.join(tempdir, "output.json") - recipe = Recipe(recipe_str, [URL], [output_path]) - - execution = recipe._execution() - execution.run_and_save() - - result_stack = ImageStack.from_path_or_url(output_path) - assert np.allclose( - BASE_EXPECTED * .5, - result_stack.xarray[2, 2, 0, 40:50, 40:50] - ) - - -def test_chained_recipe(): - """Test that a recipe with a complex graph can 
execute correctly.""" - recipe_str = """ -a = compute(Filter.SimpleFilterAlgorithm, file_inputs[0], multiplicand=0.5) -b = compute(Filter.SimpleFilterAlgorithm, a, multiplicand=.3) -file_outputs[0] = compute(Filter.SimpleFilterAlgorithm, b, multiplicand=0.2) -c = compute(Filter.SimpleFilterAlgorithm, a, multiplicand=.2) -file_outputs[1] = compute(Filter.SimpleFilterAlgorithm, c, multiplicand=.3) - """ - - with tempfile.TemporaryDirectory() as tempdir: - output_0_path = os.path.join(tempdir, "output_0.json") - output_1_path = os.path.join(tempdir, "output_1.json") - recipe = Recipe(recipe_str, [URL], [output_0_path, output_1_path]) - - execution = recipe._execution() - execution.run_and_save() - - for path in (output_0_path, output_1_path): - result_stack = ImageStack.from_path_or_url(path) - assert np.allclose( - BASE_EXPECTED * .03, - result_stack.xarray[2, 2, 0, 40:50, 40:50] - ) - - -def test_garbage_collection(): - """Test that recipe execution discards intermediate results that are no longer necessary.""" - recipe_str = """ -a = compute(Filter.SimpleFilterAlgorithm, file_inputs[0], multiplicand=0.5) -b = compute(Filter.SimpleFilterAlgorithm, a, multiplicand=0.3) -c = compute(Filter.SimpleFilterAlgorithm, b, multiplicand=4.) 
-d = compute(Filter.SimpleFilterAlgorithm, c, multiplicand=0.5) -file_outputs[0] = compute(Filter.SimpleFilterAlgorithm, d, multiplicand=0.5) - """ - - with tempfile.TemporaryDirectory() as tempdir: - output_path = os.path.join(tempdir, "output.json") - recipe = Recipe(recipe_str, [URL], [output_path]) - - execution = recipe._execution() - execution._run_one_tick() - execution._run_one_tick() - - assert len(execution._completed_results) == 1 - - execution.run_and_save() - - result_stack = ImageStack.from_path_or_url(output_path) - assert np.allclose( - BASE_EXPECTED * .15, - result_stack.xarray[2, 2, 0, 40:50, 40:50] - ) - - -def test_recipe_constructor_missing_args(): - """Test that recipe construction detects missing arguments to the constructor.""" - recipe_str = """ -file_output[0] = compute(Filter.SimpleFilterAlgorithm, file_inputs[0]) -""" - - with tempfile.TemporaryDirectory() as tempdir: - output_path = os.path.join(tempdir, "output.json") - with pytest.raises(ConstructorError): - Recipe(recipe_str, [URL], [output_path]) - - -def test_recipe_constructor_extra_args(): - """Test that recipe construction detects missing arguments to the constructor.""" - recipe_str = """ -file_outputs[0] = compute(Filter.SimpleFilterAlgorithm, file_inputs[0], multiplicand=.5, x=1) -""" - - with tempfile.TemporaryDirectory() as tempdir: - output_path = os.path.join(tempdir, "output.json") - with warnings.catch_warnings(record=True) as w: - recipe = Recipe(recipe_str, [URL], [output_path]) - - assert len(w) == 1 - assert issubclass(w[-1].category, ConstructorExtraParameterWarning) - - execution = recipe._execution() - execution.run_and_save() - - result_stack = ImageStack.from_path_or_url(output_path) - assert np.allclose( - BASE_EXPECTED * .5, - result_stack.xarray[2, 2, 0, 40:50, 40:50] - ) diff --git a/starfish/core/recipe/test/test_runnable.py b/starfish/core/recipe/test/test_runnable.py deleted file mode 100644 index 0a4a12c83..000000000 --- 
a/starfish/core/recipe/test/test_runnable.py +++ /dev/null @@ -1,250 +0,0 @@ -import warnings - -import numpy as np -import pytest - -from starfish import ImageStack -from .fakefilter import ( - AdditiveFilterAlgorithm, - FilterAlgorithmWithMissingConstructorTyping, - FilterAlgorithmWithMissingRunTyping, - SimpleFilterAlgorithm, -) -from ..errors import ( - ConstructorError, - ConstructorExtraParameterWarning, - ExecutionError, - RunInsufficientParametersError, - TypeInferenceError, -) -from ..filesystem import FileProvider -from ..runnable import Runnable - - -BASE_EXPECTED = np.array([ - [0.227543, 0.223117, 0.217014, 0.221241, 0.212863, 0.211963, 0.210575, - 0.198611, 0.194827, 0.181964], - [0.216617, 0.214710, 0.212467, 0.218158, 0.211429, 0.210361, 0.205737, - 0.190814, 0.182010, 0.165667], - [0.206744, 0.204685, 0.208774, 0.212909, 0.215274, 0.206180, 0.196674, - 0.179080, 0.169207, 0.157549], - [0.190845, 0.197131, 0.188540, 0.195361, 0.196765, 0.200153, 0.183627, - 0.167590, 0.159930, 0.150805], - [0.181231, 0.187457, 0.182910, 0.179416, 0.175357, 0.172137, 0.165072, - 0.156344, 0.153735, 0.150378], - [0.169924, 0.184604, 0.182422, 0.174441, 0.159823, 0.157229, 0.157259, - 0.151690, 0.147265, 0.139940], - [0.164874, 0.169467, 0.178012, 0.173129, 0.161425, 0.155978, 0.152712, - 0.150286, 0.145159, 0.140658], - [0.164508, 0.165042, 0.171420, 0.174990, 0.162951, 0.152422, 0.149325, - 0.151675, 0.141588, 0.139010], - [0.162448, 0.156451, 0.158419, 0.162722, 0.160388, 0.152865, 0.142885, - 0.142123, 0.140093, 0.135836], - [0.150072, 0.147295, 0.145495, 0.153216, 0.156085, 0.149981, 0.145571, - 0.141878, 0.138857, 0.136965]], - dtype=np.float32) -URL = "https://d2nhj9g34unfro.cloudfront.net/20181005/ISS-TEST/fov_001/hybridization.json" - - -def test_str(): - """Verify that we can get a sane string for a runnable.""" - filter_runnable = Runnable( - SimpleFilterAlgorithm, - FileProvider(URL), - multiplicand=.5, - ) - assert str(filter_runnable) == 
("compute(Filter.SimpleFilterAlgorithm," - + f" FileProvider(\"{URL}\"), multiplicand=0.5)") - - -def test_constructor_error(): - """Verify that we get a properly typed error when the constructor does not execute correctly. - In this case, we do not provide enough parameters to `SimpleFilterAlgorithm`.""" - with pytest.raises(ConstructorError): - Runnable( - SimpleFilterAlgorithm, - FileProvider(URL), - ) - - -def test_execution_error(): - """Verify that we get a properly typed error when the constructor does not execute correctly.""" - filter_runnable = Runnable( - SimpleFilterAlgorithm, - FileProvider(URL), - multiplicand="abacadabra", - ) - with pytest.raises(ExecutionError): - filter_runnable.run({}) - - -def test_constructor_type_inference_error(): - """Verify that we get a properly typed error when we cannot properly infer the type for one of - the constructor's parameters.""" - with pytest.raises(TypeInferenceError): - Runnable( - FilterAlgorithmWithMissingConstructorTyping, - FileProvider(URL), - additive=FileProvider(URL), - ) - - -def test_run_type_inference_error(): - """Verify that we get a properly typed error when we cannot properly infer the type for one of - the run method's parameters. 
In this case, `FilterAlgorithmWithMissingRunTyping` does not - provide a type hint for one of its constructor arguments.""" - with pytest.raises(TypeInferenceError): - Runnable( - FilterAlgorithmWithMissingRunTyping, - FileProvider(URL), - multiplicand=FileProvider(URL), - ) - - -def test_extra_constructor_parameter_fileprovider(): - """Verify that we raise a warning when we provide extra parameters that are fileproviders to an - algorithm's constructor.""" - with warnings.catch_warnings(record=True) as w: - filter_runnable = Runnable( - SimpleFilterAlgorithm, - FileProvider(URL), - multiplicand=.5, - additive=FileProvider(URL), - ) - assert len(w) == 1 - assert issubclass(w[-1].category, ConstructorExtraParameterWarning) - - result = filter_runnable.run({}) - assert isinstance(result, ImageStack) - - # pick a random part of the filtered image and assert on it - assert result.xarray.dtype == np.float32 - - assert np.allclose( - BASE_EXPECTED * .5, - result.xarray[2, 2, 0, 40:50, 40:50] - ) - - -def test_extra_constructor_parameter_non_fileprovider(): - """Verify that we raise a warning when we provide extra parameters that are not fileproviders - to an algorithm's constructor.""" - with warnings.catch_warnings(record=True) as w: - filter_runnable = Runnable( - SimpleFilterAlgorithm, - FileProvider(URL), - multiplicand=.5, - additive=.5, - ) - assert len(w) == 1 - assert issubclass(w[-1].category, ConstructorExtraParameterWarning) - - result = filter_runnable.run({}) - assert isinstance(result, ImageStack) - - # pick a random part of the filtered image and assert on it - assert result.xarray.dtype == np.float32 - - assert np.allclose( - BASE_EXPECTED * .5, - result.xarray[2, 2, 0, 40:50, 40:50] - ) - - -def test_run_insufficient_parameters(): - """Verify that we can run a single runnable and get its result. 
- """ - with pytest.raises(RunInsufficientParametersError): - Runnable( - SimpleFilterAlgorithm, - multiplicand=.5, - ) - - -def test_run(): - """Verify that we can run a single runnable and get its result. - """ - filter_runnable = Runnable( - SimpleFilterAlgorithm, - FileProvider(URL), - multiplicand=.5, - ) - result = filter_runnable.run({}) - assert isinstance(result, ImageStack) - - # pick a random part of the filtered image and assert on it - assert result.xarray.dtype == np.float32 - - assert np.allclose( - BASE_EXPECTED * .5, - result.xarray[2, 2, 0, 40:50, 40:50] - ) - - -def test_chained_run(): - """Verify that we can run a runnable that depends on another runnable. - """ - dependency_runnable = Runnable( - SimpleFilterAlgorithm, - FileProvider(URL), - multiplicand=.5, - ) - result = dependency_runnable.run({}) - assert isinstance(result, ImageStack) - - filter_runnable = Runnable( - SimpleFilterAlgorithm, - dependency_runnable, - multiplicand=2.0, - ) - result = filter_runnable.run({dependency_runnable: result}) - assert isinstance(result, ImageStack) - - # pick a random part of the filtered image and assert on it - assert result.xarray.dtype == np.float32 - - assert np.allclose( - BASE_EXPECTED, - result.xarray[2, 2, 0, 40:50, 40:50] - ) - - -def test_chained_run_result_not_present(): - """Verify that we can run a runnable that depends on another runnable, but the results are not - present. 
- """ - dependency_runnable = Runnable( - SimpleFilterAlgorithm, - FileProvider(URL), - multiplicand=.5, - ) - result = dependency_runnable.run({}) - assert isinstance(result, ImageStack) - - filter_runnable = Runnable( - SimpleFilterAlgorithm, - dependency_runnable, - multiplicand=2.0, - ) - with pytest.raises(KeyError): - filter_runnable.run({}) - - -def test_load_data_for_constructor(): - """Verify that we can properly load up data from a FileProvider that is required for the - constructor.""" - filter_runnable = Runnable( - AdditiveFilterAlgorithm, - FileProvider(URL), - additive=FileProvider(URL), - ) - result = filter_runnable.run({}) - assert isinstance(result, ImageStack) - - # pick a random part of the filtered image and assert on it - assert result.xarray.dtype == np.float32 - - assert np.allclose( - BASE_EXPECTED * 2, - result.xarray[2, 2, 0, 40:50, 40:50] - ) diff --git a/starfish/core/spots/AssignTargets/__init__.py b/starfish/core/spots/AssignTargets/__init__.py new file mode 100644 index 000000000..87cefc867 --- /dev/null +++ b/starfish/core/spots/AssignTargets/__init__.py @@ -0,0 +1 @@ +from .label import Label diff --git a/starfish/core/spots/AssignTargets/_base.py b/starfish/core/spots/AssignTargets/_base.py new file mode 100644 index 000000000..09f3b030b --- /dev/null +++ b/starfish/core/spots/AssignTargets/_base.py @@ -0,0 +1,24 @@ +from abc import abstractmethod + +import numpy as np + +from starfish.core.intensity_table.intensity_table import IntensityTable +from starfish.core.pipeline.algorithmbase import AlgorithmBase + + +class AssignTargetsAlgorithm(metaclass=AlgorithmBase): + """ + AssignTargets assigns cell IDs to detected spots using an IntensityTable and + SegmentationMaskCollection. + """ + + @abstractmethod + def run( + self, + label_image: np.ndarray, + intensity_table: IntensityTable, + verbose: bool=False, + in_place: bool=False, + ) -> IntensityTable: + """Performs target (e.g. 
gene) assignment given the spots and the regions.""" + raise NotImplementedError() diff --git a/starfish/core/spots/_assign_targets/label.py b/starfish/core/spots/AssignTargets/label.py similarity index 93% rename from starfish/core/spots/_assign_targets/label.py rename to starfish/core/spots/AssignTargets/label.py index 6164e547b..0c143aacf 100644 --- a/starfish/core/spots/_assign_targets/label.py +++ b/starfish/core/spots/AssignTargets/label.py @@ -3,7 +3,6 @@ from starfish.core.intensity_table.intensity_table import IntensityTable from starfish.core.segmentation_mask import SegmentationMaskCollection from starfish.core.types import Features -from starfish.core.util import click from ._base import AssignTargetsAlgorithm @@ -77,9 +76,3 @@ def run( """ return self._assign(masks, intensity_table, in_place=in_place) - - @staticmethod - @click.command("Label") - @click.pass_context - def _cli(ctx): - ctx.obj["component"]._cli_run(ctx, Label()) diff --git a/starfish/core/spots/Decode/__init__.py b/starfish/core/spots/Decode/__init__.py new file mode 100644 index 000000000..eff34d571 --- /dev/null +++ b/starfish/core/spots/Decode/__init__.py @@ -0,0 +1,2 @@ +from .metric_decoder import MetricDistance +from .per_round_max_channel_decoder import PerRoundMaxChannel diff --git a/starfish/core/spots/Decode/_base.py b/starfish/core/spots/Decode/_base.py new file mode 100644 index 000000000..c429e3d21 --- /dev/null +++ b/starfish/core/spots/Decode/_base.py @@ -0,0 +1,12 @@ +from abc import abstractmethod + +from starfish.core.intensity_table.intensity_table import IntensityTable +from starfish.core.pipeline.algorithmbase import AlgorithmBase + + +class DecodeAlgorithmBase(metaclass=AlgorithmBase): + + @abstractmethod + def run(self, intensities: IntensityTable, *args): + """Performs decoding on the spots found, using the codebook specified.""" + raise NotImplementedError() diff --git a/starfish/core/spots/_decode/metric_decoder.py b/starfish/core/spots/Decode/metric_decoder.py 
similarity index 69% rename from starfish/core/spots/_decode/metric_decoder.py rename to starfish/core/spots/Decode/metric_decoder.py index d68e61f1e..41cc2830d 100644 --- a/starfish/core/spots/_decode/metric_decoder.py +++ b/starfish/core/spots/Decode/metric_decoder.py @@ -1,7 +1,6 @@ from starfish.core.codebook.codebook import Codebook from starfish.core.intensity_table.intensity_table import IntensityTable from starfish.core.types import Number -from starfish.core.util import click from ._base import DecodeAlgorithmBase @@ -70,30 +69,3 @@ def run( norm_order=self.norm_order, metric=self.metric, ) - - @staticmethod - @click.command("MetricDistance") - @click.option( - "--max-distance", type=float, - help="for a detected spot to be assigned a target, it must be within this distance the " - "nearest target" - ) - @click.option("--min-intensity", type=float, help="minimum intensity for a spot to be decoded") - @click.option( - "--norm-order", type=int, default=2, - help="norm to equalize the magnitudes of codes and targets" - ) - @click.option( - "--metric", type=str, default="euclidean", help="metric used to calculate distance" - ) - @click.pass_context - def _cli(ctx, max_distance, min_intensity, norm_order, metric): - codebook = ctx.obj["codebook"] - instance = MetricDistance( - codebook=codebook, - max_distance=max_distance, - min_intensity=min_intensity, - norm_order=norm_order, - metric=metric - ) - ctx.obj["component"]._cli_run(ctx, instance) diff --git a/starfish/core/spots/_decode/per_round_max_channel_decoder.py b/starfish/core/spots/Decode/per_round_max_channel_decoder.py similarity index 83% rename from starfish/core/spots/_decode/per_round_max_channel_decoder.py rename to starfish/core/spots/Decode/per_round_max_channel_decoder.py index 80b16d632..c38901ed3 100644 --- a/starfish/core/spots/_decode/per_round_max_channel_decoder.py +++ b/starfish/core/spots/Decode/per_round_max_channel_decoder.py @@ -1,6 +1,5 @@ from starfish.core.codebook.codebook import 
Codebook from starfish.core.intensity_table.intensity_table import IntensityTable -from starfish.core.util import click from ._base import DecodeAlgorithmBase @@ -38,10 +37,3 @@ def run(self, intensities: IntensityTable, *args) -> IntensityTable: """ return self.codebook.decode_per_round_max(intensities) - - @staticmethod - @click.command("PerRoundMaxChannel") - @click.pass_context - def _cli(ctx): - codebook = ctx.obj["codebook"] - ctx.obj["component"]._cli_run(ctx, PerRoundMaxChannel(codebook)) diff --git a/starfish/core/recipe/test/__init__.py b/starfish/core/spots/Decode/test/__init__.py similarity index 100% rename from starfish/core/recipe/test/__init__.py rename to starfish/core/spots/Decode/test/__init__.py diff --git a/starfish/core/spots/_decode/test/test_decoding_without_spots.py b/starfish/core/spots/Decode/test/test_decoding_without_spots.py similarity index 100% rename from starfish/core/spots/_decode/test/test_decoding_without_spots.py rename to starfish/core/spots/Decode/test/test_decoding_without_spots.py diff --git a/starfish/core/spots/DetectPixels/__init__.py b/starfish/core/spots/DetectPixels/__init__.py new file mode 100644 index 000000000..4a0d89733 --- /dev/null +++ b/starfish/core/spots/DetectPixels/__init__.py @@ -0,0 +1 @@ +from .pixel_spot_decoder import PixelSpotDecoder diff --git a/starfish/core/spots/DetectPixels/_base.py b/starfish/core/spots/DetectPixels/_base.py new file mode 100644 index 000000000..1f705f2d0 --- /dev/null +++ b/starfish/core/spots/DetectPixels/_base.py @@ -0,0 +1,32 @@ +from abc import abstractmethod +from typing import Callable, Sequence, Tuple + +import numpy as np + +from starfish.core.imagestack.imagestack import ImageStack +from starfish.core.intensity_table.intensity_table import IntensityTable +from starfish.core.pipeline.algorithmbase import AlgorithmBase +from starfish.core.types import Number +from .combine_adjacent_features import ConnectedComponentDecodingResult + + +class 
DetectPixelsAlgorithmBase(metaclass=AlgorithmBase): + + @abstractmethod + def run( + self, + primary_image: ImageStack, + *args, + ) -> Tuple[IntensityTable, ConnectedComponentDecodingResult]: + """Finds spots in an ImageStack""" + raise NotImplementedError() + + @staticmethod + def _get_measurement_function(measurement_type: str) -> Callable[[Sequence], Number]: + try: + measurement_function = getattr(np, measurement_type) + except AttributeError: + raise ValueError( + f'measurement_type must be a numpy reduce function such as "max" or "mean". ' + f'{measurement_type} not found.') + return measurement_function diff --git a/starfish/core/spots/DetectPixels/combine_adjacent_features.py b/starfish/core/spots/DetectPixels/combine_adjacent_features.py new file mode 100644 index 000000000..601ffc2a5 --- /dev/null +++ b/starfish/core/spots/DetectPixels/combine_adjacent_features.py @@ -0,0 +1,431 @@ +import warnings +from functools import partial +from typing import Dict, List, NamedTuple, Optional, Tuple + +import numpy as np +import pandas as pd +from skimage.measure import label, regionprops +from skimage.measure._regionprops import _RegionProperties +from tqdm import tqdm + +from starfish.core.config import StarfishConfig +from starfish.core.intensity_table.intensity_table import IntensityTable +from starfish.core.multiprocessing.pool import Pool +from starfish.core.types import Axes, Features, Number, SpotAttributes + + +class ConnectedComponentDecodingResult(NamedTuple): + region_properties: List + label_image: np.ndarray + decoded_image: np.ndarray + + +class TargetsMap: + + def __init__(self, targets: np.ndarray) -> None: + """ + Creates an invertible mapping between string names of Codebook targets and integer IDs + that can be interpreted by skimage.measure to decode an image. 
+ + Parameters + ---------- + targets : np.ndarray + array of string target IDs + + """ + unique_targets = set(targets) - {'nan'} + sorted_targets = sorted(unique_targets) + self._int_to_target = dict(zip(range(1, np.iinfo(np.int).max), sorted_targets)) + self._int_to_target[0] = 'nan' + self._target_to_int = {v: k for (k, v) in self._int_to_target.items()} + + def targets_as_int(self, targets: np.ndarray) -> np.ndarray: + """Transform an array of targets into their integer representation. + + Parameters + ---------- + targets : np.ndarray['U'] + array of string targets to be transformed into integer IDs + + Returns + ------- + np.ndarray[int] : + array of targets represented by their integer IDs + + """ + return np.array([self._target_to_int[v] for v in targets]) + + def targets_as_str(self, targets: np.ndarray) -> np.ndarray: + """Transform an array of integer IDs into their corresponding string target names. + + Parameters + ---------- + targets : np.ndarray[int] + array of int targets to be transformed into string names + + Returns + ------- + np.ndarray['U'] + array of unicode-encoded target names + + """ + return np.array([self._int_to_target[v] for v in targets]) + + def target_as_str(self, integer_target: int) -> np.ndarray: + return self._int_to_target[integer_target] + + +class CombineAdjacentFeatures: + + def __init__( + self, + min_area: Number, + max_area: Number, + connectivity: int=2, + mask_filtered_features: bool=True + ) -> None: + """Combines pixel-wise adjacent features into single larger features using skimage.measure + + Parameters + ---------- + min_area : Number + Combined features with area below this value are marked as failing filters + max_area : Number + Combined features with area above this value are marked as failing filters + connectivity : int + Maximum number of orthogonal hops to consider a pixel/voxel as a neighbor. See + http://scikit-image.org/docs/dev/api/skimage.measure.html#skimage.measure.label for more + detail. 
Default = 2. + mask_filtered_features : bool + If True, sets all pixels that are failing filters applied prior to this function equal + to zero, the background value for skimage.measure.label + + """ + self._min_area = min_area + self._max_area = max_area + self._connectivity = connectivity + self._mask_filtered = mask_filtered_features + + @staticmethod + def _intensities_to_decoded_image( + intensities: IntensityTable, + target_map: TargetsMap, + mask_filtered_features: bool=True + ) -> np.ndarray: + """ + Construct an image where each pixel corresponds to its decoded target, mapped to a unique + integer ID + + Parameters + ---------- + intensities : IntensityTable + Decoded intensities + target_map : TargetsMap + Mapping between string target names and integer target IDs + mask_filtered_features : bool + If true, all features that fail filters are mapped to zero, which is considered + 'background' and will not decode to a target (default = True). + + Returns + ------- + np.ndarray[int] + Image whose pixels are coded as the targets that the ImageStack decoded to at each + position. 
+ + """ + # reverses the linearization that was used to transform an ImageStack into an IntensityTable + max_x = intensities[Axes.X.value].values.max() + 1 + max_y = intensities[Axes.Y.value].values.max() + 1 + max_z = intensities[Axes.ZPLANE.value].values.max() + 1 + + int_targets = target_map.targets_as_int(intensities[Features.TARGET].values) + if mask_filtered_features: + fails_filters = np.where(~intensities[Features.PASSES_THRESHOLDS])[0] + int_targets[fails_filters] = 0 + + decoded_image: np.ndarray = int_targets.reshape((max_z, max_y, max_x)) + return decoded_image + + @staticmethod + def _calculate_mean_pixel_traces( + label_image: np.ndarray, + intensities: IntensityTable, + ) -> IntensityTable: + """ + For all pixels that contribute to a connected component, calculate the mean value for + each (ch, round), producing an average "trace" of a feature across the imaging experiment + + Parameters + ---------- + label_image : np.ndarray + An image where all pixels of a connected component share the same integer ID + intensities : IntensityTable + decoded intensities + + Returns + ------- + IntensityTable : + an IntensityTable where the number of features equals the number of connected components + and the intensities of each each feature is its mean trace. 
+ + """ + + import xarray as xr + pixel_labels = label_image.reshape(-1) + + # Use a pandas groupby approach-based approach, because it is much faster than xarray + + # If needed, it is possible to be even faster than pandas: + # https://stackoverflow.com/questions/51975512/\ + # faster-alternative-to-perform-pandas-groupby-operation + + # stack intensities + stacked = intensities.stack(traces=(Axes.CH.value, Axes.ROUND.value)) + + # drop into pandas to use their faster groupby + traces: pd.DataFrame = pd.DataFrame( + stacked.values, + index=pixel_labels, + columns=stacked.traces.to_index() + ) + + # + distances: pd.Series = pd.Series( + stacked[Features.DISTANCE].values, index=pixel_labels + ) + + grouped = traces.groupby(level=0) + pd_mean_pixel_traces = grouped.mean() + + grouped = distances.groupby(level=0) + pd_mean_distances = grouped.mean() + + pd_xarray = xr.DataArray( + pd_mean_pixel_traces, + dims=(Features.AXIS, 'traces'), + coords=dict( + traces=('traces', pd_mean_pixel_traces.columns), + distance=(Features.AXIS, pd_mean_distances), + features=(Features.AXIS, pd_mean_pixel_traces.index) + ) + ) + mean_pixel_traces = pd_xarray.unstack('traces') + + # the 0th pixel trace corresponds to background. If present, drop it. + try: + mean_pixel_traces = mean_pixel_traces.drop(0, dim=Features.AXIS) + except KeyError: + pass + + return mean_pixel_traces + + @staticmethod + def _single_spot_attributes( + spot_property: _RegionProperties, + decoded_image: np.ndarray, + target_map: TargetsMap, + min_area: Number, + max_area: Number, + ) -> Tuple[Dict[str, int], int]: + """ + Calculate starfish SpotAttributes from the RegionProperties of a connected component + feature. + + Parameters + ---------- + spot_property: _RegionProperties + Properties of the connected component. Output of skimage.measure.regionprops + decoded_image : np.ndarray + Image whose pixels correspond to the targets that the given position in the ImageStack + decodes to. 
+ target_map : TargetsMap + Unique mapping between string target names and int target IDs. + min_area : + Combined features with area below this value are marked as failing filters + max_area : Number + Combined features with area above this value are marked as failing filters + + Returns + ------- + Dict[str, Number] : + spot attribute dictionary for this connected component, containing the x, y, z position, + target name (str) and feature radius. + int : + 1 if spot passes size filters, zero otherwise. + + """ + # because of the above skimage issue, we need to support both 2d and 3d properties + centroid = spot_property.centroid + if len(centroid) == 3: + spot_attrs = { + 'z': int(centroid[0]), + 'y': int(centroid[1]), + 'x': int(centroid[2]) + } + else: # data is 2d + spot_attrs = { + 'z': 0, + 'y': int(centroid[0]), + 'x': int(centroid[1]) + } + + # define the target index as the most repeated value in the bounding box of the spot. + # it appears there is no simpler way to do this with a regionprops object + bbox = spot_property.bbox + if len(bbox) == 6: + # 3d bbox + target_candidates = np.ndarray.flatten( + decoded_image[bbox[0]:bbox[3], bbox[1]:bbox[4], bbox[2]:bbox[5]]) + else: + # flatten and remove zeros + target_candidates = np.ndarray.flatten( + decoded_image[0, bbox[0]:bbox[2], bbox[1]:bbox[3]]) + # get the most repeated nonzero value + non_zero_target_candidates = target_candidates[target_candidates != 0] + target_index = np.argmax(np.bincount(non_zero_target_candidates)) + spot_attrs[Features.TARGET] = target_map.target_as_str(target_index) + spot_attrs[Features.SPOT_RADIUS] = spot_property.equivalent_diameter / 2 + + # filter intensities for which radius is too small + passes_area_filter = 1 if min_area <= spot_property.area < max_area else 0 + return spot_attrs, passes_area_filter + + def _create_spot_attributes( + self, + region_properties: List[_RegionProperties], + decoded_image: np.ndarray, + target_map: TargetsMap, + n_processes: 
Optional[int]=None + ) -> Tuple[SpotAttributes, np.ndarray]: + """ + + Parameters + ---------- + region_properties : List[_RegionProperties] + Properties of the each connected component. Output of skimage.measure.regionprops + decoded_image : np.ndarray + Image whose pixels correspond to the targets that the given position in the ImageStack + decodes to. + target_map : TargetsMap + Unique mapping between string target names and int target IDs. + n_processes : Optional[int]=None + number of processes to devote to measuring spot properties. If None, defaults to the + result of os.nproc() + + Returns + ------- + pd.DataFrame : + DataFrame containing x, y, z, radius, and target name for each connected component + feature. + np.ndarray[bool] : + An array with length equal to the number of features. If zero, indicates that a feature + has failed area filters. + """ + with Pool(processes=n_processes) as pool: + mapfunc = pool.map + applyfunc = partial( + self._single_spot_attributes, + decoded_image=decoded_image, + target_map=target_map, + min_area=self._min_area, + max_area=self._max_area + ) + + iterable = tqdm(region_properties, disable=(not StarfishConfig().verbose)) + results = mapfunc(applyfunc, iterable) + if not results: + # no spots found + warnings.warn("No spots found, please adjust threshold parameters") + return SpotAttributes.empty(extra_fields=['target']), np.array(0, dtype=np.bool) + spot_attrs, passes_area_filter = zip(*results) + + # update passes filter + passes_filter = np.array(passes_area_filter, dtype=np.bool) + + spot_attributes = SpotAttributes(pd.DataFrame.from_records(spot_attrs)) + return spot_attributes, passes_filter + + def run( + self, intensities: IntensityTable, + n_processes: Optional[int] = None, + ) -> Tuple[IntensityTable, ConnectedComponentDecodingResult]: + """ + Execute the combine_adjacent_features method on an IntensityTable containing pixel + intensities + + Parameters + ---------- + intensities : IntensityTable + Pixel 
intensities of an imaging experiment + n_processes : Optional[int] + Number of parallel processes to devote to calculating the filter + + Returns + ------- + IntensityTable : + Table whose features comprise sets of adjacent pixels that decoded to the same target + ConnectedComponentDecodingResult : + NamedTuple containing : + region_properties : + the properties of each connected component, in the same order as the + IntensityTable + label_image : np.ndarray + An image where all pixels of a connected component share the same integer ID + decoded_image : np.ndarray + Image whose pixels correspond to the targets that the given position in the + ImageStack decodes to. + + """ + + # map target molecules to integers so they can be reshaped into an image that can + # be subjected to a connected-component algorithm to find adjacent pixels with the + # same targets + targets = intensities[Features.TARGET].values + target_map = TargetsMap(targets) + + # create the decoded_image + decoded_image = self._intensities_to_decoded_image( + intensities, + target_map, + self._mask_filtered, + ) + + # label the decoded image to extract connected component features + label_image: np.ndarray = label(decoded_image, connectivity=self._connectivity) + + # calculate properties of each feature + props: List = regionprops(np.squeeze(label_image)) + + # calculate mean intensities across the pixels of each feature + mean_pixel_traces = self._calculate_mean_pixel_traces( + label_image, + intensities, + ) + + # Create SpotAttributes and determine feature filtering outcomes + spot_attributes, passes_filter = self._create_spot_attributes( + props, + decoded_image, + target_map, + n_processes=n_processes + ) + + # augment the SpotAttributes with filtering results and distances from nearest codes + spot_attributes.data[Features.DISTANCE] = mean_pixel_traces[Features.DISTANCE] + spot_attributes.data[Features.PASSES_THRESHOLDS] = passes_filter + + # create new indexes for the output IntensityTable + 
channel_index = mean_pixel_traces.indexes[Axes.CH] + round_index = mean_pixel_traces.indexes[Axes.ROUND] + coords = IntensityTable._build_xarray_coords(spot_attributes, channel_index, round_index) + + # create the output IntensityTable + dims = (Features.AXIS, Axes.CH.value, Axes.ROUND.value) + intensity_table = IntensityTable( + data=mean_pixel_traces, coords=coords, dims=dims + ) + + # combine the various non-IntensityTable results into a NamedTuple before returning + ccdr = ConnectedComponentDecodingResult(props, label_image, decoded_image) + + return intensity_table, ccdr diff --git a/starfish/core/spots/_detect_pixels/pixel_spot_decoder.py b/starfish/core/spots/DetectPixels/pixel_spot_decoder.py similarity index 71% rename from starfish/core/spots/_detect_pixels/pixel_spot_decoder.py rename to starfish/core/spots/DetectPixels/pixel_spot_decoder.py index 7a7aa9c0b..592d9ce7e 100644 --- a/starfish/core/spots/_detect_pixels/pixel_spot_decoder.py +++ b/starfish/core/spots/DetectPixels/pixel_spot_decoder.py @@ -1,13 +1,11 @@ from typing import Optional, Tuple -import numpy as np from starfish.core.codebook.codebook import Codebook from starfish.core.imagestack.imagestack import ImageStack from starfish.core.intensity_table.intensity_table import IntensityTable from starfish.core.intensity_table.intensity_table_coordinates import \ transfer_physical_coords_from_imagestack_to_intensity_table -from starfish.core.util import click from ._base import DetectPixelsAlgorithmBase from .combine_adjacent_features import CombineAdjacentFeatures, ConnectedComponentDecodingResult @@ -90,43 +88,3 @@ def run( transfer_physical_coords_from_imagestack_to_intensity_table(image_stack=primary_image, intensity_table=decoded_spots) return decoded_spots, image_decoding_results - - @staticmethod - @click.command("PixelSpotDecoder") - @click.option("--metric", type=str, default='euclidean') - @click.option( - "--distance-threshold", type=float, default=0.5176, - help="maximum distance a 
pixel may be from a codeword before it is filtered" - ) - @click.option( - "--magnitude-threshold", type=float, default=1, - help="minimum magnitude of a feature" - ) - @click.option( - "--min-area", type=int, default=2, - help="minimum area of a feature" - ) - @click.option( - "--max-area", type=int, default=np.inf, - help="maximum area of a feature" - ) - @click.option( - "--norm-order", type=int, default=2, - help="order of L_p norm to apply to intensities " - "and codes when using metric_decode to pair each intensities to its closest target" - ) - @click.pass_context - def _cli( - ctx, metric, distance_threshold, magnitude_threshold, min_area, max_area, norm_order - ): - codebook = ctx.obj["codebook"] - instance = PixelSpotDecoder( - codebook=codebook, - metric=metric, - distance_threshold=distance_threshold, - magnitude_threshold=magnitude_threshold, - min_area=min_area, - max_area=max_area, - norm_order=norm_order, - ) - ctx.obj["component"]._cli_run(ctx, instance) diff --git a/starfish/core/spots/_decode/test/__init__.py b/starfish/core/spots/DetectPixels/test/__init__.py similarity index 100% rename from starfish/core/spots/_decode/test/__init__.py rename to starfish/core/spots/DetectPixels/test/__init__.py diff --git a/starfish/core/spots/_detect_pixels/test/test_calculate_mean_pixel_traces.py b/starfish/core/spots/DetectPixels/test/test_calculate_mean_pixel_traces.py similarity index 96% rename from starfish/core/spots/_detect_pixels/test/test_calculate_mean_pixel_traces.py rename to starfish/core/spots/DetectPixels/test/test_calculate_mean_pixel_traces.py index decfd7f2c..da112b82e 100644 --- a/starfish/core/spots/_detect_pixels/test/test_calculate_mean_pixel_traces.py +++ b/starfish/core/spots/DetectPixels/test/test_calculate_mean_pixel_traces.py @@ -8,7 +8,7 @@ from skimage.measure import label from starfish import ImageStack, IntensityTable -from starfish.core.spots._detect_pixels.combine_adjacent_features import CombineAdjacentFeatures +from 
starfish.core.spots.DetectPixels.combine_adjacent_features import CombineAdjacentFeatures from starfish.core.types import Features diff --git a/starfish/core/spots/_detect_pixels/test/test_create_spot_attributes.py b/starfish/core/spots/DetectPixels/test/test_create_spot_attributes.py similarity index 96% rename from starfish/core/spots/_detect_pixels/test/test_create_spot_attributes.py rename to starfish/core/spots/DetectPixels/test/test_create_spot_attributes.py index 850a4107e..fd26e8c1c 100644 --- a/starfish/core/spots/_detect_pixels/test/test_create_spot_attributes.py +++ b/starfish/core/spots/DetectPixels/test/test_create_spot_attributes.py @@ -5,7 +5,7 @@ import numpy as np from skimage.measure import regionprops -from starfish.core.spots._detect_pixels.combine_adjacent_features import ( +from starfish.core.spots.DetectPixels.combine_adjacent_features import ( CombineAdjacentFeatures, TargetsMap ) from starfish.core.types import Axes, Features, SpotAttributes diff --git a/starfish/core/spots/_detect_pixels/test/test_intensities_to_decoded_image.py b/starfish/core/spots/DetectPixels/test/test_intensities_to_decoded_image.py similarity index 97% rename from starfish/core/spots/_detect_pixels/test/test_intensities_to_decoded_image.py rename to starfish/core/spots/DetectPixels/test/test_intensities_to_decoded_image.py index 48f811b8e..4529dbeaf 100644 --- a/starfish/core/spots/_detect_pixels/test/test_intensities_to_decoded_image.py +++ b/starfish/core/spots/DetectPixels/test/test_intensities_to_decoded_image.py @@ -7,7 +7,7 @@ import numpy as np from starfish import ImageStack, IntensityTable -from starfish.core.spots._detect_pixels.combine_adjacent_features import ( +from starfish.core.spots.DetectPixels.combine_adjacent_features import ( CombineAdjacentFeatures, TargetsMap ) from starfish.core.types import Features diff --git a/starfish/core/spots/_detect_pixels/test/test_targets_map.py b/starfish/core/spots/DetectPixels/test/test_targets_map.py similarity 
index 85% rename from starfish/core/spots/_detect_pixels/test/test_targets_map.py rename to starfish/core/spots/DetectPixels/test/test_targets_map.py index 6fbcba845..9254c2a4d 100644 --- a/starfish/core/spots/_detect_pixels/test/test_targets_map.py +++ b/starfish/core/spots/DetectPixels/test/test_targets_map.py @@ -4,7 +4,7 @@ import numpy as np -from starfish.core.spots._detect_pixels.combine_adjacent_features import TargetsMap +from starfish.core.spots.DetectPixels.combine_adjacent_features import TargetsMap def test_targets_map(): diff --git a/starfish/core/spots/DetectSpots/__init__.py b/starfish/core/spots/DetectSpots/__init__.py new file mode 100644 index 000000000..1b01504fc --- /dev/null +++ b/starfish/core/spots/DetectSpots/__init__.py @@ -0,0 +1,8 @@ +# from starfish.core.pipeline import import_all_submodules +# from ._base import DetectSpots +# import_all_submodules(__file__, __package__) + +from .blob import BlobDetector +from .local_max_peak_finder import LocalMaxPeakFinder +from .local_search_blob_detector import LocalSearchBlobDetector +from .trackpy_local_max_peak_finder import TrackpyLocalMaxPeakFinder diff --git a/starfish/core/spots/_detect_spots/_base.py b/starfish/core/spots/DetectSpots/_base.py similarity index 71% rename from starfish/core/spots/_detect_spots/_base.py rename to starfish/core/spots/DetectSpots/_base.py index d6530eb22..942164618 100644 --- a/starfish/core/spots/_detect_spots/_base.py +++ b/starfish/core/spots/DetectSpots/_base.py @@ -1,5 +1,5 @@ from abc import abstractmethod -from typing import Any, Callable, Optional, Sequence, Tuple, Type, Union +from typing import Any, Callable, Optional, Sequence, Tuple, Union import numpy as np import xarray as xr @@ -7,13 +7,10 @@ from starfish.core.imagestack.imagestack import ImageStack from starfish.core.intensity_table.intensity_table import IntensityTable from starfish.core.pipeline.algorithmbase import AlgorithmBase -from starfish.core.pipeline.pipelinecomponent import 
PipelineComponent from starfish.core.types import Axes, Number, SpotAttributes -from starfish.core.util import click -from starfish.core.util.click.indirectparams import ImageStackParamType -class DetectSpots(PipelineComponent): +class DetectSpotsAlgorithmBase(metaclass=AlgorithmBase): """ Starfish spot detectors use a variety of means to detect bright spots against dark backgrounds. Starfish's spot detectors each have different strengths and weaknesses. @@ -76,60 +73,7 @@ class DetectSpots(PipelineComponent): detector is only applicable to experiments with "one-hot" codebooks, such as those generated by in-situ sequencing, which guarantee that only one channel will be "on" per round. - """ - @classmethod - def _cli_run(cls, ctx, instance): - output = ctx.obj["output"] - image_stack = ctx.obj["image_stack"] - blobs_stack = ctx.obj["blobs_stack"] - blobs_axes = ctx.obj["blobs_axes"] - - intensities: IntensityTable = instance.run( - image_stack, - blobs_stack, - blobs_axes, - ) - - # TODO ambrosejcarr find a way to save arbitrary detector results - intensities.to_netcdf(output) - - @staticmethod - @click.group("DetectSpots") - @click.option("-i", "--input", required=True, type=ImageStackParamType) - @click.option("-o", "--output", required=True) - @click.option( - "--blobs-stack", - default=None, - required=False, - type=ImageStackParamType, - help="ImageStack that contains the blobs." 
- ) - @click.option( - "--blobs-axis", - type=click.Choice([Axes.ROUND.value, Axes.CH.value, Axes.ZPLANE.value]), - multiple=True, - required=False, - help="The axes that the blobs image will be maj-projected to produce the blobs_image" - ) - @click.pass_context - def _cli(ctx, input, output, blobs_stack, blobs_axis): - """detect spots""" - print('Detecting Spots ...') - _blobs_axes = tuple(Axes(_blobs_axis) for _blobs_axis in blobs_axis) - - ctx.obj = dict( - component=DetectSpots, - image_stack=input, - output=output, - blobs_stack=blobs_stack, - blobs_axes=_blobs_axes, - ) - - -class DetectSpotsAlgorithmBase(AlgorithmBase): - @classmethod - def get_pipeline_component_class(cls) -> Type[PipelineComponent]: - return DetectSpots + """ @abstractmethod def run( diff --git a/starfish/core/spots/_detect_spots/blob.py b/starfish/core/spots/DetectSpots/blob.py similarity index 80% rename from starfish/core/spots/_detect_spots/blob.py rename to starfish/core/spots/DetectSpots/blob.py index 585ea23bf..eea388f31 100644 --- a/starfish/core/spots/_detect_spots/blob.py +++ b/starfish/core/spots/DetectSpots/blob.py @@ -8,7 +8,6 @@ from starfish.core.imagestack.imagestack import ImageStack from starfish.core.intensity_table.intensity_table import IntensityTable from starfish.core.types import Axes, Features, Number, SpotAttributes -from starfish.core.util import click from ._base import DetectSpotsAlgorithmBase from .detect import detect_spots, measure_spot_intensity @@ -159,30 +158,3 @@ def run( radius_is_gyration=False) return intensity_table - - @staticmethod - @click.command("BlobDetector") - @click.option( - "--min-sigma", default=4, type=int, help="Minimum spot size (in standard deviation)") - @click.option( - "--max-sigma", default=6, type=int, help="Maximum spot size (in standard deviation)") - @click.option( - "--num-sigma", default=20, type=int, help="Number of sigmas to try") - @click.option( - "--threshold", default=.01, type=float, help="Dots threshold") - 
@click.option( - "--overlap", default=0.5, type=float, - help="dots with overlap of greater than this fraction are combined") - @click.option( - "--show", default=False, is_flag=True, help="display results visually") - @click.option( - "--detector_method", default='blob_log', - help="str ['blob_dog', 'blob_doh', 'blob_log'] name of the type of " - "detection method used from skimage.feature. Default: blob_log" - ) - @click.pass_context - def _cli(ctx, min_sigma, max_sigma, num_sigma, threshold, overlap, show, detector_method): - instance = BlobDetector(min_sigma, max_sigma, num_sigma, threshold, overlap, - detector_method=detector_method) - # FIXME: measurement_type, is_volume missing as options; show missing as ctor args - ctx.obj["component"]._cli_run(ctx, instance) diff --git a/starfish/core/spots/_detect_spots/detect.py b/starfish/core/spots/DetectSpots/detect.py similarity index 100% rename from starfish/core/spots/_detect_spots/detect.py rename to starfish/core/spots/DetectSpots/detect.py diff --git a/starfish/core/spots/_detect_spots/local_max_peak_finder.py b/starfish/core/spots/DetectSpots/local_max_peak_finder.py similarity index 86% rename from starfish/core/spots/_detect_spots/local_max_peak_finder.py rename to starfish/core/spots/DetectSpots/local_max_peak_finder.py index 3aa5f7ef5..3e979fcd5 100644 --- a/starfish/core/spots/_detect_spots/local_max_peak_finder.py +++ b/starfish/core/spots/DetectSpots/local_max_peak_finder.py @@ -13,7 +13,6 @@ from starfish.core.imagestack.imagestack import ImageStack from starfish.core.intensity_table.intensity_table import IntensityTable from starfish.core.types import Axes, Features, Number, SpotAttributes -from starfish.core.util import click from ._base import DetectSpotsAlgorithmBase from .detect import detect_spots @@ -306,41 +305,3 @@ def run( radius_is_gyration=False) return intensity_table - - @staticmethod - @click.command("LocalMaxPeakFinder") - @click.option( - "--min-distance", default=4, type=int, - 
help="Minimum spot size (in number of pixels deviation)") - @click.option( - "--min-obj-area", default=6, type=int, - help="Maximum spot size (in number of pixels") - @click.option( - "--max_obj_area", default=300, type=int, - help="Maximum spot size (in number of pixels)") - @click.option( - "--stringency", default=0, type=int, - help="Number of indices in threshold list to look past " - "for the threhsold finding algorithm") - @click.option( - "--threshold", default=None, type=float, - help="Threshold on which to threshold " - "image prior to spot detection") - @click.option( - "--min-num-spots-detected", default=3, type=int, - help="Minimum number of spots detected at which to stop a" - "utomatic thresholding algorithm") - @click.option( - "--measurement-type", default='max', type=str, - help="How to aggregate pixel intensities in a spot") - @click.option( - "--is-volume", default=False, help="Find spots in 3D or not") - @click.option( - "--verbose", default=True, help="Verbosity flag") - @click.pass_context - def _cli(ctx, min_distance, min_obj_area, max_obj_area, stringency, threshold, - min_num_spots_detected, measurement_type, is_volume, verbose): - instance = LocalMaxPeakFinder(min_distance, min_obj_area, max_obj_area, - stringency, threshold, - min_num_spots_detected, measurement_type, is_volume, verbose) - ctx.obj["component"]._cli_run(ctx, instance) diff --git a/starfish/core/spots/_detect_spots/local_search_blob_detector.py b/starfish/core/spots/DetectSpots/local_search_blob_detector.py similarity index 92% rename from starfish/core/spots/_detect_spots/local_search_blob_detector.py rename to starfish/core/spots/DetectSpots/local_search_blob_detector.py index 86b367c63..45f1f5d62 100644 --- a/starfish/core/spots/_detect_spots/local_search_blob_detector.py +++ b/starfish/core/spots/DetectSpots/local_search_blob_detector.py @@ -12,17 +12,15 @@ import numpy as np import pandas as pd import xarray as xr -from click import Choice from sklearn.neighbors import 
NearestNeighbors from starfish.core.compat import blob_dog, blob_log -from starfish.core.image._filter.util import determine_axes_to_group_by +from starfish.core.image.Filter.util import determine_axes_to_group_by from starfish.core.imagestack.imagestack import ImageStack from starfish.core.intensity_table.intensity_table import IntensityTable from starfish.core.intensity_table.intensity_table_coordinates import \ transfer_physical_coords_from_imagestack_to_intensity_table from starfish.core.types import Axes, Features, Number, SpotAttributes -from starfish.core.util import click from ._base import DetectSpotsAlgorithmBase blob_detectors = { @@ -423,30 +421,3 @@ def image_to_spots(self, data_image: Union[np.ndarray, xr.DataArray]) -> SpotAtt # LocalSearchBlobDetector does not follow the same contract as the remaining spot detectors. # TODO: (ambrosejcarr) Rationalize the spot detectors by contract and then remove this hack. raise NotImplementedError() - - @staticmethod - @click.command("LocalSearchBlobDetector") - @click.option( - "--min-sigma", default=4, type=int, help="Minimum spot size (in standard deviation).") - @click.option( - "--max-sigma", default=6, type=int, help="Maximum spot size (in standard deviation).") - @click.option( - "--threshold", default=.01, type=float, help="Dots threshold.") - @click.option( - "--overlap", default=0.5, type=float, - help="Dots with overlap of greater than this fraction are combined.") - @click.option( - "--detector-method", default='blob_log', type=Choice(['blob_log', 'blob_dog']), - help="Name of the type of the skimage blob detection method.") - @click.option( - "--search-radius", default=3, type=int, - help="Number of pixels over which to search for spots in other image tiles.") - @click.pass_context - def _cli( - ctx, min_sigma, max_sigma, threshold, overlap, show, detector_method, search_radius - ) -> None: - instance = LocalSearchBlobDetector( - min_sigma, max_sigma, threshold, overlap, - 
detector_method=detector_method, search_radius=search_radius - ) - ctx.obj["component"]._cli_run(ctx, instance) diff --git a/starfish/core/spots/_detect_pixels/test/__init__.py b/starfish/core/spots/DetectSpots/test/__init__.py similarity index 100% rename from starfish/core/spots/_detect_pixels/test/__init__.py rename to starfish/core/spots/DetectSpots/test/__init__.py diff --git a/starfish/core/spots/_detect_spots/test/test_local_search_blob_detector.py b/starfish/core/spots/DetectSpots/test/test_local_search_blob_detector.py similarity index 98% rename from starfish/core/spots/_detect_spots/test/test_local_search_blob_detector.py rename to starfish/core/spots/DetectSpots/test/test_local_search_blob_detector.py index 3228bce8f..41580639d 100644 --- a/starfish/core/spots/_detect_spots/test/test_local_search_blob_detector.py +++ b/starfish/core/spots/DetectSpots/test/test_local_search_blob_detector.py @@ -2,7 +2,7 @@ from scipy.ndimage.filters import gaussian_filter from starfish import ImageStack -from starfish.core.spots._detect_spots.local_search_blob_detector import LocalSearchBlobDetector +from starfish.core.spots.DetectSpots.local_search_blob_detector import LocalSearchBlobDetector from starfish.core.types import Axes diff --git a/starfish/core/spots/_detect_spots/test/test_spot_detection.py b/starfish/core/spots/DetectSpots/test/test_spot_detection.py similarity index 100% rename from starfish/core/spots/_detect_spots/test/test_spot_detection.py rename to starfish/core/spots/DetectSpots/test/test_spot_detection.py diff --git a/starfish/core/spots/_detect_spots/test/test_synthetic_data.py b/starfish/core/spots/DetectSpots/test/test_synthetic_data.py similarity index 98% rename from starfish/core/spots/_detect_spots/test/test_synthetic_data.py rename to starfish/core/spots/DetectSpots/test/test_synthetic_data.py index 1fa1d40c5..3da66f70a 100644 --- a/starfish/core/spots/_detect_spots/test/test_synthetic_data.py +++ 
b/starfish/core/spots/DetectSpots/test/test_synthetic_data.py @@ -1,7 +1,7 @@ import numpy as np import pytest -from starfish.core.spots._detect_spots.blob import BlobDetector +from starfish.core.spots.DetectSpots.blob import BlobDetector from starfish.core.test.factories import SyntheticData from starfish.core.types import Axes, Features diff --git a/starfish/core/spots/_detect_spots/trackpy_local_max_peak_finder.py b/starfish/core/spots/DetectSpots/trackpy_local_max_peak_finder.py similarity index 77% rename from starfish/core/spots/_detect_spots/trackpy_local_max_peak_finder.py rename to starfish/core/spots/DetectSpots/trackpy_local_max_peak_finder.py index 4b09e1dbf..2230a294e 100644 --- a/starfish/core/spots/_detect_spots/trackpy_local_max_peak_finder.py +++ b/starfish/core/spots/DetectSpots/trackpy_local_max_peak_finder.py @@ -8,7 +8,6 @@ from starfish.core.imagestack.imagestack import ImageStack from starfish.core.intensity_table.intensity_table import IntensityTable from starfish.core.types import Axes, SpotAttributes -from starfish.core.util import click from ._base import DetectSpotsAlgorithmBase from .detect import detect_spots @@ -174,44 +173,3 @@ def run( radius_is_gyration=True) return intensity_table - - @staticmethod - @click.command("TrackpyLocalMaxPeakFinder") - @click.option("--spot-diameter", type=str, help='expected spot size') - @click.option( - "--min-mass", default=4, type=int, help="minimum integrated spot intensity") - @click.option( - "--max-size", default=6, type=int, help="maximum radius of gyration of brightness") - @click.option( - "--separation", default=5, type=float, help="minimum distance between spots") - @click.option( - "--noise-size", default=None, type=int, - help="width of gaussian blurring kernel, in pixels") - @click.option( - "--smoothing-size", default=None, type=int, - help="odd integer. Size of boxcar (moving average) filter in pixels. 
Default is the " - "Diameter") - @click.option( - "--preprocess", is_flag=True, - help="if passed, gaussian and boxcar filtering are applied") - @click.option( - "--max-iterations", default=10, type=int, - help="Max number of loops to refine the center of mass. Default is 10") - @click.option( - "--show", default=False, is_flag=True, help="display results visually") - @click.option( - "--percentile", default=None, type=float, - help="clip bandpass below this value. Thresholding is done on already background-" - "subtracted images. Default 1 for integer images and 1/255 for float") - @click.option( - "--is-volume", is_flag=True, - help="indicates that the image stack should be filtered in 3d") - @click.pass_context - def _cli(ctx, spot_diameter, min_max, max_size, separation, noise_size, smoothing_size, - preprocess, max_iterations, show, percentile, is_volume): - - instance = TrackpyLocalMaxPeakFinder(spot_diameter, min_max, max_size, - separation, noise_size, smoothing_size, - preprocess, max_iterations, show, - percentile, is_volume) - ctx.obj["component"]._cli_run(ctx, instance) diff --git a/starfish/core/spots/__init__.py b/starfish/core/spots/__init__.py index 6b542f934..e69de29bb 100644 --- a/starfish/core/spots/__init__.py +++ b/starfish/core/spots/__init__.py @@ -1,4 +0,0 @@ -from ._assign_targets import AssignTargets -from ._decode import Decode -from ._detect_pixels import DetectPixels -from ._detect_spots import DetectSpots diff --git a/starfish/core/spots/_assign_targets/__init__.py b/starfish/core/spots/_assign_targets/__init__.py deleted file mode 100644 index d5a0d61f8..000000000 --- a/starfish/core/spots/_assign_targets/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from starfish.core.pipeline import import_all_submodules -from ._base import AssignTargets -import_all_submodules(__file__, __package__) diff --git a/starfish/core/spots/_assign_targets/_base.py b/starfish/core/spots/_assign_targets/_base.py deleted file mode 100644 index 
eef59f5c6..000000000 --- a/starfish/core/spots/_assign_targets/_base.py +++ /dev/null @@ -1,60 +0,0 @@ -import os -from abc import abstractmethod -from typing import Type - -import numpy as np - -from starfish.core.intensity_table.intensity_table import IntensityTable -from starfish.core.pipeline.algorithmbase import AlgorithmBase -from starfish.core.pipeline.pipelinecomponent import PipelineComponent -from starfish.core.segmentation_mask import SegmentationMaskCollection -from starfish.core.util import click - - -class AssignTargets(PipelineComponent): - @classmethod - def _cli_run(cls, ctx, instance): - output = ctx.obj["output"] - intensity_table = ctx.obj["intensity_table"] - label_image = ctx.obj["label_image"] - assigned: IntensityTable = instance.run(label_image, intensity_table) - print(f"Writing intensities, including cell ids to {output}") - assigned.to_netcdf(os.path.join(output)) - - @staticmethod - @click.group("AssignTargets") - @click.option("--label-image", required=True, type=click.Path(exists=True)) - @click.option("--intensities", required=True, type=click.Path(exists=True)) - @click.option("-o", "--output", required=True) - @click.pass_context - def _cli(ctx, label_image, intensities, output): - """assign targets to cells""" - - print('Assigning targets to cells...') - ctx.obj = dict( - component=AssignTargets, - output=output, - intensity_table=IntensityTable.open_netcdf(intensities), - label_image=SegmentationMaskCollection.from_disk(label_image) - ) - - -class AssignTargetsAlgorithm(AlgorithmBase): - """ - AssignTargets assigns cell IDs to detected spots using an IntensityTable and - SegmentationMaskCollection. - """ - @classmethod - def get_pipeline_component_class(cls) -> Type[PipelineComponent]: - return AssignTargets - - @abstractmethod - def run( - self, - label_image: np.ndarray, - intensity_table: IntensityTable, - verbose: bool=False, - in_place: bool=False, - ) -> IntensityTable: - """Performs target (e.g. 
gene) assignment given the spots and the regions.""" - raise NotImplementedError() diff --git a/starfish/core/spots/_decode/__init__.py b/starfish/core/spots/_decode/__init__.py deleted file mode 100644 index 5d1785b28..000000000 --- a/starfish/core/spots/_decode/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from starfish.core.pipeline import import_all_submodules -from ._base import Decode -import_all_submodules(__file__, __package__) diff --git a/starfish/core/spots/_decode/_base.py b/starfish/core/spots/_decode/_base.py deleted file mode 100644 index bb7dca542..000000000 --- a/starfish/core/spots/_decode/_base.py +++ /dev/null @@ -1,55 +0,0 @@ -from abc import abstractmethod -from typing import Type - -from starfish.core.intensity_table.intensity_table import IntensityTable -from starfish.core.pipeline.algorithmbase import AlgorithmBase -from starfish.core.pipeline.pipelinecomponent import PipelineComponent -from starfish.core.util import click -from starfish.core.util.click.indirectparams import CodebookParamType - - -class Decode(PipelineComponent): - """ - The Decode class exposes methods to compare detected spots to expected patterns of - fluorescence across the rounds and channels of an experiment and map them to target genes or - proteins. - - For single molecule FISH or RNAscope experiments, these codebooks are often simple mappings of - (round, channel) pairs to targets. For coded assays, these codebooks can be much more complex. - - Example codebooks are associated with each experiment in :py:mod:`starfish.data` and can - be accessed with :py:meth`Experiment.codebook`. 
- """ - @classmethod - def _cli_run(cls, ctx, instance): - table = ctx.obj["intensities"] - output = ctx.obj["output"] - intensities: IntensityTable = instance.run(table) - intensities.to_netcdf(output) - - @staticmethod - @click.group("Decode") - @click.option("-i", "--input", required=True, type=click.Path(exists=True)) - @click.option("-o", "--output", required=True) - @click.option("--codebook", required=True, type=CodebookParamType) - @click.pass_context - def _cli(ctx, input, output, codebook): - """assign genes to spots""" - ctx.obj = dict( - component=Decode, - input=input, - output=output, - intensities=IntensityTable.open_netcdf(input), - codebook=codebook, - ) - - -class DecodeAlgorithmBase(AlgorithmBase): - @classmethod - def get_pipeline_component_class(cls) -> Type[PipelineComponent]: - return Decode - - @abstractmethod - def run(self, intensities: IntensityTable, *args): - """Performs decoding on the spots found, using the codebook specified.""" - raise NotImplementedError() diff --git a/starfish/core/spots/_detect_pixels/__init__.py b/starfish/core/spots/_detect_pixels/__init__.py deleted file mode 100644 index 0b9d66989..000000000 --- a/starfish/core/spots/_detect_pixels/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from starfish.core.pipeline import import_all_submodules -from ._base import DetectPixels -import_all_submodules(__file__, __package__) diff --git a/starfish/core/spots/_detect_pixels/_base.py b/starfish/core/spots/_detect_pixels/_base.py deleted file mode 100644 index 8fe06cb20..000000000 --- a/starfish/core/spots/_detect_pixels/_base.py +++ /dev/null @@ -1,75 +0,0 @@ -from abc import abstractmethod -from typing import Callable, Sequence, Tuple, Type - -import numpy as np - -from starfish.core.imagestack.imagestack import ImageStack -from starfish.core.intensity_table.intensity_table import IntensityTable -from starfish.core.pipeline.algorithmbase import AlgorithmBase -from starfish.core.pipeline.pipelinecomponent import PipelineComponent 
-from starfish.core.types import Number -from starfish.core.util import click -from starfish.core.util.click.indirectparams import CodebookParamType, ImageStackParamType -from .combine_adjacent_features import ConnectedComponentDecodingResult - - -class DetectPixels(PipelineComponent): - """ - Decode an image by first coding each pixel, then combining the results into spots. - """ - @classmethod - def _cli_run(cls, ctx, instance): - output = ctx.obj["output"] - image_stack: ImageStack = ctx.obj["image_stack"] - intensities: IntensityTable - intensities, _ = instance.run(image_stack) - intensities.to_netcdf(output) - - @staticmethod - @click.group("DetectPixels") - @click.option("-i", "--input", required=True, type=ImageStackParamType) - @click.option("-o", "--output", required=True) - @click.option( - "--codebook", - default=None, required=True, type=CodebookParamType, - help=( - "A spaceTx spec-compliant json file that describes a three dimensional tensor " - "whose values are the expected intensity of a spot for each code in each imaging " - "round and each color channel." 
- ) - ) - @click.pass_context - def _cli(ctx, input, output, codebook): - """pixel-wise spot detection and decoding""" - print('Detecting Spots ...') - ctx.obj = dict( - component=DetectPixels, - image_stack=input, - output=output, - codebook=codebook, - ) - - -class DetectPixelsAlgorithmBase(AlgorithmBase): - @classmethod - def get_pipeline_component_class(cls) -> Type[PipelineComponent]: - return DetectPixels - - @abstractmethod - def run( - self, - primary_image: ImageStack, - *args, - ) -> Tuple[IntensityTable, ConnectedComponentDecodingResult]: - """Finds spots in an ImageStack""" - raise NotImplementedError() - - @staticmethod - def _get_measurement_function(measurement_type: str) -> Callable[[Sequence], Number]: - try: - measurement_function = getattr(np, measurement_type) - except AttributeError: - raise ValueError( - f'measurement_type must be a numpy reduce function such as "max" or "mean". ' - f'{measurement_type} not found.') - return measurement_function diff --git a/starfish/core/spots/_detect_pixels/combine_adjacent_features.py b/starfish/core/spots/_detect_pixels/combine_adjacent_features.py index caeeef3df..601ffc2a5 100644 --- a/starfish/core/spots/_detect_pixels/combine_adjacent_features.py +++ b/starfish/core/spots/_detect_pixels/combine_adjacent_features.py @@ -321,29 +321,29 @@ def _create_spot_attributes( An array with length equal to the number of features. If zero, indicates that a feature has failed area filters. 
""" - pool = Pool(processes=n_processes) - mapfunc = pool.map - applyfunc = partial( - self._single_spot_attributes, - decoded_image=decoded_image, - target_map=target_map, - min_area=self._min_area, - max_area=self._max_area - ) + with Pool(processes=n_processes) as pool: + mapfunc = pool.map + applyfunc = partial( + self._single_spot_attributes, + decoded_image=decoded_image, + target_map=target_map, + min_area=self._min_area, + max_area=self._max_area + ) - iterable = tqdm(region_properties, disable=(not StarfishConfig().verbose)) - results = mapfunc(applyfunc, iterable) - if not results: - # no spots found - warnings.warn("No spots found, please adjust threshold parameters") - return SpotAttributes.empty(extra_fields=['target']), np.array(0, dtype=np.bool) - spot_attrs, passes_area_filter = zip(*results) + iterable = tqdm(region_properties, disable=(not StarfishConfig().verbose)) + results = mapfunc(applyfunc, iterable) + if not results: + # no spots found + warnings.warn("No spots found, please adjust threshold parameters") + return SpotAttributes.empty(extra_fields=['target']), np.array(0, dtype=np.bool) + spot_attrs, passes_area_filter = zip(*results) - # update passes filter - passes_filter = np.array(passes_area_filter, dtype=np.bool) + # update passes filter + passes_filter = np.array(passes_area_filter, dtype=np.bool) - spot_attributes = SpotAttributes(pd.DataFrame.from_records(spot_attrs)) - return spot_attributes, passes_filter + spot_attributes = SpotAttributes(pd.DataFrame.from_records(spot_attrs)) + return spot_attributes, passes_filter def run( self, intensities: IntensityTable, diff --git a/starfish/core/spots/_detect_spots/__init__.py b/starfish/core/spots/_detect_spots/__init__.py deleted file mode 100644 index 50793582d..000000000 --- a/starfish/core/spots/_detect_spots/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from starfish.core.pipeline import import_all_submodules -from ._base import DetectSpots -import_all_submodules(__file__, __package__) 
diff --git a/starfish/core/spots/_detect_spots/test/__init__.py b/starfish/core/spots/_detect_spots/test/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/starfish/core/starfish.py b/starfish/core/starfish.py index c7ffa920d..6f2d94176 100755 --- a/starfish/core/starfish.py +++ b/starfish/core/starfish.py @@ -6,23 +6,9 @@ import pkg_resources -from starfish.core.experiment.builder.cli import build as build_cli -from starfish.core.image import ( - ApplyTransform, - Filter, - LearnTransform, - Segment, -) from starfish.core.spacetx_format.cli import validate as validate_cli -from starfish.core.spots import ( - AssignTargets, - Decode, - DetectPixels, - DetectSpots, -) from starfish.core.util import click - PROFILER_KEY = "profiler" """This is the dictionary key we use to attach the profiler to pass to the resultcallback.""" PROFILER_LINES = 15 @@ -92,16 +78,5 @@ def install_strict_dependencies(): ]) -# Pipelines -starfish.add_command(LearnTransform._cli) # type: ignore -starfish.add_command(ApplyTransform._cli) # type: ignore -starfish.add_command(Filter._cli) # type: ignore -starfish.add_command(DetectPixels._cli) -starfish.add_command(DetectSpots._cli) # type: ignore -starfish.add_command(Segment._cli) # type: ignore -starfish.add_command(AssignTargets._cli) # type: ignore -starfish.add_command(Decode._cli) # type: ignore - # Other -starfish.add_command(build_cli) # type: ignore starfish.add_command(validate_cli) # type: ignore diff --git a/starfish/core/test/factories.py b/starfish/core/test/factories.py index 26f273f86..00d1c7e13 100644 --- a/starfish/core/test/factories.py +++ b/starfish/core/test/factories.py @@ -7,9 +7,9 @@ from skimage import img_as_float32, img_as_uint from starfish import Codebook, ImageStack, IntensityTable -from starfish.core.image._filter.white_tophat import WhiteTophat +from starfish.core.image.Filter.white_tophat import WhiteTophat from starfish.core.imagestack.test.factories import 
create_imagestack_from_codebook -from starfish.core.spots._detect_spots.blob import BlobDetector +from starfish.core.spots.DetectSpots.blob import BlobDetector from starfish.core.types import Axes, Features diff --git a/starfish/core/util/click/indirectparams.py b/starfish/core/util/click/indirectparams.py deleted file mode 100644 index 3040cd139..000000000 --- a/starfish/core/util/click/indirectparams.py +++ /dev/null @@ -1,63 +0,0 @@ -import abc -from typing import Generic, Iterable, TypeVar - -from starfish.core.codebook.codebook import Codebook -from starfish.core.imagestack.imagestack import ImageStack -from starfish.core.util.indirectfile import ( - ConversionFormula, - convert, - GetCodebook, - GetCodebookFromExperiment, - GetImageStack, - GetImageStackFromExperiment, - NoApplicableConversionFormulaError, - NoSuccessfulConversionFormulaError, -) -from . import ParamType - - -IndirectResultType = TypeVar("IndirectResultType") - - -class IndirectFile(ParamType, Generic[IndirectResultType]): - def convert(self, value: str, param, ctx): - conversion_formulas = self.get_conversion_formulas() - try: - return convert(value, conversion_formulas) - except (NoApplicableConversionFormulaError, NoSuccessfulConversionFormulaError) as ex: - self.fail(ex.args[0]) - - @abc.abstractmethod - def get_conversion_formulas(self) -> Iterable[ConversionFormula[IndirectResultType]]: - """Return one or more conversion Formulas to get from an input string to the type of object - we want. 
- """ - raise NotImplementedError() - - -class CodebookParam(IndirectFile[Codebook]): - def __init__(self): - self.name = "codebook" - - def get_conversion_formulas(self) -> Iterable[ConversionFormula[Codebook]]: - return [ - GetCodebookFromExperiment(), - GetCodebook(), - ] - - -CodebookParamType = CodebookParam() - - -class ImageStackParam(IndirectFile[ImageStack]): - def __init__(self): - self.name = "imagestack" - - def get_conversion_formulas(self) -> Iterable[ConversionFormula[ImageStack]]: - return [ - GetImageStackFromExperiment(), - GetImageStack(), - ] - - -ImageStackParamType = ImageStackParam() diff --git a/starfish/core/util/click/test/test_help.py b/starfish/core/util/click/test/test_help.py deleted file mode 100644 index 4205e55b7..000000000 --- a/starfish/core/util/click/test/test_help.py +++ /dev/null @@ -1,58 +0,0 @@ -import subprocess -import unittest - -from starfish.core.util import exec - - -def assert_contains(actual, expected): - if isinstance(actual, bytes): - actual = actual.decode("utf-8") - if expected not in actual: - raise Exception(f"counldn't find: 'f{expected}'") - - -class TestHelpReturnCode(unittest.TestCase): - """ - Tests that the CLI supports a '--help' option at all of the expected levels. - """ - - STAGES = ( - [ - "starfish", "--help", - lambda tempdir: tempdir - ], - [ - "starfish", "DetectSpots", "--help", - lambda tempdir: tempdir - ], - [ - "starfish", "DetectSpots", "BlobDetector", "--help", - lambda tempdir: tempdir - ], - ) - - def test_run_build(self): - exec.stages( - TestHelpReturnCode.STAGES, - keep_data=False) - -class TestHelpStandardOut(unittest.TestCase): - """ - Tests that the calls to CLI's help produce the output that users expect. 
- """ - - def test_first(self): - actual = subprocess.check_output(["starfish", "--help"]) - expected = """Usage: starfish [OPTIONS] COMMAND [ARGS]...""" - assert_contains(actual, expected) - - def test_second(self): - actual = subprocess.check_output(["starfish", "DetectSpots", "--help"]) - expected = """Usage: starfish DetectSpots [OPTIONS] COMMAND [ARGS]...""" - actual = actual.decode("utf-8") - assert_contains(actual, expected) - - def test_third(self): - actual = subprocess.check_output(["starfish", "DetectSpots", "BlobDetector", "--help"]) - expected = """Usage: starfish DetectSpots BlobDetector [OPTIONS]""" - assert_contains(actual, expected) diff --git a/starfish/core/util/indirectfile/__init__.py b/starfish/core/util/indirectfile/__init__.py deleted file mode 100644 index 9362922b5..000000000 --- a/starfish/core/util/indirectfile/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from ._base import ( - ConversionFormula, - convert, - NoApplicableConversionFormulaError, - NoSuccessfulConversionFormulaError, -) -from ._codebook import GetCodebook, GetCodebookFromExperiment -from ._imagestack import GetImageStack, GetImageStackFromExperiment diff --git a/starfish/core/util/indirectfile/_base.py b/starfish/core/util/indirectfile/_base.py deleted file mode 100644 index 549ec4a43..000000000 --- a/starfish/core/util/indirectfile/_base.py +++ /dev/null @@ -1,75 +0,0 @@ -import abc -from typing import Generic, Iterable, TypeVar - - -FormulaResultType = TypeVar("FormulaResultType") - - -class ConversionFormula(Generic[FormulaResultType]): - """A conversion formula represents a plausible contract to convert a string value to another - data type. Each conversion formula implements two methods -- a lightweight method to determine - if this formula can be applied given a string value, and a load method that actually does the - conversion. 
- """ - @abc.abstractmethod - def applicable(self, input_parameter: str) -> bool: - """Returns true iff this formula might work for the given input.""" - raise NotImplementedError() - - @abc.abstractmethod - def load(self, input_parameter: str) -> FormulaResultType: - """Attempt to run this conversion formula against this input.""" - raise NotImplementedError() - - -class NoApplicableConversionFormulaError(Exception): - """Raised when no conversion formula declared itself applicable to this input string.""" - pass - - -class NoSuccessfulConversionFormulaError(Exception): - """Raised when all the conversion formulas that declared itself applicable failed to execute - successfully.""" - pass - - -ConvertResultType = TypeVar("ConvertResultType") - - -def convert( - value: str, - conversion_formulas: Iterable[ConversionFormula[ConvertResultType]]) -> ConvertResultType: - """ - Given a string value and a series of conversion formulas, attempt to convert the value using the - formulas. - - If none of the formulas declare themselves as applicable, then raise - :py:class:`NoApplicableConversionformulaError`. If none of the formulas that declare themselves - as eligible run successfully, then raise :py:class:`NoSuccessfulConversionformulaError`. - - Parameters - ---------- - value : str - The string value we are attempting to convert. - conversion_formulas : Iterable[ConversionFormula[ConvertResultType]] - A series of conversion formulas. - - Returns - ------- - The converted value. 
- """ - none_applied = True - - for conversion_formula in conversion_formulas: - if conversion_formula.applicable(value): - none_applied = False - try: - return conversion_formula.load(value) - except Exception: - pass - - if none_applied: - raise NoApplicableConversionFormulaError( - f"Could not find applicable gonversion formula for {value}") - raise NoSuccessfulConversionFormulaError( - f"All applicable conversion formulas failed to run successfully for {value}.") diff --git a/starfish/core/util/indirectfile/_codebook.py b/starfish/core/util/indirectfile/_codebook.py deleted file mode 100644 index 67f432b3a..000000000 --- a/starfish/core/util/indirectfile/_codebook.py +++ /dev/null @@ -1,21 +0,0 @@ -from starfish.core.codebook.codebook import Codebook -from starfish.core.experiment.experiment import Experiment -from starfish.core.util.indirectfile._base import ConversionFormula - - -class GetCodebookFromExperiment(ConversionFormula): - def applicable(self, input_parameter: str) -> bool: - return input_parameter.startswith("@") - - def load(self, input_parameter: str) -> Codebook: - path = input_parameter[1:] - experiment = Experiment.from_json(path) - return experiment.codebook - - -class GetCodebook(ConversionFormula): - def applicable(self, input_parameter: str) -> bool: - return not input_parameter.startswith("@") - - def load(self, input_parameter: str) -> Codebook: - return Codebook.open_json(input_parameter) diff --git a/starfish/core/util/indirectfile/_imagestack.py b/starfish/core/util/indirectfile/_imagestack.py deleted file mode 100644 index ea385cbb6..000000000 --- a/starfish/core/util/indirectfile/_imagestack.py +++ /dev/null @@ -1,28 +0,0 @@ -import re - -from starfish.core.experiment.experiment import Experiment -from starfish.core.imagestack.imagestack import ImageStack -from starfish.core.util.indirectfile._base import ConversionFormula - - -CRE = re.compile("@(?P.+)\[(?P[^\[\]]+)\]\[(?P[^\[\]]+)\]") # noqa: W605 - - -class 
GetImageStackFromExperiment(ConversionFormula[ImageStack]): - def applicable(self, input_parameter: str) -> bool: - return CRE.match(input_parameter) is not None - - def load(self, input_parameter: str) -> ImageStack: - mo = CRE.match(input_parameter) - assert mo is not None - experiment = Experiment.from_json(mo.group("path")) - fov = experiment[mo.group("fov")] - return fov.get_image(mo.group("image_type")) - - -class GetImageStack(ConversionFormula[ImageStack]): - def applicable(self, input_parameter: str) -> bool: - return not CRE.match(input_parameter) - - def load(self, input_parameter: str) -> ImageStack: - return ImageStack.from_path_or_url(input_parameter) diff --git a/starfish/test/full_pipelines/cli/__init__.py b/starfish/test/full_pipelines/cli/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/starfish/test/full_pipelines/cli/_base_cli_test.py b/starfish/test/full_pipelines/cli/_base_cli_test.py deleted file mode 100644 index 9cd77ae27..000000000 --- a/starfish/test/full_pipelines/cli/_base_cli_test.py +++ /dev/null @@ -1,36 +0,0 @@ -import os -import shutil - -from starfish.core.intensity_table.intensity_table import IntensityTable -from starfish.core.util import exec - - -class CLITest: - """This is a base class for testing CLI methods. Each stage should correspond - to a different pipeline step. Running the test will go through each stage and - run the command line method with given arguments. The last stage should produce - a file called results containing an IntensityTable. Each cli test should define - it's own verify_results method. 
- """ - @property - def subdirs(self): - raise NotImplementedError() - - @property - def stages(self): - raise NotImplementedError() - - @property - def spots_file(self): - return "spots.nc" - - def verify_results(self, intensities: IntensityTable): - raise NotImplementedError() - - def test_run_pipline(self): - tempdir = exec.stages(self.stages, self.subdirs, keep_data=True) - intensities = IntensityTable.open_netcdf(os.path.join(tempdir, "results", self.spots_file)) - self.verify_results(intensities) - - if os.getenv("TEST_KEEP_DATA") is None: - shutil.rmtree(tempdir) diff --git a/starfish/test/full_pipelines/cli/test_allen_smFISH_cli.py b/starfish/test/full_pipelines/cli/test_allen_smFISH_cli.py deleted file mode 100644 index f0f0400cc..000000000 --- a/starfish/test/full_pipelines/cli/test_allen_smFISH_cli.py +++ /dev/null @@ -1,87 +0,0 @@ -import os -import sys -import unittest - -from ._base_cli_test import CLITest - - -@unittest.skip("skipping for now") -class TestAllenData(CLITest, unittest.TestCase): - - @property - def subdirs(self): - return ( - "registered", - "filtered", - "results" - ) - - @property - def stages(self): - return ( - [ - sys.executable, - "starfish/test/full_pipelines/cli/get_cli_test_data.py", - "https://d2nhj9g34unfro.cloudfront.net/20180828/" - + "allen_smFISH-TEST/allen_smFISH_test_data.zip", - lambda tempdir, *args, **kwargs: os.path.join(tempdir, "registered") - ], - [ - "starfish", "Filter", - "--input", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "registered/fov_001", "primary_images.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "clip_filtered.json"), - "Clip", - "--p-min", "10", - "--p-max", "100" - ], - [ - "starfish", "Filter", - "--input", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "clip_filtered.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "bandpass_filtered.json"), - "Bandpass", - "--lshort", 
".5", - "--llong", "7", - "--truncate", "4" - ], - [ - "starfish", "Filter", - "--input", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "bandpass_filtered.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "clip2_filtered.json"), - "Clip", - "--p-min", "10", - "--p-max", "100" - ], - [ - "starfish", "Filter", - "--input", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "clip2_filtered.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "gaussian_filtered.json"), - "GaussianLowPass", - "--sigma", "1" - ], - [ - "starfish", "DetectSpots", - "--input", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "gaussian_filtered.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join(tempdir, "results"), - "LocalMaxPeakFinder", - "--spot-diameter", "3", - "--min-mass", "300", - "--max-size", "3", - "--separation", "5", - "--percentile", "10", - "--is-volume" - ], - - ) - - def verify_results(self, intensities): - # TODO DEEP SAYS WAIT ON THIS TEST TILL STUFF GETS FIGURED OUT - pass diff --git a/starfish/test/full_pipelines/cli/test_dartfish_cli.py b/starfish/test/full_pipelines/cli/test_dartfish_cli.py deleted file mode 100644 index bbe1e1c45..000000000 --- a/starfish/test/full_pipelines/cli/test_dartfish_cli.py +++ /dev/null @@ -1,107 +0,0 @@ -import os -import unittest - -import numpy as np -import pandas as pd -import pytest - -from starfish import IntensityTable -from starfish.types import Features -from ._base_cli_test import CLITest - - -EXPERIMENT_JSON_URL = "https://d2nhj9g34unfro.cloudfront.net/20181005/DARTFISH-TEST/experiment.json" - - -@pytest.mark.slow -class TestWithDartfishData(CLITest, unittest.TestCase): - - @property - def subdirs(self): - return ( - "registered", - "filtered", - "results" - ) - - @property - def stages(self): - return ( - [ - "starfish", "Filter", - "--input", - 
f"@{EXPERIMENT_JSON_URL}[fov_001][primary]", - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "scale_filtered.json"), - "Clip", - "--p-max", "100", - "--expand-dynamic-range" - ], - [ - "starfish", "Filter", - "--input", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "scale_filtered.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "zero_filtered.json"), - "ZeroByChannelMagnitude", - "--thresh", ".05", - ], - [ - "starfish", "DetectPixels", - "--input", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "zero_filtered.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "results", "spots.nc"), - "--codebook", f"@{EXPERIMENT_JSON_URL}", - "PixelSpotDecoder", - "--distance-threshold", "3", - "--magnitude-threshold", ".5", - "--min-area", "5", - "--max-area", "30", - ], - ) - - def verify_results(self, intensities): - assert intensities[Features.PASSES_THRESHOLDS].sum() - - spots_df = IntensityTable( - intensities.where(intensities[Features.PASSES_THRESHOLDS], drop=True) - ).to_features_dataframe() - spots_df['area'] = np.pi * spots_df['radius'] ** 2 - - # verify number of spots detected - spots_passing_filters = intensities[Features.PASSES_THRESHOLDS].sum() - assert spots_passing_filters == 53 # TODO note, had to change this by 1 - - # compare to benchmark data -- note that this particular part of the dataset - # appears completely uncorrelated - cnts_benchmark = pd.read_csv( - 'https://d2nhj9g34unfro.cloudfront.net/20181005/DARTFISH/fov_001/counts.csv') - - min_dist = 0.6 - cnts_starfish = spots_df[spots_df.distance <= min_dist].groupby('target').count()['area'] - cnts_starfish = cnts_starfish.reset_index(level=0) - cnts_starfish.rename(columns={'target': 'gene', 'area': 'cnt_starfish'}, inplace=True) - - # get top 5 genes and verify they are correct - high_expression_genes = cnts_starfish.sort_values('cnt_starfish', 
ascending=False).head(5) - - assert np.array_equal( - high_expression_genes['cnt_starfish'].values, - [7, 3, 2, 2, 2] - ) - assert np.array_equal( - high_expression_genes['gene'].values, - ['MBP', 'MOBP', 'ADCY8', 'TRIM66', 'SYT6'] - ) - - # verify correlation is accurate for this subset of the image - benchmark_comparison = pd.merge(cnts_benchmark, cnts_starfish, on='gene', how='left') - benchmark_comparison.head(20) - - x = benchmark_comparison.dropna().cnt.values - y = benchmark_comparison.dropna().cnt_starfish.values - corrcoef = np.corrcoef(x, y) - corrcoef = corrcoef[0, 1] - - assert np.round(corrcoef, 5) == 0.03028 diff --git a/starfish/test/full_pipelines/cli/test_iss.py b/starfish/test/full_pipelines/cli/test_iss.py deleted file mode 100644 index c33849579..000000000 --- a/starfish/test/full_pipelines/cli/test_iss.py +++ /dev/null @@ -1,178 +0,0 @@ -""" -Notes ------ -This test and docs/source/usage/iss/iss_cli.sh test the same code paths and should be updated -together -""" -import os -import unittest - -import numpy as np -import pandas as pd -import pytest - -from starfish.types import Features -from ._base_cli_test import CLITest - - -EXPERIMENT_JSON_URL = "https://d2nhj9g34unfro.cloudfront.net/20181005/ISS-TEST/experiment.json" - - -@pytest.mark.slow -class TestWithIssData(CLITest, unittest.TestCase): - - @property - def spots_file(self): - return "decoded-spots.nc" - - @property - def subdirs(self): - return ( - "max_projected", - "transforms", - "registered", - "filtered", - "results", - ) - - @property - def stages(self): - return ( - [ - "starfish", "validate", "experiment", EXPERIMENT_JSON_URL, - ], - [ - "starfish", "Filter", - "--input", - f"@{EXPERIMENT_JSON_URL}[fov_001][primary]", - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "max_projected", "primary_images.json"), - "MaxProject", - "--dims", "c", - "--dims", "z" - - ], - [ - "starfish", "LearnTransform", - "--input", lambda tempdir, *args, **kwargs: os.path.join( 
- tempdir, "max_projected", "primary_images.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "transforms", "transforms.json"), - "Translation", - "--reference-stack", - f"@{EXPERIMENT_JSON_URL}[fov_001][dots]", - "--upsampling", "1000", - "--axes", "r" - ], - [ - "starfish", "ApplyTransform", - "--input", - f"@{EXPERIMENT_JSON_URL}[fov_001][primary]", - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "registered", "primary_images.json"), - "--transformation-list", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "transforms", "transforms.json"), - "Warp", - ], - [ - "starfish", "Filter", - "--input", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "registered", "primary_images.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "primary_images.json"), - "WhiteTophat", - "--masking-radius", "15", - ], - [ - "starfish", "Filter", - "--input", - f"@{EXPERIMENT_JSON_URL}[fov_001][nuclei]", - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "nuclei.json"), - "WhiteTophat", - "--masking-radius", "15", - ], - [ - "starfish", "Filter", - "--input", - f"@{EXPERIMENT_JSON_URL}[fov_001][dots]", - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "dots.json"), - "WhiteTophat", - "--masking-radius", "15", - ], - [ - "starfish", "DetectSpots", - "--input", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "primary_images.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "results", "spots.nc"), - "--blobs-stack", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "dots.json"), - "--blobs-axis", "r", "--blobs-axis", "c", - "BlobDetector", - "--min-sigma", "4", - "--max-sigma", "6", - "--num-sigma", "20", - "--threshold", "0.01", - ], - [ - "starfish", "Segment", - "--primary-images", lambda tempdir, *args, **kwargs: os.path.join( - 
tempdir, "filtered", "primary_images.json"), - "--nuclei", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "nuclei.json"), - "-o", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "results", "masks.tgz"), - "Watershed", - "--nuclei-threshold", ".16", - "--input-threshold", ".22", - "--min-distance", "57", - ], - [ - "starfish", "AssignTargets", - "--label-image", - lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "results", "masks.tgz"), - "--intensities", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "results", "spots.nc"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "results", "targeted-spots.nc"), - "Label", - ], - [ - "starfish", "Decode", - "-i", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "results", "targeted-spots.nc"), - "--codebook", - f"@{EXPERIMENT_JSON_URL}", - "-o", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "results", "decoded-spots.nc"), - "PerRoundMaxChannel", - ], - - # Validate results/{spots,targeted-spots,decoded-spots}.nc - [ - "starfish", "validate", "xarray", - lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "results", "spots.nc") - ], - [ - "starfish", "validate", "xarray", - lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "results", "targeted-spots.nc") - ], - [ - "starfish", "validate", "xarray", - lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "results", "decoded-spots.nc") - ], - ) - - def verify_results(self, intensities): - # TODO make this test stronger - genes, counts = np.unique( - intensities.coords[Features.TARGET], return_counts=True) - gene_counts = pd.Series(counts, genes) - # TODO THERE"S NO HUMAN/MOUSE KEYS? 
- assert gene_counts['ACTB'] diff --git a/starfish/test/full_pipelines/cli/test_merfish_cli.py b/starfish/test/full_pipelines/cli/test_merfish_cli.py deleted file mode 100644 index b888a5f5c..000000000 --- a/starfish/test/full_pipelines/cli/test_merfish_cli.py +++ /dev/null @@ -1,83 +0,0 @@ -import os -import sys -import unittest - -from ._base_cli_test import CLITest - - -@unittest.skip("skipping for now") -class TestWithMerfishData(CLITest, unittest.TestCase): - # __test__ = True - # TODO this test currently fails because it doesn't do the scaling the notebook does in memory - # Need to figure out ScaleByPercentile change - - @property - def subdirs(self): - return ( - "registered", - "filtered", - "results" - ) - - @property - def stages(self): - return ( - [ - sys.executable, - "starfish/test/full_pipelines/cli/get_cli_test_data.py", - "https://d2nhj9g34unfro.cloudfront.net/20181005/MERFISH-TEST/experiment.json", - lambda tempdir, *args, **kwargs: os.path.join(tempdir, "registered") - ], - [ - "starfish", "Filter", - "--input", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "registered/fov_001", "primary_images.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "gaussian_filtered.json"), - "GaussianHighPass", - "--sigma", "3", - ], - [ - "starfish", "Filter", - "--input", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "gaussian_filtered.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "deconvolve_filtered.json"), - "DeconvolvePSF", - "--sigma", "2", - "--num-iter", "9" - ], - [ - "starfish", "Filter", - "--input", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "deconvolve_filtered.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "gaussian_filtered.json"), - "GaussianLowPass", - "--sigma", "1" - ], - [ - "starfish", "Filter", - "--input", lambda tempdir, *args, **kwargs: 
os.path.join( - tempdir, "filtered", "deconvolve_filtered.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "scale_filtered.json"), - "ScaleByPercentile", - "--p", "90", - ], - [ - "starfish", "DetectSpots", - "--input", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "filtered", "scale_filtered.json"), - "--output", lambda tempdir, *args, **kwargs: os.path.join(tempdir, "results"), - "PixelSpotDetector", - "--codebook", lambda tempdir, *args, **kwargs: os.path.join( - tempdir, "registered", "codebook.json"), - "--distance-threshold", "0.5176", - "--magnitude-threshold", "5e-5", - "--norm-order", "2", - ], - ) - - def verify_results(self, intensities): - pass diff --git a/starfish/test/full_pipelines/recipe/__init__.py b/starfish/test/full_pipelines/recipe/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/starfish/test/full_pipelines/recipe/_base_recipe_test.py b/starfish/test/full_pipelines/recipe/_base_recipe_test.py deleted file mode 100644 index e5b544eeb..000000000 --- a/starfish/test/full_pipelines/recipe/_base_recipe_test.py +++ /dev/null @@ -1,45 +0,0 @@ -import os -import shutil -from pathlib import Path -from typing import Iterable, Optional - -from starfish.core.util import exec - - -class RecipeTest: - """This is a base class for testing recipes. Each recipe test should define its recipe file, - the input files, the output files, and a test method that verifies the correctness of the - results. 
- """ - @property - def recipe(self) -> Path: - raise NotImplementedError() - - @property - def input_url_or_paths(self) -> Iterable[str]: - raise NotImplementedError() - - @property - def output_paths(self) -> Iterable[Path]: - raise NotImplementedError() - - def verify_results(self, tempdir: Path): - raise NotImplementedError() - - def test_run_recipe(self): - cmdline = ["starfish", "recipe", "--recipe", self.recipe] - for input_url_or_path in self.input_url_or_paths: - cmdline.extend(["--input", input_url_or_path]) - for output_path in self.output_paths: - cmdline.extend([ - "--output", - lambda tempdir, *args, **kwargs: os.path.join(tempdir, os.fspath(output_path))]) - - tempdir: Optional[str] = None - try: - tempdir = exec.stages([cmdline], keep_data=True) - - self.verify_results(Path(tempdir)) - finally: - if tempdir is not None and os.getenv("TEST_KEEP_DATA") is None: - shutil.rmtree(tempdir) diff --git a/starfish/test/full_pipelines/recipe/iss_recipe.txt b/starfish/test/full_pipelines/recipe/iss_recipe.txt deleted file mode 100644 index 2b1dea0c5..000000000 --- a/starfish/test/full_pipelines/recipe/iss_recipe.txt +++ /dev/null @@ -1,55 +0,0 @@ -primary_image = file_inputs[0] -dots = file_inputs[1] -nuclei = file_inputs[2] -codebook = file_inputs[3] - -max_proj = compute( - Filter.MaxProject, - primary_image, - dims=['c', 'z']) - -transformation_list = compute( - LearnTransform.Translation, - max_proj, - reference_stack=dots, upsampling=1000, axes=Axes.ROUND) - -transformed = compute( - ApplyTransform.Warp, - primary_image, - transformation_list) - -filtered_primary = compute( - Filter.WhiteTophat, - transformed, - masking_radius=15) - -filtered_nuclei = compute( - Filter.WhiteTophat, - nuclei, - masking_radius=15) - -filtered_dots = compute( - Filter.WhiteTophat, - dots, - masking_radius=15) - -spots = compute( - DetectSpots.BlobDetector, - filtered_primary, filtered_dots, {Axes.ROUND, Axes.CH}, - min_sigma=4, max_sigma=6, num_sigma=20, threshold=0.01) - 
-segmentation = compute( - Segment.Watershed, - filtered_primary, filtered_nuclei, - nuclei_threshold=.16, input_threshold=.22, min_distance=57) - -target_assignment = compute( - AssignTargets.Label, - segmentation, spots) - -decoded = compute( - Decode.PerRoundMaxChannel, - target_assignment, - codebook=codebook) - -file_outputs[0] = decoded diff --git a/starfish/test/full_pipelines/recipe/test_iss.py b/starfish/test/full_pipelines/recipe/test_iss.py deleted file mode 100644 index b4fec70e6..000000000 --- a/starfish/test/full_pipelines/recipe/test_iss.py +++ /dev/null @@ -1,53 +0,0 @@ -""" -Notes ------ -This test and docs/source/usage/iss/iss_cli.sh test the same code paths and should be updated -together -""" -import os -import unittest -from pathlib import Path -from typing import Iterable - -import numpy as np -import pandas as pd -import pytest - -from starfish import IntensityTable -from starfish.types import Features -from ._base_recipe_test import RecipeTest - - -URL = "https://d2nhj9g34unfro.cloudfront.net/20181005/ISS-TEST/experiment.json" - - -@pytest.mark.slow -class TestWithIssData(RecipeTest, unittest.TestCase): - @property - def recipe(self) -> Path: - test_file_path = Path(__file__) - recipe = test_file_path.parent / "iss_recipe.txt" - return recipe - - @property - def input_url_or_paths(self) -> Iterable[str]: - return [ - f"@{URL}[fov_001][primary]", # primary image - f"@{URL}[fov_001][dots]", # dots image - f"@{URL}[fov_001][nuclei]", # nuclei image - f"@{URL}", # codebook - ] - - @property - def output_paths(self) -> Iterable[Path]: - return [ - Path("decoded_spots.nc") - ] - - def verify_results(self, tempdir: Path): - intensities = IntensityTable.open_netcdf(os.fspath(tempdir / "decoded_spots.nc")) - genes, counts = np.unique( - intensities.coords[Features.TARGET], return_counts=True) - gene_counts = pd.Series(counts, genes) - assert gene_counts['ACTB'] == 9 - assert gene_counts['GAPDH'] == 9