Skip to content

Commit

Permalink
Merge pull request #61 from MahdiAll99/dev
Browse files Browse the repository at this point in the history
Documentation update
  • Loading branch information
MahdiAll99 committed Apr 20, 2024
2 parents 549156d + 743345b commit 11aef7b
Show file tree
Hide file tree
Showing 12 changed files with 80 additions and 65 deletions.
2 changes: 1 addition & 1 deletion MEDimage/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
logging.getLogger(__name__).addHandler(stream_handler)

__author__ = "MEDomicsLab consortium"
__version__ = "0.9.0"
__version__ = "0.9.4"
__copyright__ = "Copyright (C) MEDomicsLab consortium"
__license__ = "GNU General Public License 3.0"
__maintainer__ = "MAHDI AIT LHAJ LOUTFI"
Expand Down
2 changes: 1 addition & 1 deletion MEDimage/learning/DataCleaner.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ def cut_off_missing_per_sample(self, var_of_type: List[str], missing_cutoff : fl
"""
# Initialization
n_observation, n_features = self.df_features.shape
empty_vec = np.zeros(n_observation, dtype=np.int)
empty_vec = np.zeros(n_observation, dtype=int)
data = self.df_features[var_of_type]
empty_vec += data.isna().sum(axis=1).values

Expand Down
2 changes: 1 addition & 1 deletion MEDimage/processing/segmentation.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,7 +190,7 @@ def get_sep_roi_names(name_roi_in: str,
ind_start = strfind(string=name_roi_in, pattern="{")
n_roi = len(ind_start)
ind_stop = strfind(string=name_roi_in, pattern="}")
ind_keep = np.ones(n_delim, dtype=np.bool)
ind_keep = np.ones(n_delim, dtype=bool)
for d in np.arange(n_delim):
for r in np.arange(n_roi):
            # Thus not inside a ROI name
Expand Down
24 changes: 20 additions & 4 deletions MEDimage/wrangling/DataManager.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def __init__(
path_save: Union[Path, str] = None,
path_save_checks: Union[Path, str] = None,
path_pre_checks_settings: Union[Path, str] = None,
save: bool = False,
save: bool = True,
n_batch: int = 2
) -> None:
"""Constructor of the class DataManager.
Expand Down Expand Up @@ -144,7 +144,6 @@ def __init__(
self.__studies = []
self.__institutions = []
self.__scans = []
self.__warned = False

def __find_uid_cell_index(self, uid: Union[str, List[str]], cell: List[str]) -> List:
"""Finds the cell with the same `uid`. If not is present in `cell`, creates a new position
Expand Down Expand Up @@ -202,10 +201,11 @@ def __associate_rt_stuct(self) -> None:
"""
print('--> Associating all RT objects to imaging volumes')
n_rs = len(self.__dicom.stack_path_rs)
self.__dicom.stack_series_rs = list(dict.fromkeys(self.__dicom.stack_series_rs))
if n_rs:
for i in trange(0, n_rs):
try: # PUT ALL THE DICOM PATHS WITH THE SAME UID IN THE SAME PATH LIST
self.__dicom.stack_series_rs = list(set(self.__dicom.stack_series_rs))
try:
# PUT ALL THE DICOM PATHS WITH THE SAME UID IN THE SAME PATH LIST
ind_series_id = self.__find_uid_cell_index(
self.__dicom.stack_series_rs[i],
self.__dicom.cell_series_id)
Expand Down Expand Up @@ -275,6 +275,8 @@ def __read_all_dicoms(self) -> None:
except:
frame_uid = info.FrameOfReferenceUID
self.__dicom.stack_frame_rs += [frame_uid]
else:
print("Modality not supported: ", info.Modality)

except Exception as e:
print(f'Error while reading: {file}, error: {e}\n')
Expand Down Expand Up @@ -551,9 +553,18 @@ def process_all_niftis(self) -> List[MEDscan]:
Returns:
List[MEDscan]: List of MEDscan instances.
"""

# Reading all NIfTI files
self.__read_all_niftis()

# Create the MEDscan instances
print('--> Reading all NIfTI objects (imaging volumes & masks) to create MEDscan classes')
list_instances = []
for file in tqdm(self.__nifti.stack_path_images):
        # Assert the list of instances does not exceed a size of 10
if len(list_instances) >= 10:
print('The number of MEDscan instances exceeds 10, please consider saving the instances')
break
# INITIALIZE MEDscan INSTANCE AND UPDATE ATTRIBUTES
medscan = MEDscan()
medscan.patientID = os.path.basename(file).split("_")[0]
Expand All @@ -577,6 +588,8 @@ def process_all_niftis(self) -> List[MEDscan]:
# SAVE MEDscan INSTANCE
if self.save and self.paths._path_save:
save_MEDscan(medscan, self.paths._path_save)
else:
list_instances.append(medscan)

# Update the path to the created instances
name_save = self.__get_MEDscan_name_save(medscan)
Expand Down Expand Up @@ -611,6 +624,9 @@ def process_all_niftis(self) -> List[MEDscan]:
"naming convention 'study-institution-id' (Ex: Glioma-TCGA-001)")
print('DONE')

if list_instances:
return list_instances

def update_from_csv(self, path_csv: Union[str, Path] = None) -> None:
"""Updates the class from a given CSV and summarizes the processed scans again according to it.
Expand Down
11 changes: 5 additions & 6 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
[![PyPI - Python Version](https://img.shields.io/badge/python-3.8%20|%203.9%20|%203.10-blue)](https://www.python.org/downloads/release/python-380/)
[![TestPyPI - version](https://img.shields.io/badge/pypi-v0.2.0-blue)](https://test.pypi.org/project/medimage-pkg/0.2.0/)
[![Continuous Integration](https://github.com/MahdiAll99/MEDimage/actions/workflows/python-app.yml/badge.svg)](https://github.com/MahdiAll99/MEDimage/actions/workflows/python-app.yml)
[![Upload Python Package](https://github.com/MahdiAll99/MEDimage/actions/workflows/python-publish.yml/badge.svg)](https://github.com/MahdiAll99/MEDimage/actions/workflows/python-publish.yml)
[![Documentation Status](https://readthedocs.org/projects/medimage/badge/?version=latest)](https://medimage.readthedocs.io/en/latest/?badge=latest)
[![License: GPL-3](https://img.shields.io/badge/license-GPLv3-blue)](LICENSE)
[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/MahdiAll99/MEDimage/blob/main/notebooks/tutorial/DataManager-Tutorial.ipynb)
Expand All @@ -20,7 +19,7 @@
* [5. Tutorials](#5-tutorials)
* [6. IBSI Standardization](#6-ibsi-standardization)
* [IBSI Chapter 1](#ibsi-chapter-1)
* [IBSI Chapter 2 (In progress)](#ibsi-chapter-2-in-progress)
* [IBSI Chapter 2](#ibsi-chapter-2)
* [7. Acknowledgement](#7-acknowledgement)
* [8. Authors](#8-authors)
* [9. Statement](#9-statement)
Expand All @@ -39,7 +38,7 @@ The MEDimage package requires *Python 3.8* or more. If you don't have it install
### Package installation
You can easily install the ``MEDimage`` package from PyPI using:
```
pip install MEDimage
pip install medimage-pkg
```

For more installation options (Conda, Poetry...) check out the [installation documentation](https://medimage.readthedocs.io/en/latest/Installation.html).
Expand Down Expand Up @@ -111,8 +110,8 @@ The image biomarker standardization initiative ([IBSI](https://theibsi.github.io
- **Phase 1**: [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/MahdiAll99/MEDimage/blob/main/notebooks/ibsi/ibsi1p1.ipynb)
- **Phase 2**: [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/MahdiAll99/MEDimage/blob/main/notebooks/ibsi/ibsi1p2.ipynb)

- ### IBSI Chapter 2 (In progress)
[The IBSI chapter 2](https://theibsi.github.io/ibsi2/) was launched in June 2020 and is still in progress. It is dedicated to the standardization of commonly used imaging filters in radiomic studies. We have created two [jupyter notebooks](https://github.com/MahdiAll99/MEDimage/tree/main/notebooks/ibsi) for each phase of the chapter and made them available for the users to run the IBSI tests for themselves and validate image filtering and image biomarker calculations from filter response maps. The tests can also be explored in interactive Colab notebooks that are directly accessible here:
- ### IBSI Chapter 2
[The IBSI chapter 2](https://theibsi.github.io/ibsi2/) was launched in June 2020 and reached completion in February 2024. It is dedicated to the standardization of commonly used imaging filters in radiomic studies. We have created two [jupyter notebooks](https://github.com/MahdiAll99/MEDimage/tree/main/notebooks/ibsi) for each phase of the chapter and made them available for the users to run the IBSI tests for themselves and validate image filtering and image biomarker calculations from filter response maps. The tests can also be explored in interactive Colab notebooks that are directly accessible here:

- **Phase 1**: [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/MahdiAll99/MEDimage/blob/main/notebooks/ibsi/ibsi2p1.ipynb)
- **Phase 2**: [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/MahdiAll99/MEDimage/blob/main/notebooks/ibsi/ibsi2p2.ipynb)
Expand Down Expand Up @@ -177,4 +176,4 @@ Here's what the license entails:
9. The software author or license can not be held liable for any damages inflicted by the software.
```

More information about the [LICENSE can be found here](https://github.com/MahdiAll99/MEDimage/blob/main/LICENSE.md)
More information about the [LICENSE can be found here](https://github.com/MEDomics-UdeS/MEDimage/blob/main/LICENSE.md)
2 changes: 1 addition & 1 deletion docs/Installation.rst
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ Install via pip
---------------
``MEDimage`` is available on PyPi for installation via ``pip`` which allows you to install the package in one step ::

pip install MEDimage
pip install medimage-pkg

Install from source
-------------------
Expand Down
16 changes: 8 additions & 8 deletions docs/tutorials.rst
Original file line number Diff line number Diff line change
Expand Up @@ -3,18 +3,18 @@ Instructions

Download dataset
----------------
In all tutorials, we use an open-access dataset containing medical images for various cancer types (Glioma, sarcoma...)
and different imaging modalities (MR, CT, and PET). The dataset has been pre-processed to adhere to package norms.
In all tutorials, we use open-access data of medical images of various cancer types (Glioma, sarcoma...)
and with different imaging modalities (MR, CT, and PET). All data has been pre-processed to adhere to package norms.

To download the dataset (~3.2 GB) and organize it in your local workspace, run the following command in your terminal from
the package parent folder ::
In order to run the tutorials, you must first download the dataset. We recommend downloading only a subset (~347 MB) instead of the
full dataset (~3.2 GB). To do so, run the following command in your terminal from the package parent folder: ::
python scripts/download_data.py --full-sts
python scripts/download_data.py --subset
.. note::
The dataset is large, and options are available to download only a subset. For more information, run:
python scripts/download_data.py --help
To download the full dataset, simply run the following command in your terminal from the package parent folder: ::

python scripts/download_data.py --full-sts

CSV file
--------
Expand Down
4 changes: 2 additions & 2 deletions environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ dependencies:
- nibabel
- pandas<2.0.0
- pillow
- pydicom
- pydicom>1.2.0, <=1.3.0
- pywavelets
- scikit-image
- scipy
Expand All @@ -27,7 +27,7 @@ dependencies:
- numpyencoder
- protobuf
- pycaret
- ray
- ray[default]
- scikit_image
- SimpleITK
- scikit_learn
Expand Down
60 changes: 30 additions & 30 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "medimage-pkg"
version = "0.9.0"
version = "0.9.4"
description = "MEDimage is a Python package for processing and extracting features from medical images"
authors = ["MEDomics Consortium <medomics.info@gmail.com>"]
license = "GPL-3.0"
Expand All @@ -15,38 +15,38 @@ packages = [ {include = "MEDimage"} ]

[tool.poetry.dependencies]
python = ">=3.8.0,<=3.10"
isort = "5.10.1"
ipykernel = "^6.15.1"
ipywidgets = "^8.0.1"
jupyter = "^1.0.0"
matplotlib = "3.7.0"
networkx = "2.8.5"
neuroCombat = "0.2.12"
nibabel = "3.2.2"
nilearn = "0.10.1"
numpy = "1.22.4"
numpyencoder = "0.3.0"
pandas = "1.5.3"
Pillow = "9.2.0"
protobuf = "3.20.*"
pycaret = "3.0.4"
pydicom = "1.2.2"
PyWavelets = "1.1.1"
ray = "2.5.1"
scikit-image = "0.18.2"
scipy = "1.7.0"
setuptools = "62.2.0"
SimpleITK = "2.1.1.2"
scikit_learn = "1.2.2"
seaborn = "0.13.2"
Sphinx = "7.2.6"
numpy = "*"
isort = "*"
ipykernel = "*"
ipywidgets = "*"
jupyter = "*"
matplotlib = "*"
networkx = "*"
neuroCombat = "*"
nibabel = "*"
nilearn = "*"
numpyencoder = "*"
pandas = "<2.0.0"
Pillow = "*"
protobuf = "*"
pycaret = "*"
pydicom = ">1.2.0, <=1.3.0"
PyWavelets = "*"
ray = { version = "*", extras = ["default"] }
scikit_image = "*"
scipy = "*"
setuptools = "*"
SimpleITK = "*"
scikit_learn = "*"
seaborn = "*"
Sphinx = "*"
sphinx-carousel = "1.2.0"
sphinx-jsonschema = "1.19.1"
sphinx-rtd-dark-mode = "1.2.4"
tqdm = "4.65.0"
wget = "3.2"
tabulate = "0.9.0"
xgboost = "1.7.6"
tqdm = "*"
wget = "*"
tabulate = "*"
xgboost = "*"

[tool.poetry.dev-dependencies]

Expand Down
4 changes: 2 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,9 @@ pandas<2.0.0
Pillow
protobuf
pycaret
pydicom
pydicom>1.2.0, <=1.3.0
PyWavelets
ray
ray[default]
scikit_image
scipy
setuptools
Expand Down
16 changes: 8 additions & 8 deletions scripts/download_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ def main(full_sts: bool, subset: bool) -> None:
print("\n================ Downloading first part of data ================")
try:
wget.download(
"https://sandbox.zenodo.org/record/1106515/files/MEDimage-Dataset-No-STS.zip?download=1",
"https://sandbox.zenodo.org/records/45640/files/MEDimage-Dataset-No-STS.zip?download=1",
out=os.getcwd())
except Exception as e:
print("MEDimage-Dataset-No-STS.zip download failed, error:", e)
Expand Down Expand Up @@ -114,29 +114,29 @@ def main(full_sts: bool, subset: bool) -> None:
print("\n================ Downloading second part of data ================")
try:
wget.download(
"https://sandbox.zenodo.org/record/1106515/files/MEDimage-Dataset-STS-McGill-001-005.zip?download=1",
"https://sandbox.zenodo.org/records/45644/files/MEDimage-STS-Dataset-Subset.zip?download=1",
out=os.getcwd())
pass
except Exception as e:
print("MEDimage-Dataset-STS-McGill-001-005.zip download failed, error:", e)
print("MEDimage-STS-Dataset-Subset.zip download failed, error:", e)

# unzip data
print("\n================ Extracting second part of data ================")
try:
with zipfile.ZipFile(os.getcwd() + "/MEDimage-Dataset-STS-McGill-001-005.zip", 'r') as zip_ref:
with zipfile.ZipFile(os.getcwd() + "/MEDimage-STS-Dataset-Subset.zip", 'r') as zip_ref:
zip_ref.extractall(os.getcwd())
# remove zip file after extraction
os.remove(os.getcwd() + "/MEDimage-Dataset-STS-McGill-001-005.zip")
os.remove(os.getcwd() + "/MEDimage-STS-Dataset-Subset.zip")
except Exception as e:
print("MEDimage-Dataset-STS-McGill-001-005.zip extraction failed, error:", e)
print("MEDimage-STS-Dataset-Subset.zip extraction failed, error:", e)

# organize data in the right folder
print("\n================== Organizing data in folders ==================")
try:
shutil.move(os.getcwd() + "/STS-McGill-001-005",
shutil.move(os.getcwd() + "/MEDimage-STS-Dataset-Subset",
os.getcwd() + "/notebooks" + "/tutorial" + "/data" + "/DICOM-STS")
except Exception as e:
print("Failed to move STS-McGill-001-005 folder, error:", e)
print("Failed to move MEDimage-STS-Dataset-Subset folder, error:", e)


if __name__ == "__main__":
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

setup(
name="MEDimage",
version="0.9.0",
version="0.9.4",
author="MEDomics consortium",
author_email="medomics.info@gmail.com",
description="Python Open-source package for medical images processing and radiomic features extraction",
Expand Down

0 comments on commit 11aef7b

Please sign in to comment.