Commit c207bb5: fix conflicts
Former-commit-id: caad95a
skim2257 committed Dec 13, 2021
1 parent 5b990ec commit c207bb5
Showing 8 changed files with 30 additions and 36 deletions.
imgtools/autopipeline.py (10 changes: 5 additions & 5 deletions)
@@ -1,4 +1,4 @@
import os
import os, pathlib
import shutil
import glob
import pickle
@@ -173,20 +173,20 @@ def process_one_subject(self, subject_id):
metadata[f"metadata_{colname}"] = [read_results[i].get_metadata()]
print(subject_id, " SAVED PET")
#Saving all the metadata in multiple text files
with open(os.path.join(self.output_directory,".temp",f'temp_{subject_id}.pkl'),'wb') as f:
with open(os.path.join(self.output_directory,".temp",f'{subject_id}.pkl'),'wb') as f:
pickle.dump(metadata,f)
return

def save_data(self):
files = glob.glob(os.path.join(self.output_directory,".temp","*.pkl"))
for file in files:
subject_id = ("_").join(file.replace("/","_").replace(".","_").split("_")[-3:-1])
subject_id = file.replace(".","/").split("/")[-2][5:] #From temp_ considers
filename = pathlib.Path(file).name
subject_id = os.path.splitext(filename)[0]
with open(file,"rb") as f:
metadata = pickle.load(f)
self.output_df.loc[subject_id, list(metadata.keys())] = list(metadata.values())
self.output_df.to_csv(self.output_df_path)
shutil.rmtree(os.path.join(self.output_directory,".temp"))
shutil.rmtree(os.path.join(self.output_directory, ".temp"))

def run(self):
"""Execute the pipeline, possibly in parallel.
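The save_data change above is the substantive part of this hunk: the temporary pickles are now written as "<subject_id>.pkl" instead of "temp_<subject_id>.pkl", so the subject id can be recovered as the file stem rather than by splitting the path on separators and underscores. A minimal sketch of the new lookup (the example path is hypothetical):

    import os
    import pathlib

    # Hypothetical temp file written by process_one_subject after this commit.
    file = "/outputs/.temp/HN-CHUM-001.pkl"

    filename = pathlib.Path(file).name           # "HN-CHUM-001.pkl"
    subject_id = os.path.splitext(filename)[0]   # "HN-CHUM-001"

Because the id is now the whole stem, subject ids containing underscores or dots no longer break the lookup, and the result no longer depends on the path separator; pathlib.Path(file).stem would collapse the two steps into one.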
imgtools/io/dataset.py (17 changes: 6 additions & 11 deletions)
@@ -1,21 +1,16 @@
from genericpath import exists
import os
import numpy as np
from typing import List, Sequence, Optional, Callable, Iterable, Dict,Tuple
from tqdm import tqdm

import SimpleITK as sitk
import torchio as tio
import pandas as pd
<<<<<<< HEAD
=======
# from . import file_name_convention
# from ..ops import StructureSetToSegmentation, ImageAutoInput, Resample, BaseOp
>>>>>>> 700fbe6ff316bf36536be5bf6cca67df144096fa

from imgtools.io import file_name_convention
from imgtools.ops import StructureSetToSegmentation, ImageAutoInput, Resample, BaseOp
from tqdm import tqdm
from joblib import Parallel, delayed
import SimpleITK as sitk
import warnings
from imgtools.pipeline import Pipeline
from joblib import Parallel, delayed

class Dataset(tio.SubjectsDataset):
"""
@@ -49,7 +44,7 @@ def load_from_nrrd(
if not os.path.exists(path_metadata):
raise ValueError("The specified path has no file name {}".format(path_metadata))
df_metadata = pd.read_csv(path_metadata,index_col=0)
output_streams = [("_").join(cols.split("_")[1:]) for cols in df_metadata.columns if cols.split("_")[0]=="folder"]
output_streams = [("_").join(cols.split("_")[1:]) for cols in df_metadata.columns if cols.split("_")[0] == "folder"]
imp_metadata = [cols for cols in df_metadata.columns if cols.split("_")[0] in ("metadata")]
#Ignores multiple connection to single modality
if ignore_multi:
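The output_streams comprehension kept above derives the modality streams from the metadata CSV's column names: every column prefixed with "folder_" contributes one stream, and "metadata_" columns are carried along separately. A minimal sketch with assumed column names:

    import pandas as pd

    # Assumed columns, mirroring the "folder_*" / "metadata_*" convention used above.
    df_metadata = pd.DataFrame(columns=["folder_CT", "folder_RTDOSE_CT", "metadata_RTDOSE_CT"])

    output_streams = [("_").join(cols.split("_")[1:])
                      for cols in df_metadata.columns
                      if cols.split("_")[0] == "folder"]

    print(output_streams)  # ['CT', 'RTDOSE_CT']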
imgtools/io/loaders.py (5 changes: 2 additions & 3 deletions)
@@ -1,4 +1,4 @@
import os
import os, pathlib
import glob
import re
from typing import Optional, List
@@ -272,8 +272,7 @@ def crawl(self, top, n_jobs=1):
database_dict[key] = db[key]

# save one level above imaging folders
parent = os.path.dirname(top)
dataset = top.split("/")[-1]
parent, dataset = os.path.split(top)

# save as json
with open(os.path.join(parent, f'imgtools_{dataset}.json'), 'w') as f:
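The crawl change above derives the parent directory and dataset name with a single os.path.split call instead of slash splitting, which respects the platform's path separator. A small sketch of the difference and of where the crawl index ends up (the path is illustrative):

    import os

    top = "/data/collections/HNSCC"           # hypothetical dataset directory

    # New: one call yields both pieces.
    parent, dataset = os.path.split(top)      # ("/data/collections", "HNSCC")

    # Old: equivalent only for clean, forward-slash paths.
    dataset_old = top.split("/")[-1]          # "HNSCC"

    # The crawl output is then written one level above the imaging folders:
    index_path = os.path.join(parent, f"imgtools_{dataset}.json")
    print(index_path)                         # /data/collections/imgtools_HNSCC.json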
imgtools/modules/datagraph.py (5 changes: 3 additions & 2 deletions)
@@ -4,7 +4,6 @@
import numpy as np
import pandas as pd
from tqdm import tqdm
from pyvis.network import Network


class DataGraph:
@@ -90,6 +89,7 @@ def visualize_graph(self):
"""
Generates visualization using Pyviz, a wrapper around visJS. The visualization can be found at datanet.html
"""
from pyvis.network import Network
print("Generating visualizations...")
data_net = Network(height='100%', width='100%', bgcolor='#222222', font_color='white')

@@ -117,7 +117,8 @@ def visualize_graph(self):
node["title"] += "<br>Number of connections: {}".format(len(neigbour_map[node['id']]))
node["value"] = len(neigbour_map[node['id']])

vis_path = os.path.join(("/").join(self.edge_path.split("/")[:-1]),"datanet.html")

vis_path = os.path.join(os.path.basename(self.edge_path),"datanet.html")
data_net.show(vis_path)

def _form_edge_study(self, df, all_study, study_id):
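Moving the pyvis import from module level into visualize_graph is a deferred import: pyvis is only needed when a visualization is actually requested, which pairs with the new optional "debug" extra in setup.py below. A minimal sketch of the pattern (the class is reduced to the parts relevant here):

    class DataGraph:
        def __init__(self, edge_path, visualize=False):
            self.edge_path = edge_path
            self.visualize = visualize   # pyvis is not imported just by constructing the graph

        def visualize_graph(self):
            # Deferred import: only callers of this method need pyvis installed.
            from pyvis.network import Network
            data_net = Network(height='100%', width='100%', bgcolor='#222222', font_color='white')
            # ... nodes and edges would be added here from the parsed graph ...
            return data_net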
imgtools/ops/ops.py (11 changes: 5 additions & 6 deletions)
@@ -79,8 +79,7 @@ def __init__(self,
visualize: bool = False):
self.dir_path = dir_path
self.modalities = modalities
self.dataset_name = self.dir_path.split("/")[-1]
self.parent = os.path.dirname(self.dir_path)
self.parent, self.dataset_name = os.path.split(self.dir_path)

####### CRAWLER ############
# Checks if dataset has already been indexed
@@ -96,12 +95,12 @@
####### GRAPH ##########
# Form the graph
edge_path = os.path.join(self.parent,f"imgtools_{self.dataset_name}_edges.csv")
graph = DataGraph(path_crawl=path_crawl,edge_path=edge_path,visualize=visualize)
graph = DataGraph(path_crawl=path_crawl, edge_path=edge_path, visualize=visualize)
print(f"Forming the graph based on the given modalities: {self.modalities}")
self.df_combined = graph.parser(self.modalities)
self.output_streams = [("_").join(cols.split("_")[1:]) for cols in self.df_combined.columns if cols.split("_")[0]=="folder"]
self.column_names = [cols for cols in self.df_combined.columns if cols.split("_")[0]=="folder"]
self.series_names = [cols for cols in self.df_combined.columns if cols.split("_")[0]=="series"]
self.output_streams = [("_").join(cols.split("_")[1:]) for cols in self.df_combined.columns if cols.split("_")[0] == "folder"]
self.column_names = [cols for cols in self.df_combined.columns if cols.split("_")[0] == "folder"]
self.series_names = [cols for cols in self.df_combined.columns if cols.split("_")[0] == "series"]

print(f"There are {len(self.df_combined)} cases containing all {modalities} modalities.")

setup.py (7 changes: 5 additions & 2 deletions)
@@ -1,4 +1,4 @@
from setuptools import setup
from setuptools import setup, find_packages

with open("README.md", "r") as fh:
long_description = fh.read()
@@ -16,7 +16,10 @@
long_description_content_type="text/markdown",
url="https://github.com/bhklab/med-imagetools",
install_requires=reqs,
packages=setuptools.find_packages(),
packages=find_packages(),
extras_require={
'debug': ['pyvis'],
},
# classifiers=[
# "Programming Language :: Python :: 3",
# "License :: OSI Approved :: MIT License",
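The packaging change imports find_packages explicitly and declares pyvis as an optional "debug" extra rather than a hard requirement, matching the deferred import in datagraph.py. A reduced sketch of the relevant setup() fields (the core dependency list is illustrative and the distribution name is assumed from the repository):

    from setuptools import setup, find_packages

    setup(
        name="med-imagetools",                 # assumed distribution name
        packages=find_packages(),
        install_requires=["numpy", "pandas"],  # illustrative core dependencies
        extras_require={
            # pyvis is only needed for DataGraph.visualize_graph, so it is opt-in:
            "debug": ["pyvis"],
        },
    )

    # Installing with the extra pulls in pyvis as well:
    #   pip install med-imagetools[debug]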
tests/test_autopipe.py (3 changes: 2 additions & 1 deletion)
@@ -53,6 +53,8 @@ def test_pipeline(dataset_path, modalities):

#Check if the dataset.csv is having the correct number of components and has all the fields
comp_table = pd.read_csv(comp_path)
print(len(comp_table))
print(comp_table)
assert len(comp_table) == 2, "There was some error in making components, check datagraph.parser"

#Check the nrrd files
@@ -73,7 +75,6 @@ def test_pipeline(dataset_path, modalities):
dicom_ct, _ = nrrd.read(path_ct)
dicom_dose, _ = nrrd.read(path_dose)
dicom_pet, _ = nrrd.read(path_pet)
print(dicom_ct.shape, dicom_dose.shape, dicom_pet.shape)
assert dicom_ct.shape == dicom_dose.shape == dicom_pet.shape
elif modalities == "CT,RTSTRUCT,RTDOSE":
path_ct = os.path.join(output_path_mod, "image", os.listdir(os.path.join(output_path_mod,"image"))[0])
tests/test_modalities.py (8 changes: 2 additions & 6 deletions)
@@ -2,16 +2,12 @@
This code is for testing functioning of different modalities
'''

<<<<<<< HEAD

import os
=======
import os
import pathlib
>>>>>>> 700fbe6ff316bf36536be5bf6cca67df144096fa
from posixpath import dirname
import shutil
import warnings
import pathlib
from multiprocessing import cpu_count

import numpy as np
@@ -66,5 +62,5 @@ def test_modalities(modalities, modalities_path):
make_binary_mask = StructureSetToSegmentation(roi_names=['GTV.?', 'LARYNX'], continuous=False)
mask = make_binary_mask(struc, img)
A = sitk.GetArrayFromImage(mask)
assert len(A.shape)==4
assert len(A.shape) == 4
assert A.shape[0:3] == (img.GetDepth(),img.GetHeight(),img.GetWidth())

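For orientation, the assertions at the end of test_modalities check that the binary mask returned by StructureSetToSegmentation carries a fourth, per-ROI axis and that its spatial axes match the image grid; sitk.GetArrayFromImage returns axes in (depth, height, width) order, which is why the comparison reads (GetDepth(), GetHeight(), GetWidth()). A small sketch of what the shape check expresses, using synthetic arrays rather than the real test data:

    import numpy as np

    # Synthetic stand-in for sitk.GetArrayFromImage(mask): two ROI channels
    # ('GTV.?' and 'LARYNX') stacked on the last axis.
    depth, height, width = 12, 64, 64             # would come from the CT image
    A = np.zeros((depth, height, width, 2), dtype=np.uint8)

    assert len(A.shape) == 4                      # spatial axes + ROI channel
    assert A.shape[0:3] == (depth, height, width)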