Skip to content

Commit

Permalink
Tidy ups after rebase onto new testing infrastructure.
Browse files Browse the repository at this point in the history
  • Loading branch information
JoeZiminski committed Dec 14, 2023
1 parent 3311175 commit fe3db33
Show file tree
Hide file tree
Showing 5 changed files with 3 additions and 113 deletions.
7 changes: 3 additions & 4 deletions spikewrap/pipeline/load_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,7 @@
import numpy as np
import spikeinterface.extractors as se
import spikeinterface.preprocessing as spre
<<<<<<< HEAD
from spikeinterface import load_extractor
=======
>>>>>>> 65b07d1... Adding tests and playing around with casting to float64 on load.

from spikewrap.data_classes.preprocessing import PreprocessingData
from spikewrap.utils import utils
Expand Down Expand Up @@ -126,7 +123,9 @@ def _load_spikeinterface(preprocess_data): # TODO: does not handle sync

orig_dtype = recording.dtype

recording = spre.astype(recording, np.float64) # TODO: centralise this, also think if it can be handled better.
recording = spre.astype(
recording, np.float64
) # TODO: centralise this, also think if it can be handled better.

preprocess_data.set_orig_dtype(orig_dtype) # TODO: move this out of the loop.

Expand Down
36 changes: 0 additions & 36 deletions spikewrap/pipeline/preprocess.py
Original file line number Diff line number Diff line change
Expand Up @@ -338,42 +338,6 @@ def preprocess_everything_by_shank(
# On the last round this will not be used.
split_recording = this_step_preprocessed_per_shank

# Multiple aggregations mess everything up. Do not do. Document this.

if False:
(
pp_name,
pp_options,
last_pp_step_output,
new_name,
) = _get_preprocessing_step_information(
pp_info, preprocess_data, ses_name, run_name, step_num
)

# TODO: cannot repeatedly apply aggregate channels.

if preprocess_per_shank:
if int(step_num) == 1:
split_recording = last_pp_step_output.split_by("group") # TODO: fix.
else:
split_recording = {
idx: rec for idx, rec in enumerate(last_pp_step_output)
}

if len(split_recording) == 1:
raise ValueError(
"`preprocess_per_shank` is set to `True` but this"
"recording only contains 1 shank. Are you sure this"
"is the correct recording?"
)

preprocessed_per_shank = []
for rec in split_recording.values():
preprocessed_per_shank.append(pp_funcs[pp_name](rec, **pp_options))

if int(step_num) == num_steps:
aggregate_channels(preprocessed_per_shank)


def _perform_preprocessing_step(
step_num: str,
Expand Down
73 changes: 0 additions & 73 deletions tests/test_integration/playing.py

This file was deleted.

Binary file removed tests/test_integration/test.npy
Binary file not shown.
Binary file removed tests/test_integration/test_demeaned.npy
Binary file not shown.

0 comments on commit fe3db33

Please sign in to comment.