Skip to content

Commit

Permalink
LorenFrankLab#630 - Reduce varchar approach
Browse files Browse the repository at this point in the history
  • Loading branch information
CBroz1 committed Oct 20, 2023
1 parent 2be4a80 commit bece03d
Show file tree
Hide file tree
Showing 9 changed files with 47 additions and 23 deletions.
4 changes: 3 additions & 1 deletion src/spyglass/common/common_nwbfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,14 +42,16 @@
class Nwbfile(dj.Manual):
definition = """
# Table for holding the NWB files.
nwb_file_name: varchar(255) # name of the NWB file
nwb_file_name: varchar(64) # name of the NWB file
---
nwb_file_abs_path: filepath@raw
INDEX (nwb_file_abs_path)
"""
# NOTE the INDEX above is implicit from filepath@... above but needs to be explicit
# so that alter() can work

# NOTE: No existing entries impacted by varchar reduction from 255 to 64

@classmethod
def insert_from_relative_file_name(cls, nwb_file_name):
"""Insert a new session from an existing NWB file.
Expand Down
6 changes: 2 additions & 4 deletions src/spyglass/figurl_views/SpikeSortingView.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,9 @@
import datajoint as dj
import kachery_client as kc
import spikeinterface as si
from sortingview.SpikeSortingView import (
SpikeSortingView as SortingViewSpikeSortingView,
)
from sortingview.SpikeSortingView import SpikeSortingView as SortingViewSpikeSortingView

from ..common.common_spikesorting import SpikeSorting, SpikeSortingRecording
from ..spikesorting import SpikeSorting, SpikeSortingRecording
from .prepare_spikesortingview_data import prepare_spikesortingview_data

schema = dj.schema("figurl_view_spike_sorting_recording")
Expand Down
12 changes: 8 additions & 4 deletions src/spyglass/lfp/v1/lfp_artifact.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
import datajoint as dj
import numpy as np

from spyglass.common import get_electrode_indices
from spyglass.common.common_interval import IntervalList
from spyglass.lfp.v1.lfp import LFPV1
from spyglass.lfp.v1.lfp_artifact_difference_detection import (
difference_artifact_detector,
)
from spyglass.lfp.v1.lfp_artifact_MAD_detection import mad_artifact_detector
import numpy as np
from spyglass.common import get_electrode_indices

schema = dj.schema("lfp_v1")

Expand Down Expand Up @@ -182,7 +182,7 @@ def make(self, key):
key["target_interval_list_name"],
"LFP",
key["artifact_params_name"],
"artifact_removed_valid_times",
# "artifact_removed_valid_times",
]
),
)
Expand All @@ -204,9 +204,13 @@ class LFPArtifactRemovedIntervalList(dj.Manual):
definition = """
# Stores intervals without detected artifacts. Entries can come from either
# ArtifactDetection() or alternative artifact removal analyses.
artifact_removed_interval_list_name: varchar(200)
artifact_removed_interval_list_name: varchar(128)
---
-> LFPArtifactDetectionSelection
artifact_removed_valid_times: longblob
artifact_times: longblob # np.array of artifact intervals
"""

# NOTE: 200 existing entries are over this new limit.
# Existing names could be significantly shortened by reducing redundancy.
# Removing the final string above from existing entries brings all below the new 128 limit.
2 changes: 2 additions & 0 deletions src/spyglass/lock/file_lock.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@

schema = dj.schema("file_lock")

from ..common.common_nwbfile import AnalysisNwbfile, Nwbfile


@schema
class NwbfileLock(dj.Manual):
Expand Down
2 changes: 1 addition & 1 deletion src/spyglass/position_linearization/v1/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from spyglass.common.common_position import NodePicker
from spyglass.position_linearization.v1.linearization import (
LinearizationParameters,
LinearizationSelection,
LinearizedPositionV1,
NodePicker,
TrackGraph,
)
6 changes: 5 additions & 1 deletion src/spyglass/spikesorting/spikesorting_artifact.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,13 +140,17 @@ class ArtifactRemovedIntervalList(dj.Manual):
definition = """
# Stores intervals without detected artifacts.
# Note that entries can come from either ArtifactDetection() or alternative artifact removal analyses.
artifact_removed_interval_list_name: varchar(200)
artifact_removed_interval_list_name: varchar(180)
---
-> ArtifactDetectionSelection
artifact_removed_valid_times: longblob
artifact_times: longblob # np array of artifact intervals
"""

# NOTE: current max is 165.
# Current entries are a very messy concatenation that looks like pks elsewhere.
# Why name a list with 165 chars when you could fk-reference the data itself?


def _get_artifact_times(
recording: si.BaseRecording,
Expand Down
9 changes: 7 additions & 2 deletions src/spyglass/spikesorting/spikesorting_curation.py
Original file line number Diff line number Diff line change
Expand Up @@ -385,10 +385,13 @@ def _get_waveform_extractor_name(self, key):
class MetricParameters(dj.Manual):
definition = """
# Parameters for computing quality metrics of sorted units
metric_params_name: varchar(200)
metric_params_name: varchar(64)
---
metric_params: blob
"""

# NOTE: No existing entries impacted by this change

metric_default_params = {
"snr": {
"peak_sign": "neg",
Expand Down Expand Up @@ -645,12 +648,14 @@ def _get_num_spikes(
@schema
class AutomaticCurationParameters(dj.Manual):
definition = """
auto_curation_params_name: varchar(200) # name of this parameter set
auto_curation_params_name: varchar(36) # name of this parameter set
---
merge_params: blob # dictionary of params to merge units
label_params: blob # dictionary params to label units
"""

# NOTE: No existing entries impacted by this change

def insert1(self, key, **kwargs):
# validate the labels and then insert
# TODO: add validation for merge_params
Expand Down
12 changes: 9 additions & 3 deletions src/spyglass/spikesorting/spikesorting_recording.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@
from ..common.common_lab import LabTeam # noqa: F401
from ..common.common_nwbfile import Nwbfile
from ..common.common_session import Session # noqa: F401
from ..utils.dj_helper_fn import dj_replace
from ..settings import recording_dir
from ..utils.dj_helper_fn import dj_replace

schema = dj.schema("spikesorting_recording")

Expand Down Expand Up @@ -321,19 +321,23 @@ def get_geometry(self, sort_group_id, nwb_file_name):
class SortInterval(dj.Manual):
definition = """
-> Session
sort_interval_name: varchar(200) # name for this interval
sort_interval_name: varchar(64) # name for this interval
---
sort_interval: longblob # 1D numpy array with start and end time for a single interval to be used for spike sorting
"""
# NOTE: Reduced key; fewer than 2 existing entries affected.
# All existing entries are below 69 chars.


@schema
class SpikeSortingPreprocessingParameters(dj.Manual):
definition = """
preproc_params_name: varchar(200)
preproc_params_name: varchar(32)
---
preproc_params: blob
"""
# NOTE: Reduced key; fewer than 2 existing entries affected.
# All existing entries are below 48 chars.

def insert_default(self):
# set up the default filter parameters
Expand Down Expand Up @@ -365,6 +369,8 @@ class SpikeSortingRecordingSelection(dj.Manual):
-> IntervalList
"""

# NOTE: Too many pks?


@schema
class SpikeSortingRecording(dj.Computed):
Expand Down
17 changes: 10 additions & 7 deletions src/spyglass/spikesorting/spikesorting_sorting.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

from ..common.common_lab import LabMember, LabTeam
from ..common.common_nwbfile import AnalysisNwbfile
from ..settings import temp_dir, sorting_dir
from ..settings import sorting_dir, temp_dir
from .spikesorting_artifact import ArtifactRemovedIntervalList
from .spikesorting_recording import (
SpikeSortingRecording,
Expand All @@ -27,12 +27,14 @@
@schema
class SpikeSorterParameters(dj.Manual):
definition = """
sorter: varchar(200)
sorter_params_name: varchar(200)
sorter: varchar(32)
sorter_params_name: varchar(64)
---
sorter_params: blob
"""

# NOTE: No existing entries impacted by this change

def insert_default(self):
"""Default params from spike sorters available via spikeinterface"""
sorters = sis.available_sorters()
Expand Down Expand Up @@ -236,10 +238,11 @@ def make(self, key: dict):
self.insert1(key)

def delete(self):
"""Extends the delete method of base class to implement permission checking.
Note that this is NOT a security feature, as anyone that has access to source code
can disable it; it just makes it less likely to accidentally delete entries.
"""
"""Extends the delete method of base class to implement permission
checking. Note that this is NOT a security feature, as anyone that has
access to source code can disable it; it just makes it less likely to
accidentally delete entries."""

current_user_name = dj.config["database.user"]
entries = self.fetch()
permission_bool = np.zeros((len(entries),))
Expand Down

0 comments on commit bece03d

Please sign in to comment.