Merged
Changes from all commits
Commits (53)
4f02ef9
Merge branch 'main' of https://github.com/AllenNeuralDynamics/aind-me…
rcpeene Sep 20, 2024
f4438af
Merge branch 'main' of https://github.com/AllenNeuralDynamics/aind-me…
rcpeene Sep 25, 2024
3094bf8
Merge branch 'dev' of https://github.com/AllenNeuralDynamics/aind-met…
rcpeene Sep 25, 2024
6443873
Merge branch 'main' of https://github.com/AllenNeuralDynamics/aind-me…
rcpeene Oct 31, 2024
261d5ff
add openephys tag to models
rcpeene Jan 29, 2025
4986a64
fix linting
rcpeene Jan 29, 2025
97c4edc
feat: adds camstim to list of etl jobs
jtyoung84 Jan 29, 2025
0631609
docs: runs linters
jtyoung84 Jan 29, 2025
096c4e1
build: bounds version of hdmf
jtyoung84 Jan 30, 2025
0307371
Accomodate behavior pkl files in metadata mapper. Readd optotagging s…
rcpeene Jan 31, 2025
9f9b952
Merge branch 'openephys_patch' of https://github.com/AllenNeuralDynam…
rcpeene Jan 31, 2025
ed5435c
merge in commented lims code (might need it later)
rcpeene Jan 31, 2025
44c42d2
merge old code into openephys_patch (may remove later)
rcpeene Jan 31, 2025
78b0d3a
passive block code
Ahad-Allen Apr 9, 2025
aa8404c
set primary target structure to root by default
rcpeene Apr 9, 2025
b5ee974
Merge branch 'main' of https://github.com/AllenNeuralDynamics/aind-me…
rcpeene Apr 9, 2025
f56de3b
Merge branch 'enhancement/passive_ophys_behavior_block' of https://gi…
rcpeene Apr 9, 2025
b12bb5a
merge ahads camstim changes
rcpeene Apr 9, 2025
c9d4bb4
revise stim epochs extraction to; ignore spontaneous times, ignore em…
rcpeene Apr 11, 2025
fd689bd
further correct/reduce stim epochs information
rcpeene Apr 16, 2025
d6175a4
Remove spontaneous occurances during short gray screens at the end
Ahad-Allen Apr 17, 2025
decb9eb
Split pos into pos x pos y
Ahad-Allen Apr 18, 2025
4726357
fix 'Pos' stim table column splitting into pos_x and pos_y
rcpeene Apr 18, 2025
f13b7f6
Remove prints
Ahad-Allen Apr 23, 2025
cd8b53a
remove prints
rcpeene Apr 23, 2025
53e9dc2
lint and document.
rcpeene Apr 23, 2025
16f610d
remove blank line
rcpeene Apr 23, 2025
ec25bcb
Add docstring for remove_short
Ahad-Allen Apr 24, 2025
ab6992a
Remove unused print
Ahad-Allen Apr 24, 2025
ee01f7f
linting behavior_utils.py
Ahad-Allen Apr 24, 2025
3d64f4b
lint, refactor extract_stim_epochs to satisfy linter
rcpeene Apr 24, 2025
adee3e6
replace usage of gratings metadata
Ahad-Allen May 2, 2025
5835caa
delete unneeded pdb
Ahad-Allen May 3, 2025
1fbb2a9
delete unneeded columns
Ahad-Allen May 3, 2025
32d6032
linting
Ahad-Allen May 3, 2025
bd47ec2
simplifying
Ahad-Allen May 3, 2025
f062333
doc strings
Ahad-Allen May 3, 2025
e421625
remove start and end frame columns
Ahad-Allen May 7, 2025
62f6c81
typo/linging
Ahad-Allen May 22, 2025
0ea9607
pins wavpack-numcodecs
mekhlakapoor May 22, 2025
98eab9e
updating tests to remove gratings
Ahad-Allen May 22, 2025
203f865
specify fingerprint
Ahad-Allen May 22, 2025
fb7fb07
fixing test cases
Ahad-Allen May 22, 2025
78a9701
remove print
Ahad-Allen May 22, 2025
ba10fbd
Merge branch 'dev' of github.com:AllenNeuralDynamics/aind-metadata-ma…
mekhlakapoor May 28, 2025
e779c97
Merge branch 'dev' into openephys_patch
mekhlakapoor May 28, 2025
be25ded
only import some methods from npc_ephys
rcpeene May 28, 2025
ffe119d
linter fix
mekhlakapoor May 28, 2025
17fbae5
pins numcodecs
mekhlakapoor May 28, 2025
241d313
fix misleading optotaggin default conditions
rcpeene May 29, 2025
487ef39
merge
rcpeene May 29, 2025
0f9ecb2
fix openephys_patch test
rcpeene May 30, 2025
71f1814
lint
rcpeene May 30, 2025
4 changes: 3 additions & 1 deletion pyproject.toml
@@ -76,12 +76,14 @@ mesoscope = [
openephys = [
"aind-metadata-mapper[schema]",
"h5py >= 3.11.0",
"hdmf < 4.0.0",
"npc_ephys >= 0.1.18",
"scipy >= 1.11.0",
"pandas >= 2.2.2",
"numpy >= 1.26.4",
"npc_mvr >= 0.1.6",
"wavpack-numcodecs==0.2.2"
"wavpack-numcodecs<0.2.2",
"numcodecs<0.16.0",
]

dynamicrouting = [
Expand Down
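The new pins cap hdmf below 4.0.0 and numcodecs below 0.16.0, and loosen wavpack-numcodecs from an exact 0.2.2 pin to an upper bound. A minimal sketch for checking an installed environment against these bounds (assumes the `packaging` library is available; the distribution names are taken from the extra above):

```python
# Sketch: verify the installed openephys extra against the new version bounds.
from importlib.metadata import version
from packaging.version import Version

bounds = {
    "hdmf": "4.0.0",
    "numcodecs": "0.16.0",
    "wavpack-numcodecs": "0.2.2",
}
for dist, upper in bounds.items():
    installed = Version(version(dist))
    assert installed < Version(upper), f"{dist} {installed} violates < {upper}"
```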
17 changes: 16 additions & 1 deletion src/aind_metadata_mapper/gather_metadata.py
@@ -38,8 +38,17 @@
JobSettings as FipSessionJobSettings,
)
from aind_metadata_mapper.fip.session import FIBEtl
from aind_metadata_mapper.mesoscope.models import (
JobSettings as MesoscopeSessionJobSettings,
)
from aind_metadata_mapper.mesoscope.session import MesoscopeEtl
from aind_metadata_mapper.models import JobSettings
from aind_metadata_mapper.open_ephys.camstim_ephys_session import (
CamstimEphysSessionEtl,
)
from aind_metadata_mapper.open_ephys.models import (
JobSettings as OpenEphysJobSettings,
)
from aind_metadata_mapper.smartspim.acquisition import SmartspimETL


@@ -284,8 +293,14 @@ def get_session_metadata(self) -> Optional[dict]:
session_job = MRIEtl(job_settings=session_settings)
elif isinstance(session_settings, FipSessionJobSettings):
session_job = FIBEtl(job_settings=session_settings)
else:
elif isinstance(session_settings, MesoscopeSessionJobSettings):
session_job = MesoscopeEtl(job_settings=session_settings)
elif isinstance(session_settings, OpenEphysJobSettings):
session_job = CamstimEphysSessionEtl(
job_settings=session_settings
)
else:
raise ValueError("Unknown session job settings class!")
job_response = session_job.run_job()
if job_response.status_code != 500:
return json.loads(job_response.data)
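A minimal sketch of how the new branch is exercised, assuming an OpenEphysJobSettings built from the fields referenced elsewhere in this PR (input_source, output_directory); the paths are hypothetical and other required fields defined in open_ephys.models are omitted:

```python
from aind_metadata_mapper.open_ephys.camstim_ephys_session import (
    CamstimEphysSessionEtl,
)
from aind_metadata_mapper.open_ephys.models import (
    JobSettings as OpenEphysJobSettings,
)

# Hypothetical values; the real model likely requires additional fields
# (session_type, mouse_platform_name, etc.) declared in open_ephys.models.
settings = OpenEphysJobSettings(
    input_source="/data/ecephys_123456_2025-01-01_00-00-00",
    output_directory="/results",
)

# get_session_metadata() now routes these settings to the camstim ephys ETL.
etl = CamstimEphysSessionEtl(job_settings=settings)
response = etl.run_job()  # writes the session metadata to output_directory
```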
4 changes: 4 additions & 0 deletions src/aind_metadata_mapper/models.py
@@ -21,6 +21,9 @@
from aind_metadata_mapper.mesoscope.models import (
JobSettings as MesoscopeSessionJobSettings,
)
from aind_metadata_mapper.open_ephys.models import (
JobSettings as OpenEphysJobSettings,
)
from aind_metadata_mapper.smartspim.models import (
JobSettings as SmartSpimAcquisitionJobSettings,
)
@@ -35,6 +38,7 @@ class SessionSettings(BaseSettings, extra="allow"):
BrukerSessionJobSettings,
FipSessionJobSettings,
MesoscopeSessionJobSettings,
OpenEphysJobSettings,
],
Field(discriminator="job_settings_name"),
]
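The union relies on pydantic's discriminated-union support: the job_settings_name literal on each settings model decides which concrete class parses the payload. A toy illustration of that pattern (not the project's actual models; the literal values here are hypothetical):

```python
from typing import Annotated, Literal, Union

from pydantic import BaseModel, Field


class ToyMesoscopeSettings(BaseModel):
    job_settings_name: Literal["Mesoscope"]


class ToyOpenEphysSettings(BaseModel):
    job_settings_name: Literal["OpenEphys"]


class ToySessionSettings(BaseModel):
    # pydantic inspects the discriminator field to pick the right member
    job_settings: Annotated[
        Union[ToyMesoscopeSettings, ToyOpenEphysSettings],
        Field(discriminator="job_settings_name"),
    ]


parsed = ToySessionSettings(job_settings={"job_settings_name": "OpenEphys"})
assert isinstance(parsed.job_settings, ToyOpenEphysSettings)
```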
140 changes: 106 additions & 34 deletions src/aind_metadata_mapper/open_ephys/camstim_ephys_session.py
@@ -10,7 +10,6 @@
from pathlib import Path
from typing import Union

import npc_ephys
import npc_mvr
import numpy as np
import pandas as pd
@@ -25,7 +24,13 @@
VisualStimulation,
)
from aind_data_schema_models.modalities import Modality
from npc_ephys import (
get_ephys_timing_on_sync,
get_newscale_coordinates,
get_single_oebin_path,
)

import aind_metadata_mapper.open_ephys.utils.pkl_utils as pkl
import aind_metadata_mapper.open_ephys.utils.sync_utils as sync
import aind_metadata_mapper.stimulus.camstim
from aind_metadata_mapper.core import GenericEtl
@@ -46,9 +51,7 @@ class CamstimEphysSessionEtl(
session_path: Path
recording_dir: Path

def __init__(
self, session_id: str, job_settings: Union[JobSettings, str, dict]
) -> None:
def __init__(self, job_settings: Union[JobSettings, str, dict]) -> None:
"""
Determine needed input filepaths from np-exp and lims, get session
start and end times from sync file, write stim tables and extract
@@ -67,48 +70,69 @@ def __init__(
job_settings_model = job_settings
GenericEtl.__init__(self, job_settings=job_settings_model)

sessions_root = Path(self.job_settings.sessions_root)
self.folder = self.get_folder(session_id, sessions_root)
self.session_path = self.get_session_path(session_id, sessions_root)
self.recording_dir = npc_ephys.get_single_oebin_path(
self.session_path
).parent
# sessions_root = Path(self.job_settings.sessions_root)
Contributor review comment: if this code isn't being used, you can probably just remove it
# self.folder_name = self.get_folder(session_id, sessions_root)
# self.session_path = self.get_session_path(session_id, sessions_root)
self.session_path = job_settings.input_source
self.folder_name = self.session_path.name
self.output_dir = job_settings.output_directory
# sometimes data files are deleted on npexp so try files on lims
# try:
# self.recording_dir = get_single_oebin_path(
# session_inst.lims_path
# ).parent
# except:
self.recording_dir = get_single_oebin_path(self.session_path).parent

self.motor_locs_path = (
self.session_path / f"{self.folder}.motor-locs.csv"
self.session_path / f"{self.folder_name}.motor-locs.csv"
)
self.pkl_path = self.session_path / f"{self.folder}.stim.pkl"
self.opto_pkl_path = self.session_path / f"{self.folder}.opto.pkl"
self.pkl_path = self.session_path / f"{self.folder_name}.stim.pkl"
if not self.pkl_path.exists():
self.pkl_path = (
self.session_path / f"{self.folder_name}.behavior.pkl"
)
logger.debug("Using pickle:", self.pkl_path)
self.pkl_data = pkl.load_pkl(self.pkl_path)
self.fps = pkl.get_fps(self.pkl_data)

self.opto_pkl_path = self.session_path / f"{self.folder_name}.opto.pkl"
self.opto_table_path = (
self.session_path / f"{self.folder}_opto_epochs.csv"
self.session_path / f"{self.folder_name}_opto_epochs.csv"
)
self.opto_conditions_map = job_settings.opto_conditions_map
self.stim_table_path = (
self.session_path / f"{self.folder}_stim_epochs.csv"
self.session_path / f"{self.folder_name}_stim_epochs.csv"
)
self.sync_path = self.session_path / f"{self.folder}.sync"
self.sync_path = self.session_path / f"{self.folder_name}.sync"

platform_path = next(
self.session_path.glob(f"{self.folder}_platform*.json")
self.session_path.glob(f"{self.folder_name}_platform*.json")
)
self.platform_json = json.loads(platform_path.read_text())
self.project_name = self.platform_json["project"]

sync_data = sync.load_sync(self.sync_path)
self.session_start = sync.get_start_time(sync_data)
self.session_end = sync.get_stop_time(sync_data)
self.sync_data = sync.load_sync(self.sync_path)
self.session_start = sync.get_start_time(self.sync_data)
self.session_end = sync.get_stop_time(self.sync_data)
logger.debug(
f"session start: {self.session_start} \n"
f" session end: {self.session_end}"
)

self.session_uuid = self.get_session_uuid()
self.mtrain_regimen = self.get_mtrain()
self.mtrain_server = job_settings.mtrain_server
self.stage_name = pkl.get_stage(self.pkl_data)
self.behavior = self._is_behavior()

if not self.stim_table_path.exists() or (
self.job_settings.overwrite_tables
):
logger.debug("building stim table")
self.build_stimulus_table()
if self.behavior:
self.build_behavior_table()
else:
self.build_stimulus_table()
if self.opto_pkl_path.exists() and (
not self.opto_table_path.exists()
or self.job_settings.overwrite_tables
@@ -127,7 +151,7 @@ def run_job(self):
"""Transforms all metadata for the session into relevant files"""
self._extract()
self._transform()
return self._load(self.session_json, self.session_path)
return self._load(self.session_json, self.output_dir)

def _extract(self):
"""TODO: refactor a lot of the __init__ code here"""
Expand All @@ -146,7 +170,7 @@ def _transform(self) -> Session:
session_type=self.job_settings.session_type,
iacuc_protocol=self.job_settings.iacuc_protocol,
rig_id=self.platform_json["rig_id"],
subject_id=self.folder.split("_")[1],
subject_id=self.folder_name.split("_")[1],
data_streams=self.data_streams(),
stimulus_epochs=self.stim_epochs,
mouse_platform_name=self.job_settings.mouse_platform_name,
@@ -235,9 +259,7 @@ def ephys_modules(self) -> list:
"""
Return list of schema ephys modules for each available probe.
"""
newscale_coords = npc_ephys.get_newscale_coordinates(
self.motor_locs_path
)
newscale_coords = get_newscale_coordinates(self.motor_locs_path)

ephys_modules = []
for probe_letter in self.available_probes:
@@ -251,7 +273,7 @@
arc_angle=0.0,
module_angle=0.0,
rotation_angle=0.0,
primary_targeted_structure="none",
primary_targeted_structure="root",
manipulator_coordinates=manipulator_coordinates,
notes=notes,
)
@@ -265,7 +287,7 @@ def ephys_stream(self) -> Stream:
"""
probe_exp = r"(?<=[pP{1}]robe)[-_\s]*(?P<letter>[A-F]{1})(?![a-zA-Z])"

times = npc_ephys.get_ephys_timing_on_sync(
times = get_ephys_timing_on_sync(
sync=self.sync_path, recording_dirs=[self.recording_dir]
)

@@ -336,10 +358,62 @@ def data_streams(self) -> tuple[Stream, ...]:
"""
data_streams = []
data_streams.append(self.ephys_stream())
data_streams.append(self.sync_stream())
data_streams.append(self.video_stream())
# data_streams.append(self.sync_stream())
# data_streams.append(self.video_stream())
return tuple(data_streams)

def build_optogenetics_table(self):
"""
Builds an optogenetics table from the opto pickle file and sync file.
Writes the table to a csv file at self.opto_table_path.
"""
opto_file = pkl.load_pkl(self.opto_pkl_path)
sync_file = sync.load_sync(self.sync_path)
start_times = sync.extract_led_times(
sync_file, self.opto_conditions_map
)
condition_nums = [str(item) for item in opto_file["opto_conditions"]]
levels = opto_file["opto_levels"]
assert len(condition_nums) == len(levels)
if len(start_times) > len(condition_nums):
raise ValueError(
f"there are {len(start_times) - len(condition_nums)} extra "
f"optotagging sync times!"
)
optotagging_table = pd.DataFrame(
{
"start_time": start_times,
"condition_num": condition_nums,
"level": levels,
}
)
optotagging_table = optotagging_table.sort_values(
by="start_time", axis=0
)
stop_times = []
conditions = []
names = []
for _, row in optotagging_table.iterrows():
condition = self.opto_conditions_map[row["condition_num"]]
stop_times.append(row["start_time"] + condition["duration"])
conditions.append(condition["condition"])
names.append(condition["name"])
optotagging_table["stop_time"] = stop_times
optotagging_table["condition"] = conditions
optotagging_table["name"] = names
optotagging_table["duration"] = (
optotagging_table["stop_time"] - optotagging_table["start_time"]
)
optotagging_table.to_csv(self.opto_table_path, index=False)

def epoch_from_opto_table(self) -> StimulusEpoch:
"""
From the optogenetic stimulation table, returns a single schema
@@ -348,11 +422,9 @@ def epoch_from_opto_table(self) -> StimulusEpoch:
parameters, and include the set of all of that column's values as the
parameter values.
"""

script_obj = Software(
name=self.mtrain_regimen["name"],
name=self.stage_name,
version="1.0",
url=self.mtrain_regimen,
)

opto_table = pd.read_csv(self.opto_table_path)
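The new build_optogenetics_table indexes job_settings.opto_conditions_map by the stringified condition number and reads duration, name, and condition from each entry. A sketch of the expected shape, inferred from that usage (keys and values here are illustrative, not defaults from the repository):

```python
# Illustrative opto_conditions_map; the actual defaults live in
# aind_metadata_mapper.open_ephys.models and are not shown in this diff.
opto_conditions_map = {
    "0": {
        "duration": 0.01,                    # seconds; stop_time = start_time + duration
        "name": "fast_pulses",               # hypothetical condition name
        "condition": "10 ms pulse",          # hypothetical description
    },
    "1": {
        "duration": 1.0,
        "name": "raised_cosine",
        "condition": "1 s raised cosine ramp",
    },
}
```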