diff --git a/ibllib/io/extractors/video_motion.py b/ibllib/io/extractors/video_motion.py
index 929f18b88..144305125 100644
--- a/ibllib/io/extractors/video_motion.py
+++ b/ibllib/io/extractors/video_motion.py
@@ -111,10 +111,12 @@ def load_data(self, download=False):
             self.data.camera_times = {vidio.label_from_path(url): ts for ts, url in zip(cam.data, cam.url)}
         else:
             alf_path = self.session_path / 'alf'
-            self.data.wheel = alfio.load_object(alf_path, 'wheel', short_keys=True)
-            self.data.trials = alfio.load_object(alf_path, 'trials')
+            wheel_path = next(alf_path.rglob('*wheel.timestamps*')).parent
+            self.data.wheel = alfio.load_object(wheel_path, 'wheel', short_keys=True)
+            trials_path = next(alf_path.rglob('*trials.table*')).parent
+            self.data.trials = alfio.load_object(trials_path, 'trials')
             self.data.camera_times = {vidio.label_from_path(x): alfio.load_file_content(x) for x in
-                                      alf_path.glob('*Camera.times*')}
+                                      alf_path.rglob('*Camera.times*')}
         assert all(x is not None for x in self.data.values())
 
     def _set_eid_or_path(self, session_path_or_eid):
@@ -428,14 +430,16 @@ def fix_keys(alf_object):
             return ob
 
         alf_path = self.session_path.joinpath('alf')
-        wheel = (fix_keys(alfio.load_object(alf_path, 'wheel')) if location == 'SDSC' else alfio.load_object(alf_path, 'wheel'))
+        wheel_path = next(alf_path.rglob('*wheel.timestamps*')).parent
+        wheel = (fix_keys(alfio.load_object(wheel_path, 'wheel')) if location == 'SDSC'
+                 else alfio.load_object(wheel_path, 'wheel'))
         self.wheel_timestamps = wheel.timestamps
         # Compute interpolated wheel position and wheel times
         wheel_pos, self.wheel_time = wh.interpolate_position(wheel.timestamps, wheel.position, freq=1000)
         # Compute wheel velocity
         self.wheel_vel, _ = wh.velocity_filtered(wheel_pos, 1000)
         # Load in original camera times
-        self.camera_times = alfio.load_file_content(next(alf_path.glob(f'_ibl_{self.label}Camera.times*.npy')))
+        self.camera_times = alfio.load_file_content(next(alf_path.rglob(f'_ibl_{self.label}Camera.times*.npy')))
         self.camera_path = str(next(self.session_path.joinpath('raw_video_data').glob(f'_iblrig_{self.label}Camera.raw*.mp4')))
         self.camera_meta = vidio.get_video_meta(self.camera_path)
 
@@ -473,8 +477,8 @@ def fix_keys(alf_object):
         # We attempt to load in some behavior data (trials and dlc). This is only needed for the summary plots, having
         # trial aligned paw velocity (from the dlc) is a nice sanity check to make sure the alignment went well
         try:
-            self.trials = alfio.load_file_content(next(alf_path.glob('_ibl_trials.table*.pqt')))
-            self.dlc = alfio.load_file_content(next(alf_path.glob(f'_ibl_{self.label}Camera.dlc*.pqt')))
+            self.trials = alfio.load_file_content(next(alf_path.rglob('_ibl_trials.table*.pqt')))
+            self.dlc = alfio.load_file_content(next(alf_path.rglob(f'_ibl_{self.label}Camera.dlc*.pqt')))
             self.dlc = likelihood_threshold(self.dlc)
             self.behavior = True
         except (ALFObjectNotFound, StopIteration):
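The change repeated throughout this file (and in the QC modules further down) is to look a dataset up with rglob and take the parent folder, so objects stored in sub-collections or revision folders under alf/ are still found instead of being assumed to sit directly in alf/. A minimal sketch of that lookup pattern, under the assumption of a standard ALF session layout; the helper name and paths below are illustrative, not part of the diff:

    from pathlib import Path

    import one.alf.io as alfio


    def find_alf_collection(alf_path: Path, pattern: str) -> Path:
        """Return the folder containing the first file matching ``pattern``.

        rglob searches sub-collections and revision folders recursively,
        whereas glob only matches files directly inside ``alf_path``.
        Raises StopIteration when nothing matches.
        """
        return next(alf_path.rglob(pattern)).parent


    # Illustrative usage: load the wheel object from wherever its timestamps live
    alf_path = Path('/data/subject/2024-01-01/001/alf')  # assumed session path
    wheel = alfio.load_object(find_alf_collection(alf_path, '*wheel.timestamps*'), 'wheel', short_keys=True)
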
diff --git a/ibllib/oneibl/registration.py b/ibllib/oneibl/registration.py
index fc6a208b8..9f4ef4791 100644
--- a/ibllib/oneibl/registration.py
+++ b/ibllib/oneibl/registration.py
@@ -5,6 +5,8 @@
 import itertools
 
 from packaging import version
+from requests import HTTPError
+
 from one.alf.files import get_session_path, folder_parts, get_alf_path
 from one.registration import RegistrationClient, get_dataset_type
 from one.remote.globus import get_local_endpoint_id, get_lab_from_endpoint_id
@@ -81,17 +83,29 @@ def register_dataset(file_list, one=None, exists=False, versions=None, **kwargs):
     client = IBLRegistrationClient(one)
 
     # Check for protected datasets
+    def _get_protected(pr_status):
+        if isinstance(protected_status, list):
+            pr = any(d['status_code'] == 403 for d in pr_status)
+        else:
+            pr = protected_status['status_code'] == 403
+
+        return pr
+
     # Account for cases where we are connected to cortex lab database
     if one.alyx.base_url == 'https://alyx.cortexlab.net':
-        protected_status = IBLRegistrationClient(
-            ONE(base_url='https://alyx.internationalbrainlab.org', mode='remote')).check_protected_files(file_list)
+        try:
+            protected_status = IBLRegistrationClient(
+                ONE(base_url='https://alyx.internationalbrainlab.org', mode='remote')).check_protected_files(file_list)
+            protected = _get_protected(protected_status)
+        except HTTPError as err:
+            if "[Errno 500] /check-protected: 'A base session for" in str(err):
+                # If we get an error due to the session not existing, we take this to mean no datasets are protected
+                protected = False
+            else:
+                raise err
     else:
         protected_status = client.check_protected_files(file_list)
-
-    if isinstance(protected_status, list):
-        protected = any(d['status_code'] == 403 for d in protected_status)
-    else:
-        protected = protected_status['status_code'] == 403
+        protected = _get_protected(protected_status)
 
     # If we find a protected dataset, and we don't have a force=True flag, raise an error
     if protected and not kwargs.pop('force', False):
diff --git a/ibllib/pipes/tasks.py b/ibllib/pipes/tasks.py
index d14b20661..287dd530a 100644
--- a/ibllib/pipes/tasks.py
+++ b/ibllib/pipes/tasks.py
@@ -387,6 +387,13 @@ def assert_expected(self, expected_files, silent=False):
         files = []
         for expected_file in expected_files:
             actual_files = list(Path(self.session_path).rglob(str(Path(*filter(None, reversed(expected_file[:2]))))))
+            # Account for revisions
+            if len(actual_files) == 0:
+                collection = expected_file[1] + '/#*' if expected_file[1] != '' else expected_file[1] + '#*'
+                expected_revision = (expected_file[0], collection, expected_file[2])
+                actual_files = list(
+                    Path(self.session_path).rglob(str(Path(*filter(None, reversed(expected_revision[:2]))))))
+
             if len(actual_files) == 0 and expected_file[2]:
                 everything_is_fine = False
                 if not silent:
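The assert_expected change above retries the output search with a '#*' path component appended to the expected collection, which matches ALF revision folders (directories named with leading and trailing hashes, e.g. alf/#2024-05-01#/). A rough sketch of how the fallback pattern is built, using the same (filename, collection, required) tuple convention as the hunk; the session path and dataset are illustrative:

    from pathlib import Path

    expected_file = ('_ibl_wheel.position.npy', 'alf', True)         # (filename, collection, required)
    session_path = Path('/data/subject/2024-01-01/001')              # assumed session path

    # First pass: the dataset directly under its expected collection
    pattern = str(Path(*filter(None, reversed(expected_file[:2]))))  # -> 'alf/_ibl_wheel.position.npy'
    actual_files = list(session_path.rglob(pattern))

    # Fallback: the same dataset registered under a revision folder, e.g. alf/#2024-05-01#/
    if not actual_files:
        collection = expected_file[1] + '/#*' if expected_file[1] != '' else '#*'
        revision_pattern = str(Path(*filter(None, reversed((expected_file[0], collection)))))
        actual_files = list(session_path.rglob(revision_pattern))
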
diff --git a/ibllib/pipes/video_tasks.py b/ibllib/pipes/video_tasks.py
index d9a312808..86cb49d33 100644
--- a/ibllib/pipes/video_tasks.py
+++ b/ibllib/pipes/video_tasks.py
@@ -506,7 +506,7 @@ def _run(self, overwrite=True, run_qc=True, plot_qc=True):
         if exist and overwrite:
             _logger.warning('EphysPostDLC outputs exist and overwrite=True, overwriting existing outputs.')
         # Find all available DLC files
-        dlc_files = list(Path(self.session_path).joinpath('alf').glob('_ibl_*Camera.dlc.*'))
+        dlc_files = list(Path(self.session_path).joinpath('alf').rglob('_ibl_*Camera.dlc.*'))
         for dlc_file in dlc_files:
             _logger.debug(dlc_file)
         output_files = []
@@ -521,7 +521,7 @@ def _run(self, overwrite=True, run_qc=True, plot_qc=True):
                 dlc_thresh = likelihood_threshold(dlc, 0.9)
                 # try to load respective camera times
                 try:
-                    dlc_t = np.load(next(Path(self.session_path).joinpath('alf').glob(f'_ibl_{cam}Camera.times.*npy')))
+                    dlc_t = np.load(next(Path(self.session_path).joinpath('alf').rglob(f'_ibl_{cam}Camera.times.*npy')))
                     times = True
                     if dlc_t.shape[0] == 0:
                         _logger.error(f'camera.times empty for {cam} camera. '
diff --git a/ibllib/plots/figures.py b/ibllib/plots/figures.py
index e0b027537..f51666773 100644
--- a/ibllib/plots/figures.py
+++ b/ibllib/plots/figures.py
@@ -783,19 +783,15 @@ def dlc_qc_plot(session_path, one=None, device_collection='raw_video_data',
     assert any(data[f'{cam}_times'] is not None for cam in cameras), "No camera times data could be loaded, aborting."
 
     # Load session level data
-    for alf_object in ['trials', 'wheel', 'licks']:
+    for alf_object, collection in zip(['trials', 'wheel', 'licks'], [trials_collection, trials_collection, 'alf']):
         try:
-            if alf_object == 'licks':
-                data[f'{alf_object}'] = alfio.load_object(session_path.joinpath('alf'),
-                                                          alf_object)  # load locally
-            else:
-                data[f'{alf_object}'] = alfio.load_object(session_path.joinpath(trials_collection), alf_object)  # load locally
+            data[f'{alf_object}'] = alfio.load_object(session_path.joinpath(collection), alf_object)  # load locally
             continue
         except ALFObjectNotFound:
             pass
 
         try:  # then try from alyx
-            data[f'{alf_object}'] = one.load_object(one.path2eid(session_path), alf_object, collection=trials_collection)
+            data[f'{alf_object}'] = one.load_object(one.path2eid(session_path), alf_object, collection=collection)
         except ALFObjectNotFound:
             logger.warning(f"Could not load {alf_object} object, some plots have to be skipped.")
             data[f'{alf_object}'] = None
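The dlc_qc_plot change above replaces the licks special case with a per-object collection mapping: trials and wheel are read from the trials collection while licks always come from alf, first locally and then from Alyx. Roughly, the resulting loop behaves like the sketch below; the session path and collection value are illustrative, and a configured ONE instance is assumed:

    from pathlib import Path

    import one.alf.io as alfio
    from one.alf.exceptions import ALFObjectNotFound
    from one.api import ONE

    one = ONE()
    session_path = Path('/data/subject/2024-01-01/001')  # assumed local session copy
    trials_collection = 'alf/task_00'                     # assumed non-default trials collection

    data = {}
    for alf_object, collection in zip(['trials', 'wheel', 'licks'],
                                      [trials_collection, trials_collection, 'alf']):
        try:
            data[alf_object] = alfio.load_object(session_path.joinpath(collection), alf_object)  # local first
            continue
        except ALFObjectNotFound:
            pass
        try:  # then fall back to Alyx
            data[alf_object] = one.load_object(one.path2eid(session_path), alf_object, collection=collection)
        except ALFObjectNotFound:
            data[alf_object] = None
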
diff --git a/ibllib/qc/camera.py b/ibllib/qc/camera.py
index 61383deda..4828b014e 100644
--- a/ibllib/qc/camera.py
+++ b/ibllib/qc/camera.py
@@ -1137,8 +1137,9 @@ def load_data(self, download_data: bool = None,
         alf_path = self.session_path / 'alf'
         try:
             assert not extract_times
+            cam_path = next(alf_path.rglob(f'*{self.label}Camera.times*')).parent
             self.data['timestamps'] = alfio.load_object(
-                alf_path, f'{self.label}Camera', short_keys=True)['times']
+                cam_path, f'{self.label}Camera', short_keys=True)['times']
         except AssertionError:  # Re-extract
             kwargs = dict(video_path=self.video_path, labels=self.label)
             if self.sync == 'bpod':
@@ -1154,8 +1155,8 @@ def load_data(self, download_data: bool = None,
         wheel_keys = ('timestamps', 'position')
         try:
             # glob in case wheel data are in sub-collections
-            alf_path = next(alf_path.rglob('*wheel.timestamps*')).parent
-            self.data['wheel'] = alfio.load_object(alf_path, 'wheel', short_keys=True)
+            wheel_path = next(alf_path.rglob('*wheel.timestamps*')).parent
+            self.data['wheel'] = alfio.load_object(wheel_path, 'wheel', short_keys=True)
         except ALFObjectNotFound:
             # Extract from raw data
             if self.sync != 'bpod':
diff --git a/ibllib/qc/dlc.py b/ibllib/qc/dlc.py
index 460170e5f..0a00aaadf 100644
--- a/ibllib/qc/dlc.py
+++ b/ibllib/qc/dlc.py
@@ -93,9 +93,11 @@ def load_data(self, download_data: bool = None) -> None:
         alf_path = self.session_path / 'alf'
 
         # Load times
-        self.data['camera_times'] = alfio.load_object(alf_path, f'{self.side}Camera')['times']
+        cam_path = next(alf_path.rglob(f'*{self.side}Camera.times*')).parent
+        self.data['camera_times'] = alfio.load_object(cam_path, f'{self.side}Camera')['times']
         # Load dlc traces
-        dlc_df = alfio.load_object(alf_path, f'{self.side}Camera', namespace='ibl')['dlc']
+        dlc_path = next(alf_path.rglob(f'*{self.side}Camera.dlc*')).parent
+        dlc_df = alfio.load_object(dlc_path, f'{self.side}Camera', namespace='ibl')['dlc']
         targets = np.unique(['_'.join(col.split('_')[:-1]) for col in dlc_df.columns])
         # Set values to nan if likelihood is too low
         dlc_coords = {}
@@ -106,11 +108,13 @@ def load_data(self, download_data: bool = None) -> None:
         self.data['dlc_coords'] = dlc_coords
 
         # load stim on times
-        self.data['stimOn_times'] = alfio.load_object(alf_path, 'trials', namespace='ibl')['stimOn_times']
+        trial_path = next(alf_path.rglob('*trials.table*')).parent
+        self.data['stimOn_times'] = alfio.load_object(trial_path, 'trials', namespace='ibl')['stimOn_times']
 
         # load pupil diameters
         if self.side in ['left', 'right']:
-            features = alfio.load_object(alf_path, f'{self.side}Camera', namespace='ibl')['features']
+            feat_path = next(alf_path.rglob(f'*{self.side}Camera.features*')).parent
+            features = alfio.load_object(feat_path, f'{self.side}Camera', namespace='ibl')['features']
             self.data['pupilDiameter_raw'] = features['pupilDiameter_raw']
             self.data['pupilDiameter_smooth'] = features['pupilDiameter_smooth']
 
diff --git a/ibllib/tests/qc/test_task_metrics.py b/ibllib/tests/qc/test_task_metrics.py
index b3a459978..59fea7e07 100644
--- a/ibllib/tests/qc/test_task_metrics.py
+++ b/ibllib/tests/qc/test_task_metrics.py
@@ -31,9 +31,10 @@ def _create_test_qc_outcomes():
 
 class TestAggregateOutcome(unittest.TestCase):
     def test_deprecation_warning(self):
-        """Remove TaskQC.compute_session_status_from_dict after 2024-04-01."""
+        """Remove TaskQC.compute_session_status_from_dict after 2024-06-01. Cherry pick commit
+        3cbbd1769e1ba82a51b09a992b2d5f4929f396b2 for removal of this test and applicable code"""
         from datetime import datetime
-        self.assertFalse(datetime.now() > datetime(2024, 4, 10), 'remove TaskQC.compute_session_status_from_dict method.')
+        self.assertFalse(datetime.now() > datetime(2024, 6, 1), 'remove TaskQC.compute_session_status_from_dict method.')
         qc_dict = {'_task_iti_delays': .99}
         with self.assertWarns(DeprecationWarning), self.assertLogs(qcmetrics.__name__, spec.QC.WARNING):
             out = qcmetrics.TaskQC.compute_session_status_from_dict(qc_dict)
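The test edit above only pushes the removal deadline back; the underlying pattern is a self-expiring test that starts failing once the date passes, forcing the deprecated code path to be deleted. A generic, standalone sketch of that pattern (names are illustrative, not the ibllib test):

    import unittest
    from datetime import datetime


    class TestDeprecations(unittest.TestCase):
        def test_remove_legacy_helper(self):
            """Fail after 2024-06-01 as a reminder to delete the deprecated helper."""
            self.assertFalse(datetime.now() > datetime(2024, 6, 1),
                             'remove the deprecated helper and this test')


    if __name__ == '__main__':
        unittest.main()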