├── .flake8
├── .github
├── ISSUE_TEMPLATE
│ ├── bug_report.md
│ └── feature_request.md
└── workflows
│ ├── ibllib_ci.yml
│ └── python-publish.yml
├── .gitignore
├── CITATION.cff
├── LICENSE
├── MANIFEST.in
├── README.md
├── brainbox
├── CONTRIBUTING.md
├── README.md
├── __init__.py
├── behavior
│ ├── __init__.py
│ ├── dlc.py
│ ├── training.py
│ └── wheel.py
├── core.py
├── docs
│ ├── Makefile
│ ├── _static
│ │ ├── IBL_b_n_w.png
│ │ ├── IBL_black_bnw.png
│ │ ├── LogoFullResAlpha.png
│ │ ├── android-chrome-192x192.png
│ │ ├── android-chrome-512x512.png
│ │ ├── apple-touch-icon.png
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ ├── favicon.ico
│ │ ├── mstile-150x150.png
│ │ └── safari-pinned-tab.svg
│ ├── _templates
│ │ ├── autosummary
│ │ │ └── module.rst
│ │ └── header.html
│ ├── api
│ │ ├── brainbox.io.io.extract_waveforms.rst
│ │ ├── brainbox.metrics.metrics.contamination_est.rst
│ │ ├── brainbox.metrics.metrics.cum_drift.rst
│ │ ├── brainbox.metrics.metrics.firing_rate_coeff_var.rst
│ │ ├── brainbox.metrics.metrics.isi_viol.rst
│ │ ├── brainbox.metrics.metrics.max_drift.rst
│ │ ├── brainbox.metrics.metrics.missed_spikes_est.rst
│ │ ├── brainbox.metrics.metrics.pres_ratio.rst
│ │ ├── brainbox.metrics.metrics.ptp_over_noise.rst
│ │ ├── brainbox.metrics.metrics.unit_stability.rst
│ │ ├── brainbox.metrics.metrics.wf_similarity.rst
│ │ ├── brainbox.plot.plot.amp_heatmap.rst
│ │ ├── brainbox.plot.plot.driftmap.rst
│ │ ├── brainbox.plot.plot.feat_vars.rst
│ │ ├── brainbox.plot.plot.firing_rate.rst
│ │ ├── brainbox.plot.plot.missed_spikes_est.rst
│ │ ├── brainbox.plot.plot.peri_event_time_histogram.rst
│ │ ├── brainbox.plot.plot.pres_ratio.rst
│ │ ├── brainbox.plot.plot.wf_comp.rst
│ │ ├── brainbox.population.population.xcorr.rst
│ │ ├── brainbox.processing.processing.bin_spikes.rst
│ │ ├── brainbox.processing.processing.bincount2D.rst
│ │ ├── brainbox.processing.processing.filter_units.rst
│ │ ├── brainbox.processing.processing.get_units_bunch.rst
│ │ ├── brainbox.processing.processing.sync.rst
│ │ ├── brainbox.singlecell.singlecell.acorr.rst
│ │ ├── brainbox.singlecell.singlecell.calculate_peths.rst
│ │ └── brainbox.singlecell.singlecell.firing_rate.rst
│ ├── conf.py
│ ├── contributing.md
│ ├── index.rst
│ ├── make.bat
│ ├── modules
│ │ ├── behavior.rst
│ │ ├── io.rst
│ │ ├── metrics.rst
│ │ ├── plot.rst
│ │ ├── population.rst
│ │ ├── processing.rst
│ │ ├── quality.rst
│ │ ├── simulation.rst
│ │ ├── singlecell.rst
│ │ └── task.rst
│ ├── overview.md
│ ├── tutorials.md
│ └── usage
│ │ ├── installation.md
│ │ └── quickstart.md
├── ephys_plots.py
├── examples
│ ├── 2019-09_cca_code-camp.ipynb
│ ├── DLC_pupil_event.py
│ ├── Psychometric curves.ipynb
│ ├── best_available_channels_from_insertion_id.py
│ ├── brainbox_plot_peth_func.py
│ ├── count_wheel_time_impossibilities.py
│ ├── decoding.py
│ ├── dim_reduction.py
│ ├── docs_access_DLC.py
│ ├── docs_explore_passive.py
│ ├── docs_get_training_status.py
│ ├── docs_load_spike_sorting.py
│ ├── docs_load_video.py
│ ├── docs_scatter_raster_plot.py
│ ├── docs_wheel_moves.ipynb
│ ├── docs_wheel_screen_stimulus.ipynb
│ ├── gen_phy_metrics.py
│ ├── lfp_plots.py
│ ├── plot_SingleUnit_StabilityQC__GaelleJai.py
│ ├── plot_all_peths.py
│ ├── plot_atlas_color_values.py
│ ├── pyschofit_example.ipynb
│ ├── raster_cluster_ordered.py
│ ├── raster_clusters.py
│ ├── raster_depth_per_spike.py
│ ├── raster_depths.py
│ ├── raster_per_trial.py
│ ├── simplest_peth_plot.ipynb
│ ├── simplest_peth_plot.py
│ ├── simplest_peth_plot_aligned_to_StimON.py
│ ├── simplest_raster_plot.ipynb
│ ├── simplest_raster_plot.py
│ ├── test_cca.ipynb
│ ├── wheel_moves.ipynb
│ └── xcorr_numpy.py
├── io
│ ├── __init__.py
│ ├── one.py
│ └── spikeglx.py
├── metrics
│ ├── __init__.py
│ ├── electrode_drift.py
│ └── single_units.py
├── plot.py
├── plot_base.py
├── population
│ ├── __init__.py
│ ├── cca.py
│ └── decode.py
├── processing.py
├── singlecell.py
├── task
│ ├── __init__.py
│ ├── _knockoff.py
│ ├── _statsmodels.py
│ ├── closed_loop.py
│ ├── passive.py
│ └── trials.py
├── tests
│ ├── __init__.py
│ ├── fixtures
│ │ ├── ephys_test.p
│ │ ├── parquet_records.json
│ │ ├── trials_df_test.csv
│ │ ├── trials_test.pickle
│ │ └── wheel_test.p
│ ├── test_behavior.py
│ ├── test_cca.py
│ ├── test_io.py
│ ├── test_metrics.py
│ ├── test_passive.py
│ ├── test_plot_base.py
│ ├── test_population.py
│ ├── test_processing.py
│ ├── test_singlecell.py
│ ├── test_task.py
│ ├── test_trials.py
│ └── test_video.py
└── video.py
├── examples
├── WIP
│ └── bpod-qc-sound.py
├── __init__.py
├── archive
│ ├── audio_pipeline.py
│ ├── ephys_qc_raw.py
│ ├── ephys_synch.py
│ ├── ibllib
│ │ ├── qc_behaviour_bpod.py
│ │ └── synchronisation_ephys.py
│ ├── one_demo.py.ipynb
│ ├── one_plot_psychometric_curve.py
│ ├── rest_water_restrictions.py
│ ├── rest_weighings.py
│ └── test_onelight.ipynb
├── data_release
│ ├── data_release_behavior.ipynb
│ ├── data_release_brainwidemap.ipynb
│ ├── data_release_repro_ephys.ipynb
│ └── data_release_spikesorting_benchmarks.ipynb
├── ephys
│ ├── example_amp_depth_scatter.py
│ ├── example_single_cluster_stim_aligned_activity.py
│ └── example_stim_aligned_over_depth.py
├── exploring_data
│ ├── data_download.ipynb
│ └── data_structure.ipynb
├── loading_data
│ ├── loading_ephys_data.ipynb
│ ├── loading_multi_photon_imaging_data.ipynb
│ ├── loading_passive_data.ipynb
│ ├── loading_photometry_data.ipynb
│ ├── loading_raw_audio_data.ipynb
│ ├── loading_raw_ephys_data.ipynb
│ ├── loading_raw_mesoscope_data.ipynb
│ ├── loading_raw_video_data.ipynb
│ ├── loading_spikesorting_data.ipynb
│ ├── loading_trials_data.ipynb
│ ├── loading_video_data.ipynb
│ ├── loading_wheel_data.ipynb
│ └── loading_widefield_data.ipynb
└── one
│ ├── __init__.py
│ ├── behavior
│ ├── number_mice_inproject.py
│ ├── plot_microphone_spectrogram.py
│ ├── plot_weight_curve.py
│ ├── print_water_administrations.py
│ ├── stream_video_frames.py
│ ├── water_administrations_add_new.py
│ └── water_administrations_weekend.py
│ ├── docs_one_queries.py
│ ├── ephys
│ ├── First_pass_progress
│ │ ├── Firstpassmap_x_y.csv
│ │ └── firstpass_scatterplot.py
│ ├── README.txt
│ ├── docs_compute_drift.py
│ ├── docs_get_cluster_brain_locations.py
│ ├── docs_get_first_pass_map_sessions.py
│ ├── docs_get_power_spectrum_data.py
│ ├── docs_get_rms_data.py
│ ├── docs_plot_sound_spectrogram_ephysrig.py
│ ├── docs_raw_data_decompress.py
│ ├── get_list_mice_certif.py
│ ├── get_list_mice_repeated_site.py
│ ├── get_probe_label_dir.py
│ ├── plot_raster_drift.py
│ ├── raw_data_download.py
│ └── raw_data_sync_session_time.py
│ ├── histology
│ ├── brain_regions_navigation.py
│ ├── coverage_map.py
│ ├── create_histology_session.py
│ ├── docs_find_dist_neighbouring_region.py
│ ├── docs_find_nearby_trajectories.py
│ ├── docs_find_previous_alignments.py
│ ├── docs_visualization3D_subject_channels.py
│ ├── docs_visualize_session_coronal_tilted.py
│ ├── get_probe_trajectory.py
│ ├── list_trajectory_more1alignment.py
│ ├── register_lasagna_tracks_alyx.py
│ ├── visualization3D_alyx_traj_planned_histology.py
│ ├── visualization3D_repeated_site.py
│ ├── visualization3D_rotating_gif_firstpassmap_plan.py
│ ├── visualization3D_rotating_gif_selectedmice.py
│ ├── visualization3D_subject_histology.py
│ ├── visualize_alyx_channels_coronal.py
│ ├── visualize_alyx_traj_coronal_sagittal_raster.py
│ ├── visualize_track_file_coronal_GUIoption.py
│ └── visualize_track_file_coronal_sagittal_slice.py
│ └── one_queries.py
├── gitflow_checklist.md
├── ibllib
├── __init__.py
├── ephys
│ ├── __init__.py
│ ├── ephysqc.py
│ ├── spikes.py
│ └── sync_probes.py
├── exceptions.py
├── io
│ ├── __init__.py
│ ├── extractors
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── biased_trials.py
│ │ ├── bpod_trials.py
│ │ ├── camera.py
│ │ ├── default_channel_maps.py
│ │ ├── ephys_fpga.py
│ │ ├── ephys_passive.py
│ │ ├── ephys_sessions
│ │ │ ├── passive_stim_meta.json
│ │ │ ├── session_0_ephys_len_blocks.npy
│ │ │ ├── session_0_ephys_pcqs.npy
│ │ │ ├── session_0_passive_pcs.npy
│ │ │ ├── session_0_passive_stimDelays.npy
│ │ │ ├── session_0_passive_stimIDs.npy
│ │ │ ├── session_0_stim_phase.npy
│ │ │ ├── session_10_ephys_len_blocks.npy
│ │ │ ├── session_10_ephys_pcqs.npy
│ │ │ ├── session_10_passive_pcs.npy
│ │ │ ├── session_10_passive_stimDelays.npy
│ │ │ ├── session_10_passive_stimIDs.npy
│ │ │ ├── session_10_stim_phase.npy
│ │ │ ├── session_11_ephys_len_blocks.npy
│ │ │ ├── session_11_ephys_pcqs.npy
│ │ │ ├── session_11_passive_pcs.npy
│ │ │ ├── session_11_passive_stimDelays.npy
│ │ │ ├── session_11_passive_stimIDs.npy
│ │ │ ├── session_11_stim_phase.npy
│ │ │ ├── session_1_ephys_len_blocks.npy
│ │ │ ├── session_1_ephys_pcqs.npy
│ │ │ ├── session_1_passive_pcs.npy
│ │ │ ├── session_1_passive_stimDelays.npy
│ │ │ ├── session_1_passive_stimIDs.npy
│ │ │ ├── session_1_stim_phase.npy
│ │ │ ├── session_2_ephys_len_blocks.npy
│ │ │ ├── session_2_ephys_pcqs.npy
│ │ │ ├── session_2_passive_pcs.npy
│ │ │ ├── session_2_passive_stimDelays.npy
│ │ │ ├── session_2_passive_stimIDs.npy
│ │ │ ├── session_2_stim_phase.npy
│ │ │ ├── session_3_ephys_len_blocks.npy
│ │ │ ├── session_3_ephys_pcqs.npy
│ │ │ ├── session_3_passive_pcs.npy
│ │ │ ├── session_3_passive_stimDelays.npy
│ │ │ ├── session_3_passive_stimIDs.npy
│ │ │ ├── session_3_stim_phase.npy
│ │ │ ├── session_4_ephys_len_blocks.npy
│ │ │ ├── session_4_ephys_pcqs.npy
│ │ │ ├── session_4_passive_pcs.npy
│ │ │ ├── session_4_passive_stimDelays.npy
│ │ │ ├── session_4_passive_stimIDs.npy
│ │ │ ├── session_4_stim_phase.npy
│ │ │ ├── session_5_ephys_len_blocks.npy
│ │ │ ├── session_5_ephys_pcqs.npy
│ │ │ ├── session_5_passive_pcs.npy
│ │ │ ├── session_5_passive_stimDelays.npy
│ │ │ ├── session_5_passive_stimIDs.npy
│ │ │ ├── session_5_stim_phase.npy
│ │ │ ├── session_6_ephys_len_blocks.npy
│ │ │ ├── session_6_ephys_pcqs.npy
│ │ │ ├── session_6_passive_pcs.npy
│ │ │ ├── session_6_passive_stimDelays.npy
│ │ │ ├── session_6_passive_stimIDs.npy
│ │ │ ├── session_6_stim_phase.npy
│ │ │ ├── session_7_ephys_len_blocks.npy
│ │ │ ├── session_7_ephys_pcqs.npy
│ │ │ ├── session_7_passive_pcs.npy
│ │ │ ├── session_7_passive_stimDelays.npy
│ │ │ ├── session_7_passive_stimIDs.npy
│ │ │ ├── session_7_stim_phase.npy
│ │ │ ├── session_8_ephys_len_blocks.npy
│ │ │ ├── session_8_ephys_pcqs.npy
│ │ │ ├── session_8_passive_pcs.npy
│ │ │ ├── session_8_passive_stimDelays.npy
│ │ │ ├── session_8_passive_stimIDs.npy
│ │ │ ├── session_8_stim_phase.npy
│ │ │ ├── session_9_ephys_len_blocks.npy
│ │ │ ├── session_9_ephys_pcqs.npy
│ │ │ ├── session_9_passive_pcs.npy
│ │ │ ├── session_9_passive_stimDelays.npy
│ │ │ ├── session_9_passive_stimIDs.npy
│ │ │ ├── session_9_stim_phase.npy
│ │ │ ├── session_mock_ephys_len_blocks.npy
│ │ │ ├── session_mock_ephys_pcqs.npy
│ │ │ ├── session_mock_passive_pcs.npy
│ │ │ ├── session_mock_passive_stimDelays.npy
│ │ │ ├── session_mock_passive_stimIDs.npy
│ │ │ └── session_mock_stim_phase.npy
│ │ ├── habituation_trials.py
│ │ ├── mesoscope.py
│ │ ├── mesoscope
│ │ │ ├── README.md
│ │ │ └── surface_triangulation.npz
│ │ ├── opto_trials.py
│ │ ├── passive_plotting.py
│ │ ├── task_extractor_map.json
│ │ ├── training_audio.py
│ │ ├── training_trials.py
│ │ ├── training_wheel.py
│ │ ├── video_motion.py
│ │ └── widefield.py
│ ├── ffmpeg.py
│ ├── flags.py
│ ├── misc.py
│ ├── raw_daq_loaders.py
│ ├── raw_data_loaders.py
│ ├── session_params.py
│ └── video.py
├── misc
│ ├── __init__.py
│ ├── misc.py
│ └── qt.py
├── oneibl
│ ├── __init__.py
│ ├── data_handlers.py
│ ├── patcher.py
│ └── registration.py
├── pipes
│ ├── __init__.py
│ ├── audio_tasks.py
│ ├── base_tasks.py
│ ├── behavior_tasks.py
│ ├── dynamic_pipeline.py
│ ├── ephys_alignment.py
│ ├── ephys_tasks.py
│ ├── histology.py
│ ├── local_server.py
│ ├── mesoscope_tasks.py
│ ├── misc.py
│ ├── neurophotometrics.py
│ ├── scan_fix_passive_files.py
│ ├── sync_tasks.py
│ ├── tasks.py
│ ├── training_status.py
│ ├── video_tasks.py
│ └── widefield_tasks.py
├── plots
│ ├── __init__.py
│ ├── figures.py
│ ├── misc.py
│ └── snapshot.py
├── qc
│ ├── __init__.py
│ ├── alignment_qc.py
│ ├── base.py
│ ├── camera.py
│ ├── critical_reasons.py
│ ├── dlc.py
│ ├── reference
│ │ ├── frame_src.json
│ │ ├── frames_body.npy
│ │ ├── frames_left.npy
│ │ └── frames_right.npy
│ ├── task_extractors.py
│ ├── task_metrics.py
│ └── task_qc_viewer
│ │ ├── README.md
│ │ ├── ViewEphysQC.py
│ │ ├── __init__.py
│ │ └── task_qc.py
├── tests
│ ├── __init__.py
│ ├── extractors
│ │ ├── __init__.py
│ │ ├── data
│ │ │ ├── session_biased_ge5
│ │ │ │ └── raw_behavior_data
│ │ │ │ │ ├── _iblrig_ambientSensorData.raw.jsonable
│ │ │ │ │ ├── _iblrig_encoderEvents.raw.ssv
│ │ │ │ │ ├── _iblrig_encoderPositions.raw.ssv
│ │ │ │ │ ├── _iblrig_encoderTrialInfo.raw.ssv
│ │ │ │ │ ├── _iblrig_taskData.raw.jsonable
│ │ │ │ │ └── _iblrig_taskSettings.raw.json
│ │ │ ├── session_biased_lt5
│ │ │ │ └── raw_behavior_data
│ │ │ │ │ ├── _iblrig_encoderEvents.raw.ssv
│ │ │ │ │ ├── _iblrig_encoderPositions.raw.ssv
│ │ │ │ │ ├── _iblrig_taskData.raw.jsonable
│ │ │ │ │ └── _iblrig_taskSettings.raw.json
│ │ │ ├── session_ephys
│ │ │ │ ├── raw_behavior_data
│ │ │ │ │ ├── _iblrig_encoderEvents.raw.ssv
│ │ │ │ │ ├── _iblrig_encoderPositions.raw.ssv
│ │ │ │ │ ├── _iblrig_encoderTrialInfo.raw.ssv
│ │ │ │ │ ├── _iblrig_taskCodeFiles.raw.zip
│ │ │ │ │ ├── _iblrig_taskData.raw.jsonable
│ │ │ │ │ ├── _iblrig_taskDataCodeFiles.raw.zip
│ │ │ │ │ └── _iblrig_taskSettings.raw.json
│ │ │ │ └── raw_video_data
│ │ │ │ │ ├── _iblrig_bodyCamera.GPIO.bin
│ │ │ │ │ ├── _iblrig_bodyCamera.frame_counter.bin
│ │ │ │ │ └── _iblrig_bodyCamera.timestamps.ssv
│ │ │ ├── session_training_ge5
│ │ │ │ └── raw_behavior_data
│ │ │ │ │ ├── _iblrig_ambientSensorData.raw.jsonable
│ │ │ │ │ ├── _iblrig_encoderEvents.raw.ssv
│ │ │ │ │ ├── _iblrig_encoderPositions.raw.ssv
│ │ │ │ │ ├── _iblrig_encoderTrialInfo.raw.ssv
│ │ │ │ │ ├── _iblrig_taskData.raw.jsonable
│ │ │ │ │ └── _iblrig_taskSettings.raw.json
│ │ │ ├── session_training_lt5
│ │ │ │ └── raw_behavior_data
│ │ │ │ │ ├── _iblrig_encoderEvents.raw.ssv
│ │ │ │ │ ├── _iblrig_encoderPositions.raw.ssv
│ │ │ │ │ ├── _iblrig_taskData.raw.jsonable
│ │ │ │ │ └── _iblrig_taskSettings.raw.json
│ │ │ └── wheel
│ │ │ │ ├── ge5
│ │ │ │ ├── _iblrig_encoderEvents.raw.ssv
│ │ │ │ └── _iblrig_encoderPositions.raw.ssv
│ │ │ │ └── lt5
│ │ │ │ ├── _iblrig_encoderEvents.raw.00.ssv
│ │ │ │ ├── _iblrig_encoderEvents.raw.CorruptMiddle.ssv
│ │ │ │ ├── _iblrig_encoderEvents.raw.CorruptTimestamp.ssv
│ │ │ │ ├── _iblrig_encoderPositions.raw.00.ssv
│ │ │ │ ├── _iblrig_encoderPositions.raw.01.ssv
│ │ │ │ └── _iblrig_encoderPositions.raw.2firstsamples.ssv
│ │ ├── test_ephys_fpga.py
│ │ ├── test_ephys_passive.py
│ │ ├── test_extractors.py
│ │ ├── test_extractors_base.py
│ │ └── test_mesoscope.py
│ ├── fixtures
│ │ ├── __init__.py
│ │ ├── ephysalignment
│ │ │ └── alignment_data.npz
│ │ ├── histology
│ │ │ └── tracks
│ │ │ │ └── 2019-12-04_KS014_001_probe00_pts.csv
│ │ ├── io
│ │ │ ├── _ibl_experiment.description.yaml
│ │ │ └── data_loaders
│ │ │ │ └── _iblrig_test_mouse_2020-01-01_001
│ │ │ │ └── raw_video_data
│ │ │ │ └── _iblrig_leftCamera.frameData.bin
│ │ ├── pipes
│ │ │ ├── sample3A_g0_t0.imec.ap.meta
│ │ │ ├── sample3B_g0_t0.imec1.ap.meta
│ │ │ ├── sample3B_g0_t0.nidq.meta
│ │ │ ├── sampleNP2.1_g0_t0.imec.ap.meta
│ │ │ ├── sampleNP2.4_1shank_g0_t0.imec.ap.meta
│ │ │ └── sampleNP2.4_4shanks_g0_t0.imec.ap.meta
│ │ ├── qc
│ │ │ ├── camera_times.npy
│ │ │ ├── data_alignmentqc_existing.npz
│ │ │ ├── data_alignmentqc_manual.npz
│ │ │ ├── pupil_diameter.npy
│ │ │ ├── stimOn_times.npy
│ │ │ └── wheel.npy
│ │ ├── sync_ephys_fpga
│ │ │ ├── sample3A_g0_t0.imec.ap.meta
│ │ │ ├── sample3B_g0_t0.imec1.ap.meta
│ │ │ └── sample3B_g0_t0.nidq.meta
│ │ └── utils.py
│ ├── qc
│ │ ├── __init__.py
│ │ ├── test_alignment_qc.py
│ │ ├── test_base_qc.py
│ │ ├── test_camera_qc.py
│ │ ├── test_critical_reasons.py
│ │ ├── test_dlc_qc.py
│ │ ├── test_task_metrics.py
│ │ └── test_task_qc_viewer.py
│ ├── test_base_tasks.py
│ ├── test_dynamic_pipeline.py
│ ├── test_ephys.py
│ ├── test_histology.py
│ ├── test_io.py
│ ├── test_mesoscope.py
│ ├── test_oneibl.py
│ ├── test_pipes.py
│ ├── test_plots.py
│ ├── test_tasks.py
│ ├── test_time.py
│ └── test_widefield.py
└── time.py
├── readthedocs.yml
├── release_notes.md
├── requirements-analysis.txt
├── requirements.txt
├── ruff.toml
├── run_tests
├── run_tests.bat
└── setup.py
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 130
3 | ignore = W504, W503, E266, D, BLK
4 | exclude =
5 | .git,
6 | __pycache__,
7 | __init__.py,
8 | scratch_*.py,
9 | tutorial_script.py,
10 | venv,
11 | test_imports.py,
12 | build,
13 | dist,
14 | brainbox/examples/
15 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 | **Before posting**
10 | Make sure your environment is fully up-to-date by running
11 | ```
12 | conda activate iblenv
13 | conda env update --file iblenv.yaml --prune
14 | ```
15 |
16 | **Describe the bug**
17 | A clear and concise description of what the bug is.
18 |
19 | **To Reproduce**
20 | Steps to reproduce the behavior:
21 | 1. Go to '...'
22 | 2. Click on '....'
23 | 3. Scroll down to '....'
24 | 4. See error
25 |
26 | **Expected behavior**
27 | A clear and concise description of what you expected to happen.
28 |
29 | **Screenshots**
30 | If applicable, add screenshots to help explain your problem.
31 |
32 | **Desktop (please complete the following information):**
33 | - OS: [e.g. iOS]
34 | - Browser [e.g. chrome, safari]
35 | - Version [e.g. 22]
36 |
37 | **Smartphone (please complete the following information):**
38 | - Device: [e.g. iPhone6]
39 | - OS: [e.g. iOS8.1]
40 | - Browser [e.g. stock browser, safari]
41 | - Version [e.g. 22]
42 |
43 | **Additional context**
44 | Add any other context about the problem here.
45 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/workflows/ibllib_ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | schedule:
5 | - cron: '0 0 * * *' # every day at midnight
6 | workflow_dispatch: # For manual triggering
7 | push:
8 | branches: [ master, develop, public ]
9 | pull_request:
10 | branches: [ master, develop ]
11 |
12 | jobs:
13 | build:
14 | name: build (${{ matrix.python-version }}, ${{ matrix.os }})
15 | runs-on: ${{ matrix.os }}
16 | strategy:
17 | fail-fast: false # Whether to stop execution of other instances
18 | max-parallel: 2
19 | matrix:
20 | os: ["windows-latest", "ubuntu-latest"]
21 | python-version: ["3.10", "3.12"]
22 | exclude:
23 | - os: windows-latest
24 |         python-version: "3.10"
25 | - os: ubuntu-latest
26 |         python-version: "3.12"
27 | env:
28 | ONE_SAVE_ON_DELETE: false
29 | steps:
30 | - uses: actions/checkout@v2
31 | - name: Set up Python ${{ matrix.python-version }}
32 | uses: actions/setup-python@v2
33 | with:
34 | python-version: ${{ matrix.python-version }}
35 | - name: Install deps
36 | run: |
37 | python -m pip install --upgrade pip
38 | python -m pip install flake8 pytest flake8-docstrings
39 | pip install -r requirements.txt
40 | pip install -e .
41 | - name: Flake8
42 | run: |
43 | python -m flake8
44 | python -m flake8 --select D --ignore E ibllib/qc/camera.py ibllib/qc/task_metrics.py
45 | - name: Brainbox tests
46 | run: |
47 | cd brainbox
48 | pytest
49 | - name: ibllib tests
50 | run: |
51 | cd ibllib
52 | cd tests
53 | python -m unittest discover
54 |
--------------------------------------------------------------------------------
/.github/workflows/python-publish.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python Package using Twine when a release is created
2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
3 |
4 | name: Upload Python Package
5 |
6 | on:
7 | push:
8 | tags:
9 | - '*'
10 |
11 | jobs:
12 | deploy:
13 |
14 | runs-on: ubuntu-latest
15 |
16 | steps:
17 | - uses: actions/checkout@v2
18 | - name: Set up Python
19 | uses: actions/setup-python@v2
20 | with:
21 | python-version: '3.10'
22 | - name: Install dependencies
23 | run: |
24 | python -m pip install --upgrade pip
25 | pip install setuptools wheel twine
26 | - name: Build and publish
27 | env:
28 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
29 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
30 | run: |
31 | python setup.py sdist bdist_wheel
32 | twine upload dist/*
33 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | docs/_build
2 | __pycache__
3 | *.*~
4 | *.pyc
5 | # this fixes a dummy version on the public repository
6 | #git update-index --assume-unchanged python/openneurodata/oneibl/params_secret.py
7 | python/scratch
8 | .idea/*
9 | .vscode/
10 | *.code-workspace
11 | *checkpoint.ipynb
12 | build/
13 | venv/
14 | ibllibenv/
15 | dist/
16 | ibllib.egg-info/
17 | .DS_Store
18 | scratch/
19 | _autosummary/
20 | brainbox/docs/_build/
21 |
--------------------------------------------------------------------------------
/CITATION.cff:
--------------------------------------------------------------------------------
1 | cff-version: 1.2.0
2 | message: "If you use this software, please cite it as below."
3 | authors:
4 | - family-names: International Brain Laboratory
5 | given-names: The
6 | orcid:
7 | title: "ibllib"
8 | version:
9 | doi:
10 | date-released: 2021-12-09
11 | url: "https://github.com/int-brain-lab/ibllib"
12 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 International Brain Laboratory
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include ibllib/io/extractors/extractor_types.json
2 | include ibllib/io/extractors/task_extractor_map.json
3 | include brainbox/tests/wheel_test.p
4 | recursive-include brainbox/tests/fixtures *
5 | recursive-include ibllib/qc/reference *
6 | graft ibllib/tests/extractors/data
7 | graft ibllib/io/extractors/ephys_sessions
8 | graft ibllib/io/extractors/mesoscope
9 | graft ibllib/tests/fixtures
10 | recursive-include oneibl/tests/fixtures *
11 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # IBL Python Libraries
2 | [](https://ibllib.hooks.internationalbrainlab.org/coverage/master)
3 | [](https://ibllib.hooks.internationalbrainlab.org/logs/records/master)
4 | [](https://ibllib.hooks.internationalbrainlab.org/logs/records/develop)
5 |
6 | ## Description
7 | Library used to implement the International Brain Laboratory data pipeline. Currently in active development.
8 | The library currently contains 2 main modules:
9 | - **brainbox**: neuroscience analysis oriented library
10 | - **ibllib**: general purpose library containing I/O, signal processing and IBL data pipelines utilities.
11 |
12 | [Release Notes here](release_notes.md)
13 |
14 | ## Requirements
15 | **OS**: Only tested on Linux. Windows and Mac may work, but are not supported.
16 |
17 | **Python Module**: Python 3.10 or higher, Python 3.12 recommended
18 |
19 | ## Installation, documentation and examples
20 | https://docs.internationalbrainlab.org
21 |
22 |
23 | ## Contribution and development practices
24 | See https://int-brain-lab.github.io/iblenv/07_contribution.html
25 |
26 | We use Semantic Versioning.
27 |
28 | Before committing to your branch:
29 | - run tests
30 | - flake8
31 | This is also enforced by continuous integration.
32 |
33 |
34 | ## Matlab Library
35 | The Matlab library has moved to its own repository here: https://github.com/int-brain-lab/ibllib-matlab/
36 |
--------------------------------------------------------------------------------
/brainbox/README.md:
--------------------------------------------------------------------------------
1 | # brainbox
2 |
3 | ## Contributing
4 |
5 | To contribute to this repository, please read [our guide to contributing](https://github.com/int-brain-lab/ibllib/blob/master/brainbox/CONTRIBUTING.md)
6 |
--------------------------------------------------------------------------------
/brainbox/__init__.py:
--------------------------------------------------------------------------------
1 | """IBL shared data processing methods."""
2 | import logging
3 | try:
4 | import one
5 | except ModuleNotFoundError:
6 | logging.getLogger(__name__).error('Missing dependency, please run `pip install ONE-api`')
7 |
--------------------------------------------------------------------------------
/brainbox/behavior/__init__.py:
--------------------------------------------------------------------------------
1 | """Behaviour analysis functions for the IBL task."""
2 |
--------------------------------------------------------------------------------
/brainbox/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?= -v
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/brainbox/docs/_static/IBL_b_n_w.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/docs/_static/IBL_b_n_w.png
--------------------------------------------------------------------------------
/brainbox/docs/_static/IBL_black_bnw.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/docs/_static/IBL_black_bnw.png
--------------------------------------------------------------------------------
/brainbox/docs/_static/LogoFullResAlpha.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/docs/_static/LogoFullResAlpha.png
--------------------------------------------------------------------------------
/brainbox/docs/_static/android-chrome-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/docs/_static/android-chrome-192x192.png
--------------------------------------------------------------------------------
/brainbox/docs/_static/android-chrome-512x512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/docs/_static/android-chrome-512x512.png
--------------------------------------------------------------------------------
/brainbox/docs/_static/apple-touch-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/docs/_static/apple-touch-icon.png
--------------------------------------------------------------------------------
/brainbox/docs/_static/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/docs/_static/favicon-16x16.png
--------------------------------------------------------------------------------
/brainbox/docs/_static/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/docs/_static/favicon-32x32.png
--------------------------------------------------------------------------------
/brainbox/docs/_static/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/docs/_static/favicon.ico
--------------------------------------------------------------------------------
/brainbox/docs/_static/mstile-150x150.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/docs/_static/mstile-150x150.png
--------------------------------------------------------------------------------
/brainbox/docs/_templates/autosummary/module.rst:
--------------------------------------------------------------------------------
1 | {{ fullname }}
2 | {{ underline }}
3 |
4 | .. automodule:: {{ fullname }}
5 | :members:
6 |
--------------------------------------------------------------------------------
/brainbox/docs/_templates/header.html:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.io.io.extract_waveforms.rst:
--------------------------------------------------------------------------------
1 | extract_waveforms
2 | =================
3 |
4 | .. currentmodule:: brainbox.io.io
5 |
6 | .. autofunction:: extract_waveforms
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.metrics.metrics.contamination_est.rst:
--------------------------------------------------------------------------------
1 | contamination_est
2 | =================
3 |
4 | .. currentmodule:: brainbox.metrics.metrics
5 |
6 | .. autofunction:: contamination_est
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.metrics.metrics.cum_drift.rst:
--------------------------------------------------------------------------------
1 | cum_drift
2 | =========
3 |
4 | .. currentmodule:: brainbox.metrics.metrics
5 |
6 | .. autofunction:: cum_drift
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.metrics.metrics.firing_rate_coeff_var.rst:
--------------------------------------------------------------------------------
1 | firing_rate_cv
2 | =====================
3 |
4 | .. currentmodule:: brainbox.metrics.metrics
5 |
6 | .. autofunction:: firing_rate_cv
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.metrics.metrics.isi_viol.rst:
--------------------------------------------------------------------------------
1 | isi_viol
2 | ========
3 |
4 | .. currentmodule:: brainbox.metrics.metrics
5 |
6 | .. autofunction:: isi_viol
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.metrics.metrics.max_drift.rst:
--------------------------------------------------------------------------------
1 | max_drift
2 | =========
3 |
4 | .. currentmodule:: brainbox.metrics.metrics
5 |
6 | .. autofunction:: max_drift
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.metrics.metrics.missed_spikes_est.rst:
--------------------------------------------------------------------------------
1 | missed_spikes_est
2 | =================
3 |
4 | .. currentmodule:: brainbox.metrics.metrics
5 |
6 | .. autofunction:: missed_spikes_est
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.metrics.metrics.pres_ratio.rst:
--------------------------------------------------------------------------------
1 | pres_ratio
2 | ==========
3 |
4 | .. currentmodule:: brainbox.metrics.metrics
5 |
6 | .. autofunction:: pres_ratio
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.metrics.metrics.ptp_over_noise.rst:
--------------------------------------------------------------------------------
1 | ptp_over_noise
2 | ==============
3 |
4 | .. currentmodule:: brainbox.metrics.metrics
5 |
6 | .. autofunction:: ptp_over_noise
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.metrics.metrics.unit_stability.rst:
--------------------------------------------------------------------------------
1 | unit_stability
2 | ==============
3 |
4 | .. currentmodule:: brainbox.metrics.metrics
5 |
6 | .. autofunction:: unit_stability
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.metrics.metrics.wf_similarity.rst:
--------------------------------------------------------------------------------
1 | wf_similarity
2 | =============
3 |
4 | .. currentmodule:: brainbox.metrics.metrics
5 |
6 | .. autofunction:: wf_similarity
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.plot.plot.amp_heatmap.rst:
--------------------------------------------------------------------------------
1 | amp_heatmap
2 | ===========
3 |
4 | .. currentmodule:: brainbox.plot.plot
5 |
6 | .. autofunction:: amp_heatmap
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.plot.plot.driftmap.rst:
--------------------------------------------------------------------------------
1 | driftmap
2 | ========
3 |
4 | .. currentmodule:: brainbox.plot.plot
5 |
6 | .. autofunction:: driftmap
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.plot.plot.feat_vars.rst:
--------------------------------------------------------------------------------
1 | feat_vars
2 | =========
3 |
4 | .. currentmodule:: brainbox.plot.plot
5 |
6 | .. autofunction:: feat_vars
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.plot.plot.firing_rate.rst:
--------------------------------------------------------------------------------
1 | firing_rate
2 | ===========
3 |
4 | .. currentmodule:: brainbox.plot.plot
5 |
6 | .. autofunction:: firing_rate
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.plot.plot.missed_spikes_est.rst:
--------------------------------------------------------------------------------
1 | missed_spikes_est
2 | =================
3 |
4 | .. currentmodule:: brainbox.plot.plot
5 |
6 | .. autofunction:: missed_spikes_est
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.plot.plot.peri_event_time_histogram.rst:
--------------------------------------------------------------------------------
1 | peri_event_time_histogram
2 | =========================
3 |
4 | .. currentmodule:: brainbox.plot.plot
5 |
6 | .. autofunction:: peri_event_time_histogram
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.plot.plot.pres_ratio.rst:
--------------------------------------------------------------------------------
1 | pres_ratio
2 | ==========
3 |
4 | .. currentmodule:: brainbox.plot.plot
5 |
6 | .. autofunction:: pres_ratio
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.plot.plot.wf_comp.rst:
--------------------------------------------------------------------------------
1 | wf_comp
2 | =======
3 |
4 | .. currentmodule:: brainbox.plot.plot
5 |
6 | .. autofunction:: wf_comp
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.population.population.xcorr.rst:
--------------------------------------------------------------------------------
1 | xcorr
2 | =====
3 |
4 | .. currentmodule:: brainbox.population.population
5 |
6 | .. autofunction:: xcorr
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.processing.processing.bin_spikes.rst:
--------------------------------------------------------------------------------
1 | bin_spikes
2 | ==========
3 |
4 | .. currentmodule:: brainbox.processing.processing
5 |
6 | .. autofunction:: bin_spikes
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.processing.processing.bincount2D.rst:
--------------------------------------------------------------------------------
1 | bincount2D
2 | ==========
3 |
4 | .. currentmodule:: brainbox.processing.processing
5 |
6 | .. autofunction:: bincount2D
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.processing.processing.filter_units.rst:
--------------------------------------------------------------------------------
1 | filter_units
2 | ============
3 |
4 | .. currentmodule:: brainbox.processing.processing
5 |
6 | .. autofunction:: filter_units
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.processing.processing.get_units_bunch.rst:
--------------------------------------------------------------------------------
1 | get_units_bunch
2 | ===============
3 |
4 | .. currentmodule:: brainbox.processing.processing
5 |
6 | .. autofunction:: get_units_bunch
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.processing.processing.sync.rst:
--------------------------------------------------------------------------------
1 | sync
2 | ====
3 |
4 | .. currentmodule:: brainbox.processing.processing
5 |
6 | .. autofunction:: sync
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.singlecell.singlecell.acorr.rst:
--------------------------------------------------------------------------------
1 | acorr
2 | =====
3 |
4 | .. currentmodule:: brainbox.singlecell.singlecell
5 |
6 | .. autofunction:: acorr
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.singlecell.singlecell.calculate_peths.rst:
--------------------------------------------------------------------------------
1 | calculate_peths
2 | ===============
3 |
4 | .. currentmodule:: brainbox.singlecell.singlecell
5 |
6 | .. autofunction:: calculate_peths
7 |
--------------------------------------------------------------------------------
/brainbox/docs/api/brainbox.singlecell.singlecell.firing_rate.rst:
--------------------------------------------------------------------------------
1 | firing_rate
2 | ===========
3 |
4 | .. currentmodule:: brainbox.singlecell.singlecell
5 |
6 | .. autofunction:: firing_rate
7 |
--------------------------------------------------------------------------------
/brainbox/docs/contributing.md:
--------------------------------------------------------------------------------
1 | # Contributing to Brainbox
2 |
3 | Coming soon
--------------------------------------------------------------------------------
/brainbox/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. Brainbox documentation master file, created by
2 | sphinx-quickstart on Fri Jan 24 11:35:17 2020.
3 |
4 | ========================================================================================================================
5 | Welcome to Brainbox: A collaborative effort of the `International Brain Lab <https://www.internationalbrainlab.com/>`_
6 | ========================================================================================================================
7 |
8 | What is Brainbox?
9 | -----------------
10 |
11 | Brainbox is a suite of simple tools for analyzing spike-sorted
12 | electrophysiological data.
13 |
14 | With brainbox we aim to provide a useful set of functions for analyzing data
15 | that take simple inputs (lists, arrays, etc.) and provide simple outputs of
16 | similar form. Brainbox includes tools for analyzing single units and
17 | populations of neurons, as well as many useful plotting functions.
18 |
19 | Brainbox is a work in progress. If you would like to contribute, please visit our `GitHub page <https://github.com/int-brain-lab/ibllib>`_!
20 |
21 | Table of Contents
22 | -----------------
23 |
24 | .. toctree::
25 | :maxdepth: 1
26 |
27 | overview
28 | usage/installation
29 | usage/quickstart
30 | tutorials
31 | contributing
32 |
33 | Brainbox API reference
34 | ----------------------
35 |
36 | .. toctree::
37 | :maxdepth: 1
38 |
39 | modules/behavior
40 | modules/io
41 | modules/metrics
42 | modules/plot
43 | modules/population
44 | modules/processing
45 | modules/quality
46 | modules/simulation
47 | modules/singlecell
48 | modules/task
49 |
50 | Indices and tables
51 | ==================
52 |
53 | * :ref:`genindex`
54 | * :ref:`modindex`
55 | * :ref:`search`
56 |
--------------------------------------------------------------------------------
/brainbox/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/brainbox/docs/modules/behavior.rst:
--------------------------------------------------------------------------------
1 | `brainbox.behavior`: Functions for dealing with behavioral data
2 | ===============================================================
3 |
4 | The behavior submodule of brainbox includes functions for dealing with various aspects of pure
5 | behavioral data. Examples of behavioral data include reaction times, traces of appendages from
6 | DeepLabCut, or pupillometry data.
7 |
8 | Contains the `bb.behavior.wheel` submodule for dealing with experimental
9 | wheel data.
10 |
11 | At the moment, this module is empty. Let us know if you'd like to help expand it!
12 |
13 | .. automodapi:: brainbox.behavior.behavior
14 | :no-heading:
15 | :no-main-docstr:
16 |
17 | .. automodapi:: brainbox.behavior.wheel
18 | :no-heading:
19 | :no-main-docstr:
20 |
21 |
22 |
--------------------------------------------------------------------------------
/brainbox/docs/modules/io.rst:
--------------------------------------------------------------------------------
1 | `brainbox.io`: Functions for data I/O through Brainbox
2 | ===============================================================
3 |
4 | The purpose of the `io` submodule is to provide utilities for reading in, exporting, and
5 | transforming data via brainbox. This includes manipulation of matlab arrays of spikes into python
6 | friendly format, and export into .mat files among other things.
7 |
8 | .. automodapi:: brainbox.io.io
9 | :no-heading:
10 | :no-main-docstr:
11 |
--------------------------------------------------------------------------------
/brainbox/docs/modules/metrics.rst:
--------------------------------------------------------------------------------
1 | `brainbox.metrics`: Functions for assessing spike unit quality
2 | ===============================================================
3 |
4 | The purpose of the `metrics` submodule is to provide utilities for quantifying how stable a single
5 | spiking unit, as identified by spike sorting, is over time. It also includes functions for
6 | assessing the level of noise in the unit and drift.
7 |
8 | .. automodapi:: brainbox.metrics.metrics
9 | :no-heading:
10 | :no-main-docstr:
--------------------------------------------------------------------------------
/brainbox/docs/modules/plot.rst:
--------------------------------------------------------------------------------
1 | `brainbox.plot`: Plotting utilities for single units and populations
2 | ====================================================================
3 |
4 | The `plot` module is a catch-all for all types of plots that are useful
5 | in characterizing single units or populations of neurons.
6 |
7 | .. automodapi:: brainbox.plot.plot
8 | :no-heading:
9 | :no-main-docstr:
--------------------------------------------------------------------------------
/brainbox/docs/modules/population.rst:
--------------------------------------------------------------------------------
1 | `brainbox.population`: Population activity analyses
2 | ===============================================================
3 |
4 | The `population` module provides functions which help to understand the activity of large numbers
5 | of neurons recorded simultaneously.
6 |
7 | .. automodapi:: brainbox.population.population
8 | :no-heading:
9 | :no-main-docstr:
--------------------------------------------------------------------------------
/brainbox/docs/modules/processing.rst:
--------------------------------------------------------------------------------
1 | `brainbox.processing`: Functions for transforming data
2 | ===============================================================
3 |
4 | The `processing` module provides low-level utilities for processing data, such as functionality
5 | for binning spikes into non-overlapping bins, or the convolution of spike trains with a filter.
6 |
7 | .. automodapi:: brainbox.processing.processing
8 | :no-heading:
9 | :no-main-docstr:
10 |
--------------------------------------------------------------------------------
/brainbox/docs/modules/quality.rst:
--------------------------------------------------------------------------------
1 | `brainbox.quality`: General quality control metrics for data
2 | ===============================================================
3 |
4 | The `quality` module provides means of verifying the quality of
5 | incoming data. At the moment this module has no functionality.
6 |
7 | If you'd like to help expand, please let us know on the Brainbox github!
8 |
9 | .. automodapi:: brainbox.quality.quality
10 | :no-heading:
11 | :no-main-docstr:
12 |
--------------------------------------------------------------------------------
/brainbox/docs/modules/simulation.rst:
--------------------------------------------------------------------------------
1 | `brainbox.simulation`: Tools for simulating data
2 | ===============================================================
3 |
4 | The `simulation` module is intended to be a place for functions that simulate data in a very simple
5 | way. Brainbox does not aim to supplant the many other excellent neural simulation projects, such as
6 | Brian, and as such this module is limited to only basic synthetic data generation for testing
7 | purposes.
8 |
9 | .. automodapi:: brainbox.simulation.simulation
10 | :no-heading:
11 | :no-main-docstr:
12 |
--------------------------------------------------------------------------------
/brainbox/docs/modules/singlecell.rst:
--------------------------------------------------------------------------------
1 | `brainbox.singlecell`: Tools for examining single cell activity
2 | ===============================================================
3 |
4 | The `singlecell` module is a toolkit for examining the activity of single
5 | units within a recording. At the moment we are limited to simple analyses
6 | such as autocorrelations, but we aim to expand this
7 | feature set in the future.
8 |
9 | .. automodapi:: brainbox.singlecell.singlecell
10 | :no-heading:
11 | :no-main-docstr:
12 |
--------------------------------------------------------------------------------
/brainbox/docs/modules/task.rst:
--------------------------------------------------------------------------------
1 | `brainbox.task`: Tools for computing task-related neural activity
2 | =================================================================
3 |
4 | The `task` module is a toolkit for examining the relationship between the activity of single or
5 | multiple units in a recording and task-related variables. We aim for these functions
6 | to be general enough to apply to tasks beyond the IBL, and input is greatly appreciated.
7 |
8 | .. automodapi:: brainbox.task.task
9 | :no-heading:
10 | :no-main-docstr:
11 |
--------------------------------------------------------------------------------
/brainbox/docs/overview.md:
--------------------------------------------------------------------------------
1 | # Overview of Brainbox
2 |
3 | Coming soon
--------------------------------------------------------------------------------
/brainbox/docs/tutorials.md:
--------------------------------------------------------------------------------
1 | # Tutorials
2 |
3 | Coming soon
--------------------------------------------------------------------------------
/brainbox/docs/usage/installation.md:
--------------------------------------------------------------------------------
1 | # Installing Brainbox
2 |
3 | ## Key Dependencies
4 | Brainbox requires **Python 3.6 or higher**. Other dependencies can be installed using the `requirements.txt` or `environment.yml` files included in brainbox using pip or conda, respectively.
5 |
6 | ## Brainbox requires ibllib
7 | Brainbox is for the moment a submodule of ibllib, a suite of python code used by the International Brain Lab to support its experimental infrastructure and data analysis pipelines.
8 |
9 | ## Setting up a conda environment for ibllib
10 | To install brainbox you must first install ibllib via the terminal. This process should be the same for Windows, OS X, and Linux.
11 |
12 | ```
13 | conda create -n brainbox python=3.7
14 | conda activate brainbox
15 | ```
16 |
17 | ## Clone ibllib into your machine
18 | We will use git in the terminal to clone the GitHub repository for ibllib onto our machine. Windows does not come with git installed. [You can download it here.](https://gitforwindows.org/)
19 |
20 | ```
21 | cd ~/Documents # Change this to wherever you want the ibllib directory to live
22 | git clone https://github.com/int-brain-lab/ibllib
23 | cd ibllib
24 | git checkout brainbox
25 | ```
26 |
27 | ## Tell python to use brainbox
28 | Since Brainbox is a submodule of ibllib, we will need to install all of ibllib from the top-level directory. After running the above code to clone ibllib, run the following command: (Be sure you're in the main ibllib directory!)
29 |
30 | ```
31 | pip install -e .
32 | ```
33 |
34 | All done! You should now be able to use brainbox from the brainbox conda environment. This means that in new terminal sessions, after you run
35 |
36 | ```
37 | conda activate brainbox # Or whatever you named the environment
38 | ```
39 |
40 | Your python installation will have access to brainbox via `import brainbox`.
41 |
--------------------------------------------------------------------------------
/brainbox/docs/usage/quickstart.md:
--------------------------------------------------------------------------------
1 | # Quickstart
2 |
3 | Coming soon
--------------------------------------------------------------------------------
/brainbox/examples/best_available_channels_from_insertion_id.py:
--------------------------------------------------------------------------------
1 | from one.api import ONE
2 |
3 | from iblatlas import atlas
4 | from brainbox.io.one import load_channels_from_insertion
5 |
6 | pid = "8413c5c6-b42b-4ec6-b751-881a54413628"
7 | ba = atlas.AllenAtlas()
8 |
9 | xyz = load_channels_from_insertion(ONE().alyx.rest('insertions', 'read', id=pid), ba=ba)
10 |
--------------------------------------------------------------------------------
/brainbox/examples/brainbox_plot_peth_func.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | from oneibl.one import ONE
3 |
4 | import brainbox.plot as bbp
5 |
6 | one = ONE()
7 | eid = one.search(lab='wittenlab', date='2019-08-04')[0]
8 | probe_label = 'probe00'
9 |
10 | spikes = one.load_object(eid, 'spikes', collection=f'alf/{probe_label}')
11 | trials = one.load_object(eid, 'trials', collection='alf')
12 |
13 | # For a simple peth plot without a raster, all we need to input is spike times, clusters, event
14 | # times, and the identity of the cluster we want to plot, e.g. in this case cluster 121
15 |
16 | ax = bbp.peri_event_time_histogram(spikes.times, spikes.clusters, trials.goCue_times, 121)
17 |
18 | # Or we can include a raster plot below the PETH:
19 |
20 | fig = plt.figure()
21 | ax = plt.gca()
22 | bbp.peri_event_time_histogram(spikes.times, # Spike times first
23 | spikes.clusters, # Then cluster ids
24 | trials.goCue_times, # Event markers we want to plot against
25 | 121, # Identity of the cluster we plot
26 | t_before=0.4, t_after=0.4, # Time before and after the event
27 | error_bars='sem', # Whether we want Stdev, SEM, or no error
28 | include_raster=True, # adds a raster to the bottom
29 | n_rasters=55, # How many raster traces to include
30 | ax=ax) # Make sure we plot to the axis we created
31 |
--------------------------------------------------------------------------------
/brainbox/examples/count_wheel_time_impossibilities.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 | import numpy as np
4 | import matplotlib.pyplot as plt
5 | from one.api import ONE
6 |
7 | import ibllib.plots as iblplt
8 |
9 |
10 | def find_nearest(array, value):
11 | array = np.asarray(array)
12 | idx = (np.abs(array - value)).argmin()
13 | return idx
14 |
15 |
16 | def check_wheel_angle(eid):
17 |
18 | Plot = True
19 |
20 | one = ONE()
21 | #eid = 'e1023140-50c1-462a-b80e-5e05626d7f0e' # at least 9 bad cases
22 |
23 | #eid = one.search(subject='ZM_2104', date='2019-09-19', number=1)
24 | dsets = ['_ibl_trials.feedback_times.npy', '_ibl_trials.feedbackType.npy']
25 | feedback_times, feedback_type = one.load_datasets(eid, dsets, collections='alf')
26 |
27 | wheel = one.load_object(eid, 'wheel', collection='alf')
28 | reward_success = feedback_times[feedback_type == 1]
29 | reward_failure = feedback_times[feedback_type == -1]
30 |
31 | if Plot:
32 | plt.plot(wheel['times'], wheel['position'], linestyle='', marker='o')
33 |
34 | #iblplt.vertical_lines(trials['stimOn_times'], ymin=-100, ymax=100,
35 | # color='r', linewidth=0.5, label='stimOn_times')
36 |
37 | #iblplt.vertical_lines(reward_failure, ymin=-100, ymax=100,
38 | # color='b', linewidth=0.5, label='reward_failure')
39 |
40 | iblplt.vertical_lines(reward_success, ymin=-100, ymax=100,
41 | color='k', linewidth=0.5, label='reward_success')
42 |
43 | plt.legend()
44 | plt.xlabel('time [sec]')
45 | plt.ylabel('wheel linear displacement [cm]')
46 | plt.show()
47 |
48 | # get fraction of reward deliveries with silent wheel time_delay before the reward
49 | time_delay = 0.5
50 |
51 | bad_cases1 = []
52 | for rew in reward_success:
53 |
54 | left = wheel['times'][find_nearest(wheel['times'], rew - time_delay)]
55 | right = wheel['times'][find_nearest(wheel['times'], rew)]
56 |
57 | if left == right:
58 | if left < rew - time_delay:
59 | bad_cases1.append(rew)
60 |
61 | if len(bad_cases1) == 0:
62 | print('Good news, no impossible case found.')
63 | else:
64 | print('Bad news, at least one impossible case found.')
65 | return len(bad_cases1)
66 |
--------------------------------------------------------------------------------
/brainbox/examples/decoding.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from sklearn.naive_bayes import MultinomialNB
3 | from sklearn.model_selection import KFold
4 | from one.api import ONE
5 |
6 | from brainbox.population import get_spike_counts_in_bins, classify
7 | import brainbox.io.one as bbone
8 |
9 | # %% Load in data
10 | one = ONE()
11 | eid = one.search(subject='ZM_2240', date_range=['2020-01-23', '2020-01-23'])
12 | spikes, clusters = bbone.load_spike_sorting(eid[0], one=one)
13 | trials = one.load_object(eid[0], 'trials', collection='alf')
14 |
15 | # Use probe00
16 | spikes = spikes['probe00']
17 | clusters = clusters['probe00']
18 |
19 | # %% Do decoding
20 | print('\nDecoding whether the stimulus was on the left or the right..')
21 |
22 | # Get population response matrix of all trials
23 | times = np.column_stack(((trials.goCue_times), (trials.goCue_times + 0.3))) # 0-300 ms timewindow
24 | population_activity, cluster_ids = get_spike_counts_in_bins(spikes.times, spikes.clusters, times)
25 | population_activity = population_activity.T
26 |
27 | # Get decoding target
28 | stim_sides = np.isnan(trials.contrastLeft).astype(int)
29 |
30 | # Decode using a Naive Bayes classifier with multinomial likelihood using 5-fold cross validation
31 | clf = MultinomialNB()
32 | cv = KFold(n_splits=5)
33 | accuracy, pred, prob = classify(population_activity, stim_sides, clf, cross_validation=cv)
34 |
35 | # Get the accuracy over chance
36 | print('\nNaive Bayes with 5-fold cross-validation')
37 | print('Performance: %.1f%% correct [chance level: %.1f%%]' % (
38 | accuracy * 100,
39 | ((stim_sides.sum() / stim_sides.shape[0]) * 100)))
40 |
--------------------------------------------------------------------------------
/brainbox/examples/docs_access_DLC.py:
--------------------------------------------------------------------------------
1 | """
2 | Accessing DeepLabCut (DLC) traces
3 | =================================
4 | This script illustrates how to access DLC results for a session
5 | and video type, filter them by likelihood and save as a
6 | dictionary of numpy arrays, with the keys being the tracked points
7 | and the entries being x,y coordinates. This can be done for
8 | each camera ('left' only for training sessions, 'left',
9 | 'right' and 'body' for ephys sessions).
10 | See also
11 |
12 | https://github.com/int-brain-lab/iblapps/blob/develop/dlc/DLC_labeled_video.py
13 |
14 | to make a labeled video, and
15 |
16 | https://github.com/int-brain-lab/ibllib/blob/camera_extractor/ibllib/qc/stream_dlc_labeled_frames.py
17 |
18 | to stream some frames and paint dlc labels on top.
19 | """
20 |
21 | # Author: Michael
22 | import numpy as np
23 | from one.api import ONE
24 |
25 | from ibllib.io.video import assert_valid_label
26 |
27 |
28 | def get_DLC(eid, video_type):
29 | """load dlc traces
30 | load dlc traces for a given session and
31 | video type.
32 |
33 | :param eid: A session eid
34 | :param video_type: string in 'left', 'right', body'
35 | :return: array of times and dict with dlc points
36 | as keys and x,y coordinates as values,
37 | for each frame id
38 |
39 | Example:
40 |
41 | eid = '6c6983ef-7383-4989-9183-32b1a300d17a'
42 | video_type = 'right'
43 |
44 | Times, XYs = get_DLC(eid, video_type)
45 |
46 | # get for frame 500 the x coordinate of the nose
47 | # and the time stamp:
48 |
49 | x_frame_500 = XYs['nose_tip'][0][500]
50 | t_frame_500 = Times[500]
51 | """
52 |
53 | one = ONE()
54 | video_type = assert_valid_label(video_type)
55 | cam = one.load_object(eid, f'{video_type}Camera', collection='alf')
56 | points = np.unique(['_'.join(x.split('_')[:-1]) for x in cam.dlc.columns])
57 | XYs = {}
58 | for point in points:
59 | x = np.ma.masked_where(
60 | cam.dlc[point + '_likelihood'] < 0.9, cam.dlc[point + '_x'])
61 | x = x.filled(np.nan)
62 | y = np.ma.masked_where(
63 | cam.dlc[point + '_likelihood'] < 0.9, cam.dlc[point + '_y'])
64 | y = y.filled(np.nan)
65 | XYs[point] = np.array([x, y])
66 |
67 | return cam.times, XYs
68 |
--------------------------------------------------------------------------------
/brainbox/examples/docs_explore_passive.py:
--------------------------------------------------------------------------------
"""
Explore Passive Data
====================
This example shows how to load in passive data for a session and plot the receptive field map over
depth
"""

import matplotlib.pyplot as plt
import numpy as np
from one.api import ONE

import brainbox.io.one as bbone
import brainbox.task.passive as passive

eid = '4ecb5d24-f5cc-402c-be28-9d0f7cb14b3a'
probe = 'probe00'
one = ONE(base_url='https://openalyx.internationalbrainlab.org', silent=True)

# Load the receptive field map data together with spike times and depths for the probe
rf_map = bbone.load_passive_rfmap(eid, one=one)
spike_times = one.load_dataset(eid, dataset='spikes.times.npy', collection=f'alf/{probe}')
spike_depths = one.load_dataset(eid, dataset='spikes.depths.npy', collection=f'alf/{probe}')

# Keep only spikes with a finite depth estimate
valid = np.where(~np.isnan(spike_depths))[0]
spike_times = spike_times[valid]
spike_depths = spike_depths[valid]

# Find out at what times each voxel on screen was activated:
# rf_stim_times - time of frame change
# rf_stim_pos - position of each voxel on (15 x 15) screen
# rf_stim_frames - frames at which stimulus was turned 'on' (grey to white) or 'off' (grey to
# black) at each position on screen
rf_stim_times, rf_stim_pos, rf_stim_frames = passive.get_on_off_times_and_positions(rf_map)

# For each position on screen compute average stim epoched activity across depth of probe
rf_map_avg, _ = passive.get_rf_map_over_depth(rf_stim_times, rf_stim_pos, rf_stim_frames,
                                              spike_times, spike_depths, t_bin=0.01,
                                              d_bin=160, pre_stim=0.05, post_stim=1.5)

# Take the SVD across to represent RF map across depth
rf_svd = passive.get_svd_map(rf_map_avg)

# Plot the RF maps for the on and off responses side by side
rf_maps = {'RF map On': np.vstack(rf_svd['on']), 'RF map Off': np.vstack(rf_svd['off'])}

fig, axs = plt.subplots(1, 2)
depths = np.linspace(0, 3840, len(rf_svd['on']) + 1)
for ax, (title, rf_img) in zip(axs, rf_maps.items()):
    ax.imshow(rf_img, extent=(0, 500, 0, 3840), origin='lower')
    # white horizontal lines mark the depth-bin boundaries
    ax.hlines(depths, *ax.get_xlim(), linewidth=1, colors='w')
    ax.get_xaxis().set_visible(False)
    ax.set_title(title)
plt.show()
59 |
--------------------------------------------------------------------------------
/brainbox/examples/docs_get_training_status.py:
--------------------------------------------------------------------------------
"""
Get subject training status via ONE
===================================
Use ONE to get the training status of a chosen subject or all subjects within a lab.
Training status is computed based on performance over latest 3 sessions (default) or last 3
sessions before a specified date.
"""
from one.api import ONE

import brainbox.behavior.training as training
# Connect to the public OpenAlyx database (silent=True suppresses the setup prompts)
one = ONE(base_url='https://openalyx.internationalbrainlab.org', silent=True)
# Get training status of a specific subject
training.get_subject_training_status('SWC_043', one=one)

# Get training status of a specific subject on a chosen date
# (computed from the last 3 sessions before this date)
training.get_subject_training_status('KS023', date='2019-12-10', one=one)

# Get training status of all mice within a lab
# (N.B. only looks for alive and water restricted subjects)
training.get_lab_training_status('churchlandlab', one=one)
21 |
--------------------------------------------------------------------------------
/brainbox/examples/docs_load_spike_sorting.py:
--------------------------------------------------------------------------------
"""
Get spikes, clusters and channels data
========================================
Downloads and loads in spikes, clusters and channels data for a given probe insertion.

There could be several spike sorting collections, by default the loader will get the pykilosort collection

The channel locations can come from several sources, it will load the most advanced version of the histology available,
regardless of the spike sorting version loaded. The steps are (from most advanced to fresh out of the imaging):
- alf: the final version of channel locations, same as resolved with the difference that data has been written out to files
- resolved: channel locations alignments have been agreed upon
- aligned: channel locations have been aligned, but review or other alignments are pending, potentially not accurate
- traced: the histology track has been recovered from microscopy, however the depths may not match, inaccurate data
"""

from one.api import ONE
from iblatlas.atlas import AllenAtlas
from brainbox.io.one import SpikeSortingLoader


one = ONE(base_url='https://openalyx.internationalbrainlab.org')
ba = AllenAtlas()

# Pick the first probe insertion returned by the public database
insertions = one.alyx.rest('insertions', 'list')
pid = insertions[0]['id']
sl = SpikeSortingLoader(pid=pid, one=one, atlas=ba)
spikes, clusters, channels = sl.load_spike_sorting()
# Merge the channel information into the clusters table
clusters_labeled = SpikeSortingLoader.merge_clusters(spikes, clusters, channels)

# the histology property holds the provenance of the current channel locations
print(sl.histology)

# available spike sorting collections for this probe insertion
print(sl.collections)

# the collection that has been loaded
print(sl.collection)
38 |
--------------------------------------------------------------------------------
/brainbox/examples/docs_scatter_raster_plot.py:
--------------------------------------------------------------------------------
"""
Plot raster across session
==========================
Example of how to plot scatter plot of spike depths vs spike times with colour and size of scatter
points scaled by spike amplitude
"""

import numpy as np
from one.api import ONE

from brainbox.ephys_plots import scatter_raster_plot
from brainbox.plot_base import plot_scatter

one = ONE(base_url='https://openalyx.internationalbrainlab.org', silent=True)

eid = '4ecb5d24-f5cc-402c-be28-9d0f7cb14b3a'
probe = 'probe00'

spikes = one.load_object(eid, obj='spikes', collection=f'alf/{probe}')
metrics = one.load_dataset(eid, dataset='clusters.metrics.pqt', collection=f'alf/{probe}')

# Find the clusters that have been labelled as good and their corresponding spike indices
good_clusters = np.where(metrics.label == 1)
spike_idx = np.where(np.isin(spikes['clusters'], good_clusters))[0]

# Also filter for nans in amplitude and depth
kp_idx = spike_idx[np.where(~np.isnan(spikes['depths'][spike_idx])
                            & ~np.isnan(spikes['amps'][spike_idx]))[0]]

# Get ScatterPlot object
data = scatter_raster_plot(spikes['amps'][kp_idx], spikes['depths'][kp_idx],
                           spikes['times'][kp_idx])

# Add vertical lines 100 s after the start and 100 s before the end of the recording.
# (Previously the offset was applied inside np.min/np.max - mathematically identical,
# but the comment wrongly said 10 s; the offset is hoisted out here for clarity.)
x1 = np.min(spikes['times'][kp_idx]) + 100
x2 = np.max(spikes['times'][kp_idx]) - 100
data.add_lines(pos=x1, orientation='v', style='dashed', width=3, color='k')
data.add_lines(pos=x2, orientation='v', style='dashed', width=3, color='k')


plot_dict = data.convert2dict()

fig, ax = plot_scatter(plot_dict)
45 |
--------------------------------------------------------------------------------
/brainbox/examples/lfp_plots.py:
--------------------------------------------------------------------------------
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from matplotlib.gridspec import GridSpec
from oneibl.one import ONE
from ibllib.io import spikeglx

from brainbox import lfp

# Download data
one = ONE()
eid = one.search(subject='ZM_2240', date_range=['2020-01-23', '2020-01-23'])
lf_paths = one.load(eid[0], dataset_types=['ephysData.raw.lf', 'ephysData.raw.meta',
                                           'ephysData.raw.ch'],
                    download_only=True)

# Read in raw LFP data from probe00 (first 100000 samples, all channels)
with spikeglx.Reader(lf_paths[0]) as raw:
    signal = raw.read(nsel=slice(None, 100000, None), csel=slice(None, None, None))[0]
    signal = np.rot90(signal)

ts = one.load(eid[0], 'ephysData.raw.timestamps')

# %% Calculate power spectrum and coherence between two random channels
# Use a context manager (as in the read above) so the file handle is released
# even if the spectral computations raise; previously the Reader was opened
# manually and only closed on the happy path.
with spikeglx.Reader(lf_paths[0]) as raw:
    ps_freqs, ps = lfp.power_spectrum(signal, fs=raw.fs, segment_length=1, segment_overlap=0.5)
    random_ch = np.random.choice(raw.nc, 2)
    coh_freqs, coh, phase_lag = lfp.coherence(signal[random_ch[0], :],
                                              signal[random_ch[1], :], fs=raw.fs)

# %% Create power spectrum and coherence plot

fig = plt.figure(figsize=(18, 12))
gs = GridSpec(3, 2, figure=fig)
cmap = sns.color_palette('cubehelix', 50)
ax1 = fig.add_subplot(gs[:, 0])
sns.heatmap(data=np.log10(ps[:, ps_freqs < 140]), cbar=True, ax=ax1, yticklabels=50,
            cmap=cmap, cbar_kws={'label': 'log10 power ($V^2$)'})
ax1.set(xticks=np.arange(0, np.sum(ps_freqs < 140), 50),
        xticklabels=np.array(ps_freqs[np.arange(0, np.sum(ps_freqs < 140), 50)], dtype=int),
        ylabel='Channels', xlabel='Frequency (Hz)')

ax2 = fig.add_subplot(gs[0, 1])
ax2.plot(signal[random_ch[0]])
ax2.set(ylabel='Power ($V^2$)',
        xlabel='Frequency (Hz)', title='Channel %d' % random_ch[0])

ax3 = fig.add_subplot(gs[1, 1])
ax3.plot(ps_freqs, ps[random_ch[0], :])
ax3.set(xlim=[1, 140], yscale='log', ylabel='Power ($V^2$)',
        xlabel='Frequency (Hz)', title='Channel %d' % random_ch[0])

ax4 = fig.add_subplot(gs[2, 1])
ax4.plot(coh_freqs, coh)
ax4.set(xlim=[1, 140], ylabel='Coherence', xlabel='Frequency (Hz)',
        title='Channel %d and %d' % (random_ch[0], random_ch[1]))

plt.tight_layout(pad=5)
60 |
61 |
--------------------------------------------------------------------------------
/brainbox/examples/plot_atlas_color_values.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | from iblatlas import atlas
4 | from brainbox.atlas import plot_atlas
5 |
6 |
def combine_layers_cortex(regions, delete_duplicates=False):
    """Strip cortical layer identifiers from region acronyms.

    Removes the layer suffixes '1'..'5', '6a', '6b' and any '/' from each
    acronym so that all layers of a cortical region collapse onto the same
    name (e.g. 'VISp5' -> 'VISp').

    :param regions: iterable of region acronym strings
    :param delete_duplicates: if True, return unique names only
        (order not preserved)
    :return: list of acronyms with layer identifiers removed
    """
    remove = ["1", "2", "3", "4", "5", "6a", "6b", "/"]
    # work on a copy so the caller's sequence is not mutated in place
    combined = list(regions)
    for i in range(len(combined)):
        for char in remove:
            combined[i] = combined[i].replace(char, "")
    if delete_duplicates:
        combined = list(set(combined))
    return combined
15 |
16 |
# Coordinates of slices in mm
ML = -0.5
AP = 1
DV = -2

# Generate some mock data
ba = atlas.AllenAtlas(25)
all_regions = ba.regions.acronym
regions = np.random.choice(all_regions, size=500, replace=False)  # pick 500 random regions
values = np.random.uniform(-1, 1, 500)  # generate 500 random values

# Plot atlas: one value per region, two rows of three slice views
f, axs = plt.subplots(2, 3, figsize=(20, 10))
plot_atlas(regions, values, ML, AP, DV, color_palette="RdBu_r", minmax=[-1, 1], axs=axs[0])

# Now combine all layers of cortex
plot_regions = combine_layers_cortex(regions)
combined_cortex = combine_layers_cortex(all_regions)

# Plot atlas again with the same values but cortical layers merged per region
plot_atlas(
    plot_regions,
    values,
    ML,
    AP,
    DV,
    color_palette="RdBu_r",
    minmax=[-1, 1],
    axs=axs[1],
    custom_region_list=combined_cortex,
)
48 |
--------------------------------------------------------------------------------
/brainbox/examples/raster_cluster_ordered.py:
--------------------------------------------------------------------------------
"""Raster plot of binned spiking activity, rows ordered by rastermap similarity."""
from pathlib import Path
import matplotlib.pyplot as plt
import numpy as np
import rastermap
from oneibl.one import ONE
import alf.io as ioalf
import ibllib.plots as iblplt
from iblutil.numerical import bincount2D

T_BIN = 0.01  # raster time bin size in seconds

# get the data from flatiron and the current folder
one = ONE()
eid = one.search(lab='wittenlab', date='2019-08-04')
D = one.load(eid[0])
session_path = Path(D.local_path[0]).parent

# load objects
spikes = ioalf.load_object(session_path, 'spikes')
clusters = ioalf.load_object(session_path, 'clusters')
channels = ioalf.load_object(session_path, 'channels')
trials = ioalf.load_object(session_path, 'trials')

# compute raster map as a function of cluster number
# (NB: `clusters` is rebound here from the ALF object to the cluster bin scale)
R, times, clusters = bincount2D(spikes['times'], spikes['clusters'], T_BIN)


# Using rastermap defaults to order activity matrix
# by similarity of activity (requires R to contain floats)
model = rastermap.mapping.Rastermap().fit(R.astype(float))
isort = np.argsort(model.embedding[:, 0])
R = R[isort, :]

# Alternatively, order activity by cortical depth of neurons
# d=dict(zip(spikes['clusters'],spikes['depths']))
# y=sorted([[i,d[i]] for i in d])
# isort=argsort([x[1] for x in y])
# R=R[isort,:]

# plot raster map
plt.imshow(R, aspect='auto', cmap='binary', vmax=T_BIN / 0.001 / 4,
           extent=np.r_[times[[0, -1]], clusters[[0, -1]]], origin='lower')
# plot trial start and reward time
# NOTE(review): goCue times are plotted but labelled 'valve openings' -- confirm intended
reward = trials['goCue_times']

iblplt.vertical_lines(reward, ymin=0, ymax=clusters[-1], color='m', linewidth=0.5,
                      label='valve openings')
plt.xlabel('Time (s)')
plt.ylabel('Cluster #')
plt.legend()
51 |
--------------------------------------------------------------------------------
/brainbox/examples/raster_clusters.py:
--------------------------------------------------------------------------------
"""Raster plot of binned spiking activity with trial starts and reward times overlaid."""
from pathlib import Path
import matplotlib.pyplot as plt
import numpy as np

from oneibl.one import ONE
import alf.io as ioalf
import ibllib.plots as iblplt

from iblutil.numerical import bincount2D

T_BIN = 0.01  # raster time bin size in seconds

# get the data from flatiron and the current folder
one = ONE()
eid = one.search(subject='ZM_1150', date='2019-05-07', number=1)
D = one.load(eid[0], clobber=False, download_only=True)
session_path = Path(D.local_path[0]).parent

# load objects
spikes = ioalf.load_object(session_path, 'spikes')
clusters = ioalf.load_object(session_path, 'clusters')
channels = ioalf.load_object(session_path, 'channels')
trials = ioalf.load_object(session_path, 'trials')

# compute raster map as a function of cluster number
# (NB: `clusters` is rebound here from the ALF object to the cluster bin scale)
R, times, clusters = bincount2D(spikes['times'], spikes['clusters'], T_BIN)

# plot raster map
plt.imshow(R, aspect='auto', cmap='binary', vmax=T_BIN / 0.001 / 4,
           extent=np.r_[times[[0, -1]], clusters[[0, -1]]], origin='lower')
# plot trial start and reward time (feedback times of correct trials)
reward = trials['feedback_times'][trials['feedbackType'] == 1]
iblplt.vertical_lines(trials['intervals'][:, 0], ymin=0, ymax=clusters[-1],
                      color='k', linewidth=0.5, label='trial starts')
iblplt.vertical_lines(reward, ymin=0, ymax=clusters[-1], color='m', linewidth=0.5,
                      label='valve openings')
plt.xlabel('Time (s)')
plt.ylabel('Cluster #')
plt.legend()
40 |
--------------------------------------------------------------------------------
/brainbox/examples/raster_depth_per_spike.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import numpy as np
3 | import matplotlib.pyplot as plt
4 | from oneibl.one import ONE
5 | import alf.io
6 | import scipy.stats
7 | plt.ion()
8 |
def scatter_raster(spikes):
    '''
    Create a scatter plot, time vs depth for each spike
    colored by cluster id; including vertical lines
    for stimulus type boundary times

    Note that interval should be at most 10**6 else
    the plot is too memory expensive

    :param spikes: spikes = alf.io.load_object(alf_path, 'spikes')
    :type spikes: dict
    :rtype: plot
    '''

    downsample_factor = 20

    # build a cyclic colour lookup so every cluster id maps to one of 7 colours
    uclusters = np.unique(spikes['clusters'])
    cols = ['c', 'b', 'g', 'y', 'k', 'r', 'm']
    cols_cat = (cols * int(len(uclusters) / len(cols) + 10))[:len(uclusters)]
    col_dict = dict(zip(uclusters, cols_cat))

    # downsample to keep the scatter plot memory-friendly
    z = spikes['clusters'][::downsample_factor]
    x = spikes['times'][::downsample_factor]
    y = spikes['depths'][::downsample_factor]

    # colour each remaining spike by its cluster
    cols_int = [col_dict[cluster] for cluster in z]

    plt.scatter(x, y, marker='o', s=0.01, c=cols_int)

    plt.ylabel('depth [um]')
    plt.xlabel('time [sec]')
    plt.title('downsample factor: %s' % downsample_factor)
45 |
if __name__ == '__main__':

    # download a specific session's ALF data and plot the spike scatter raster
    one = ONE()
    eid = one.search(subject='ZM_2407', date='2019-11-05', number=3)
    D = one.load(eid[0], clobber=False, download_only=True)
    alf_path = Path(D.local_path[0]).parent

    spikes = alf.io.load_object(alf_path, 'spikes')
    scatter_raster(spikes)
55 |
--------------------------------------------------------------------------------
/brainbox/examples/raster_depths.py:
--------------------------------------------------------------------------------
"""
Raster plot of binned spiking activity as a function of recording depth.

author OW
last reviewed/run 19-04-2020
"""
import matplotlib.pyplot as plt
import numpy as np

from oneibl.one import ONE
import ibllib.plots as iblplt

from iblutil.numerical import bincount2D
from brainbox.io import one as bbone

T_BIN = 0.05  # time bin in seconds
D_BIN = 5  # depth bin in um

# get the data from flatiron and the current folder
one = ONE()
eid = one.search(subject='CSHL045', date='2020-02-26', number=1)[0]

spikes, clusters, trials = bbone.load_ephys_session(eid, one=one, dataset_types=['spikes.depth'])

# sessions may hold several probes; use the first one
pname = list(spikes.keys())[0]

# compute raster map as a function of site depth
R, times, depths = bincount2D(spikes[pname]['times'], spikes[pname]['depths'], T_BIN, D_BIN)

# plot raster map
plt.imshow(R, aspect='auto', cmap='binary', vmax=T_BIN / 0.001 / 4,
           extent=np.r_[times[[0, -1]], depths[[0, -1]]], origin='lower')
# plot trial start and reward time
reward = trials['feedback_times'][trials['feedbackType'] == 1]
iblplt.vertical_lines(trials['intervals'][:, 0], ymin=0, ymax=depths[-1],
                      color='k', linewidth=0.5, label='trial starts')
iblplt.vertical_lines(reward, ymin=0, ymax=depths[-1], color='m', linewidth=0.5,
                      label='valve openings')
plt.xlabel('Time (s)')
# bug fix: the y axis is depth along the probe, not cluster number
plt.ylabel('Depth (um)')
plt.legend()
40 |
--------------------------------------------------------------------------------
/brainbox/examples/simplest_peth_plot.py:
--------------------------------------------------------------------------------
"""Simplest example: compute and plot peri-event time histograms (PETHs)
for two clusters, aligned to the go-cue times of each trial."""
import matplotlib.pyplot as plt
import numpy as np

import alf.io
from brainbox.singlecell import calculate_peths

from oneibl.one import ONE

one = ONE()
eid = one.search(subject='KS004', date=['2019-09-25'], task_protocol='ephysChoiceWorld')[0]
datasets = one.load(eid, download_only=True)
ses_path = datasets[0].local_path.parent

spikes = alf.io.load_object(ses_path, 'spikes')
trials = alf.io.load_object(ses_path, 'trials')

# PETHs for clusters 225 and 52, aligned to the go cue of each trial
peth, bs = calculate_peths(spikes.times, spikes.clusters, [225, 52], trials.goCue_times)

# mean trace per cluster
plt.plot(peth.tscale, peth.means.T)

# shade a band of +/- std/20 around each mean trace
# NOTE(review): the /20 scaling of the std band looks arbitrary -- confirm intended
for m in np.arange(peth.means.shape[0]):
    plt.fill_between(peth.tscale,
                     peth.means[m, :].T - peth.stds[m, :].T / 20,
                     peth.means[m, :].T + peth.stds[m, :].T / 20,
                     alpha=0.2, edgecolor='#1B2ACC', facecolor='#089FFF',
                     linewidth=4, linestyle='dashdot', antialiased=True)
27 |
--------------------------------------------------------------------------------
/brainbox/examples/simplest_peth_plot_aligned_to_StimON.py:
--------------------------------------------------------------------------------
"""PETH plot aligned to stimulus onset, restricted to responsive neurons."""
import matplotlib.pyplot as plt
import numpy as np

import alf.io
from brainbox.singlecell import peths
from brainbox.examples.plot_all_peths import get_session_path
# NOTE(review): wildcard import -- `are_neurons_responsive` is presumably defined here; verify
from v1_protocol_utilities.example_plotting import *

from oneibl.one import ONE  # to download data

one = ONE()
#eid = one.search(subject='KS004', date=['2019-09-25'], task_protocol='ephysChoiceWorld')[0]
eid = one.search(lab='wittenlab', date='2019-08-04')
datasets = one.load(eid, download_only=True)
#ses_path = get_session_path(datasets)
ses_path = datasets[0].local_path.parent  # local path where the data has been downloaded

spikes = alf.io.load_object(ses_path, 'spikes')
trials = alf.io.load_object(ses_path, 'trials')

# check which neurons are responsive
#are_neurons_responsive(spike_times,spike_clusters,stimulus_intervals=None,spontaneous_period=None,p_value_threshold=.05):
# spontaenous period is just 1 interval! on and off time
responsive = are_neurons_responsive(spikes.times, spikes.clusters, np.vstack((trials.stimOn_times, trials.stimOn_times + 0.5)).T, np.array([0, trials.stimOn_times[0]-1]).reshape(-1), 0.001)
#peths(spike_times, spike_clusters, cluster_ids, align_times, pre_time=0.2,
#          post_time=0.5, bin_size=0.025, smoothing=0.025, return_fr=True):

# keep the cluster ids flagged as responsive
responsive_neuron = np.unique(spikes.clusters)[responsive]

# PETHs for the responsive clusters, aligned to stimulus onset
peth, bs = peths(spikes.times, spikes.clusters, responsive_neuron, trials.stimOn_times)

plt.plot(peth.tscale, peth.means.T)

#for m in np.arange(peth.means.shape[0]):
#    plt.fill_between(peth.tscale,
#                     peth.means[m, :].T - peth.stds[m, :].T / 20,
#                     peth.means[m, :].T + peth.stds[m, :].T / 20,
#                     alpha=0.2, edgecolor='#1B2ACC', facecolor='#089FFF',
#                     linewidth=4, linestyle='dashdot', antialiased=True)
40 |
--------------------------------------------------------------------------------
/brainbox/examples/simplest_raster_plot.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import numpy as np
3 | from brainbox.processing import bincount2D
4 | import matplotlib.pyplot as plt
5 | from oneibl.one import ONE
6 | import alf.io
7 | plt.ion()
8 |
9 |
def raster_complete(R, times, Clusters, t_bin=None):
    '''
    Plot a rasterplot for the complete recording
    (might be slow, restrict R if so),
    ordered by insertion depth

    :param R: 2D binned activity array, #clusters x #timebins
    :param times: time scale; first/last values set the x extent
    :param Clusters: cluster scale; first/last values set the y extent
    :param t_bin: time bin size in seconds used to scale vmax; defaults to
        the module-level T_BIN so existing callers keep working
    '''
    if t_bin is None:
        # fall back to the global defined in the __main__ block (previous behaviour)
        t_bin = T_BIN

    plt.imshow(R, aspect='auto', cmap='binary', vmax=t_bin / 0.001 / 4,
               origin='lower', extent=np.r_[times[[0, -1]], Clusters[[0, -1]]])

    plt.xlabel('Time (s)')
    plt.ylabel('Cluster #; ordered by depth')
    plt.show()

    # plt.savefig('/home/mic/Rasters/%s.svg' %(trial_number))
    # plt.close('all')
    plt.tight_layout()
27 |
28 |
if __name__ == '__main__':

    # get data
    one = ONE()
    eid = one.search(lab='wittenlab', date='2019-08-04')
    D = one.load(eid[0], clobber=False, download_only=True)
    alf_path = Path(D.local_path[0]).parent
    spikes = alf.io.load_object(alf_path, 'spikes')

    # bin activity
    T_BIN = 0.01  # [sec]
    R, times, Clusters = bincount2D(spikes['times'], spikes['clusters'], T_BIN)

    # Order activity by anatomical depth of neurons:
    # map each cluster id to its depth, then sort the rows of R by that depth
    d = dict(zip(spikes['clusters'], spikes['depths']))
    y = sorted([[i, d[i]] for i in d])
    isort = np.argsort([x[1] for x in y])
    R = R[isort, :]

    # Check the number of clusters x number of time bins
    print(R.shape, '#clusters x #timebins')

    # get a raster plot for the complete recording
    raster_complete(R, times, Clusters)
53 |
--------------------------------------------------------------------------------
/brainbox/examples/xcorr_numpy.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import matplotlib.pyplot as plt
3 | import numpy as np
4 |
5 | from oneibl.one import ONE
6 | import alf.io as ioalf
7 |
8 | from iblutil.numerical import bincount2D
9 |
10 |
class Bunch(dict):
    """A dict subclass whose keys are also accessible as attributes."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # point the attribute namespace at the mapping itself so that
        # bunch.key and bunch['key'] always stay in sync
        self.__dict__ = self

    def copy(self):
        """Return a shallow copy of this Bunch as a new Bunch instance."""
        return Bunch(super().copy())
20 |
21 |
def one_to_bunch(spikes_obj):
    """Convert a ONE spikes object into a Bunch of spike times and spike clusters.

    :param spikes_obj: mapping with 'times' and 'clusters' entries
    :return: Bunch with 'spike_times' and 'spike_clusters' keys
    """
    # bug fix: read 'clusters' from the argument, not the module-level `spikes`
    return Bunch(spike_times=spikes_obj['times'], spike_clusters=spikes_obj['clusters'])
25 |
26 |
def firing_rates(spike_times, spike_clusters, bin_size):
    """Return the time-dependent firing rate of a population of neurons.

    :param spike_times: the spike times of all neurons, in seconds
    :param spike_clusters: the cluster numbers of all spikes
    :param bin_size: the bin size, in seconds
    :return: a (n_clusters, n_samples) array with the firing rate of every cluster

    """
    # bincount2D returns (counts, time_scale, cluster_scale); only the counts are needed
    counts, _, _ = bincount2D(spike_times, spike_clusters, bin_size)
    return counts
38 |
39 |
def xcorr(x, y, maxlags=None):
    """Cross-correlation between two 1D signals of the same length.

    :param x: first signal, 1D array-like
    :param y: second signal, same length as x
    :param maxlags: maximum lag (in samples) on each side of zero;
        defaults to the full range, len(x) - 1
    :return: array of length 2 * maxlags + 1, centred on zero lag
    :raises ValueError: if x and y differ in length
    """
    ns = len(x)
    if len(y) != ns:
        raise ValueError("x and y should have the same length.")
    # bug fix: `maxlags or ns - 1` wrongly treated maxlags=0 as "unset";
    # test explicitly for None instead
    if maxlags is None:
        maxlags = ns - 1
    return np.correlate(x, y, mode='full')[ns - 1 - maxlags:ns + maxlags]
47 |
48 |
T_BIN = 0.01  # seconds
CORR_LEN = 1  # seconds
CORR_BINS = int(CORR_LEN / T_BIN)  # bins

# get the data from flatiron and the current folder
one = ONE()
eid = one.search(subject='ZM_1150', date='2019-05-07', number=1)
D = one.load(eid[0], clobber=False, download_only=True)
session_path = Path(D.local_path[0]).parent

# load objects
spikes = ioalf.load_object(session_path, 'spikes')

# Get a Bunch instance.
b = one_to_bunch(spikes)

# Compute the firing rates.
rates = firing_rates(b.spike_times, b.spike_clusters, T_BIN)
# Note: I would rather just use spikes['times'] and spikes['clusters'] instead of going
# via a Bunch or DataFrame or similar...

# Compute the cross-correlation between the firing rate of two neurons.
c = xcorr(rates[0], rates[1], CORR_BINS)

# Plot it.
# NOTE(review): `lags` is computed but not passed to plt.plot, so the x axis is in
# bin indices rather than seconds -- confirm whether plt.plot(lags, c) was intended
lags = np.linspace(-CORR_LEN, +CORR_LEN, len(c))
plt.plot(c)
plt.show()
76 | plt.show()
77 |
--------------------------------------------------------------------------------
/brainbox/io/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/io/__init__.py
--------------------------------------------------------------------------------
/brainbox/metrics/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/metrics/__init__.py
--------------------------------------------------------------------------------
/brainbox/population/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/population/__init__.py
--------------------------------------------------------------------------------
/brainbox/task/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/task/__init__.py
--------------------------------------------------------------------------------
/brainbox/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/tests/__init__.py
--------------------------------------------------------------------------------
/brainbox/tests/fixtures/ephys_test.p:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/tests/fixtures/ephys_test.p
--------------------------------------------------------------------------------
/brainbox/tests/fixtures/parquet_records.json:
--------------------------------------------------------------------------------
1 | [{"id": "490dbe0e-8d96-44ef-bb3f-3443285376e4", "name": "_ibl_trials.intervals.npy", "dataset_type": "trials.intervals", "data_url": "http://ibl.flatironinstitute.org/churchlandlab/Subjects/CSHL046/2020-06-20/002/alf/_ibl_trials.intervals.490dbe0e-8d96-44ef-bb3f-3443285376e4.npy", "url": "https://alyx.internationalbrainlab.org/datasets/490dbe0e-8d96-44ef-bb3f-3443285376e4", "file_size": 5824, "hash": "a9d372ce849439c12243dd4c1bbf29d5", "version": "1.4.14", "collection": "alf"}, {"id": "6b6aeba4-a6c2-4a42-aa8c-b2f12e790623", "name": "_ibl_log.info.register_v1.4.11.log", "dataset_type": "_ibl_log.info", "data_url": "http://ibl.flatironinstitute.org/churchlandlab/Subjects/CSHL046/2020-06-20/002/logs/_ibl_log.info.register_v1.4.11.6b6aeba4-a6c2-4a42-aa8c-b2f12e790623.log", "url": "https://alyx.internationalbrainlab.org/datasets/6b6aeba4-a6c2-4a42-aa8c-b2f12e790623", "file_size": 146, "hash": "fe2dd9fee9720049b02fe318165222bf", "version": "1.4.14", "collection": "logs"}, {"id": "1c633c8c-7b04-458f-b7a8-c1ac62ce9dcb", "name": "_iblrig_bodyCamera.raw.mp4", "dataset_type": "_iblrig_Camera.raw", "data_url": "http://ibl.flatironinstitute.org/churchlandlab/Subjects/CSHL046/2020-06-20/002/raw_video_data/_iblrig_bodyCamera.raw.1c633c8c-7b04-458f-b7a8-c1ac62ce9dcb.mp4", "url": "https://alyx.internationalbrainlab.org/datasets/1c633c8c-7b04-458f-b7a8-c1ac62ce9dcb", "file_size": 741255803, "hash": "c8e605c3112c639ac7fd7a1a1e1e782e", "version": "1.4.14", "collection": "raw_video_data"}, {"id": "88ca7cad-6bba-49f3-af69-1056a4099474", "name": "_spikeglx_ephysData_g1_t0.imec.lf.cbin", "dataset_type": "ephysData.raw.lf", "data_url": "http://ibl.flatironinstitute.org/churchlandlab/Subjects/CSHL046/2020-06-20/002/raw_ephys_data/probe01/_spikeglx_ephysData_g1_t0.imec.lf.88ca7cad-6bba-49f3-af69-1056a4099474.cbin", "url": "https://alyx.internationalbrainlab.org/datasets/88ca7cad-6bba-49f3-af69-1056a4099474", "file_size": 3276657550, "hash": null, "version": "1.4.14", "collection": 
"raw_ephys_data/probe01"}, {"id": "3e7f3a3d-5992-4d3e-86b2-69016464beae", "name": "_spikeglx_sync.polarities.probe00.npy", "dataset_type": "_spikeglx_sync.polarities", "data_url": "http://ibl.flatironinstitute.org/churchlandlab/Subjects/CSHL046/2020-06-20/002/raw_ephys_data/probe00/_spikeglx_sync.polarities.probe00.3e7f3a3d-5992-4d3e-86b2-69016464beae.npy", "url": "https://alyx.internationalbrainlab.org/datasets/3e7f3a3d-5992-4d3e-86b2-69016464beae", "file_size": 20475896, "hash": "22ee1f879df4c0705474586b64412ea3", "version": "1.4.14", "collection": "raw_ephys_data/probe00"}]
--------------------------------------------------------------------------------
/brainbox/tests/fixtures/trials_test.pickle:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/tests/fixtures/trials_test.pickle
--------------------------------------------------------------------------------
/brainbox/tests/fixtures/wheel_test.p:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/brainbox/tests/fixtures/wheel_test.p
--------------------------------------------------------------------------------
/brainbox/tests/test_cca.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | import numpy as np
4 | import matplotlib.pylab as plt
5 |
6 | from brainbox.population import cca
7 |
8 |
class TestBunch(unittest.TestCase):

    def test_plotting(self):
        """
        This test is just to document current use in libraries in case of refactoring
        """
        correlations = np.array([.6, .2, .1, .001])
        error_bars = np.array([.1, .05, .04, .0005])
        fig, axis = plt.subplots(1, 1, figsize=(5, 5))
        # Draw two correlation traces on the same axis with distinct colours
        cca.plot_correlations(correlations, error_bars, ax=axis, color='blue')
        cca.plot_correlations(correlations * .1, error_bars, ax=axis, color='orange')

        # Shuffle data
        # ...
        # fig, ax1 = plt.subplots(1,1,figsize(10,10))
        # plot_correlations(corrs, ... , ax=ax1, color='blue')
        # plot_correlations(shuffled_coors, ..., ax=ax1, color='red')
        # plt.show()
27 |
28 |
if __name__ == "__main__":
    # exit=False keeps the interpreter alive after the run (useful interactively)
    unittest.main(exit=False)
31 |
--------------------------------------------------------------------------------
/brainbox/tests/test_singlecell.py:
--------------------------------------------------------------------------------
1 | from brainbox.singlecell import acorr, calculate_peths
2 | import unittest
3 | import numpy as np
4 |
5 |
class TestPopulation(unittest.TestCase):
    def test_acorr_0(self):
        """Autocorrelogram of integer spike times over 7 one-second bins."""
        spikes = np.array([0, 10, 10, 20])
        # Expected: a single count in the centre bin only
        expected = np.zeros(7, dtype=np.int32)
        expected[3] = 1
        result = acorr(spikes, bin_size=1, window_size=2 * 3 + 1)
        self.assertTrue(np.allclose(result, expected))

    def test_acorr_1(self):
        """Same as test_acorr_0 but with float64 spike times."""
        spikes = np.array([0, 10, 10, 20], dtype=np.float64)
        # Expected: a single count in the centre bin only
        expected = np.zeros(7, dtype=np.float64)
        expected[3] = 1
        result = acorr(spikes, bin_size=1, window_size=2 * 3 + 1)
        self.assertTrue(np.allclose(result, expected))
30 |
31 |
class TestPeths(unittest.TestCase):
    def test_peths_synthetic(self):
        """Check PETH and firing-rate output shapes on synthetic random spikes."""
        n_spikes, n_clusters, n_events = 20000, 20, 200
        record_length = 1654
        cluster_sel = [1, 2, 3, 6, 15, 16]
        # Fixed seed so the synthetic data set is reproducible
        np.random.seed(seed=42)
        spike_times = np.sort(np.random.rand(n_spikes,) * record_length)
        spike_clusters = np.random.randint(0, n_clusters, n_spikes)
        event_times = np.sort(np.random.rand(n_events,) * record_length)

        peth, fr = calculate_peths(
            spike_times, spike_clusters, cluster_ids=cluster_sel, align_times=event_times)
        # One row of means/stds per selected cluster; 28 time bins by default
        self.assertTrue(peth.means.shape[0] == len(cluster_sel))
        self.assertTrue(np.all(peth.means.shape == peth.stds.shape))
        self.assertTrue(np.all(fr.shape == (n_events, len(cluster_sel), 28)))
        self.assertTrue(peth.tscale.size == 28)
50 |
51 |
def test_firing_rate():
    # TODO: placeholder — no assertions implemented yet
    pass
54 |
55 |
if __name__ == "__main__":
    # Seed the global RNG for reproducibility of any randomised tests
    np.random.seed(0)
    # exit=False keeps the interpreter alive after the run (useful interactively)
    unittest.main(exit=False)
59 |
--------------------------------------------------------------------------------
/brainbox/tests/test_video.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | import numpy as np
4 |
5 | from brainbox import video
6 |
7 |
8 | class TestVideo(unittest.TestCase):
9 | def setUp(self) -> None:
10 | """Test frames are 12 2x2x3 arrays, where
11 | frames[0][...,0] =
12 | [[1, 1],
13 | [1, 1]]
14 |
15 | frames[1][...,0] =
16 | [[2, 2],
17 | [2, 2]]
18 |
19 | [...]
20 |
21 | frames[-1][...,0] =
22 | [[12, 12],
23 | [12, 12]]
24 | """
25 | self.frames = np.cumsum(np.ones((12, 2, 2, 3)), axis=0).astype(np.uint8)
26 |
27 | def test_frame_diff(self):
28 | # Test with three colour channels (2x2x3)
29 | frame1, frame2 = [self.frames[i] for i in range(2)]
30 | df = video.frame_diff(frame1, frame2)
31 | expected = np.ones(frame1.shape[:2], dtype=np.uint8)
32 | np.testing.assert_equal(df, expected)
33 |
34 | # Test with single channel (2x2)
35 | df = video.frame_diff(frame1[..., 0], frame2[..., 0])
36 | np.testing.assert_equal(df, expected)
37 |
38 | # Test shape validation
39 | with self.assertRaises(ValueError):
40 | video.frame_diff(frame1[..., 0], frame2)
41 |
42 | def test_frame_diffs(self):
43 | # Test frame diffs defaults
44 | df = video.frame_diffs(self.frames)
45 | expected = np.ones((self.frames.shape[0] - 1, *self.frames.shape[1:-1]), dtype=np.uint8)
46 | np.testing.assert_equal(df, expected)
47 |
48 | # Test shape validation
49 | with self.assertRaises(ValueError):
50 | video.frame_diffs(self.frames, diff=20)
51 |
52 | # Test frames diff every 2nd frame with intensity frames
53 | d = 2 # Take difference every two frames
54 | df = video.frame_diffs(self.frames[..., 0], d)
55 | expected_shape = (self.frames.shape[0] - d, *self.frames.shape[1:-1])
56 | expected = np.ones(expected_shape, dtype=np.uint8) * 2
57 | np.testing.assert_equal(df, expected)
58 |
59 |
if __name__ == '__main__':
    # Run the video tests when executed as a script
    unittest.main()
62 |
--------------------------------------------------------------------------------
/examples/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/examples/__init__.py
--------------------------------------------------------------------------------
/examples/archive/audio_pipeline.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import matplotlib.pyplot as plt
3 | import numpy as np
4 | import seaborn as sns
5 |
6 | import alf.io
7 |
8 | from ibllib.io.extractors import training_audio as audio
9 |
# Root folder scanned recursively for raw .wav audio files
main_path = '/mnt/s0/Data/Subjects'

# step 1 is to launch the extraction on audio
for wfile in Path(main_path).rglob('*.wav'):
    print(wfile)
    # the session folder is two directory levels above the .wav file
    ses_path = wfile.parents[1]
    audio.extract_sound(ses_path, save=True)

# step 2 plot the result - here for the last session only
# (ses_path still holds the last value from the loop above)
D = alf.io.load_object(ses_path / 'alf', 'audioSpectrogram')

cues = alf.io.load_object(ses_path / 'alf', 'audioOnsetGoCue',
                          attribute='times', timescale='microphone')
# time / frequency limits used as the image extent below
tlims = D['times_microphone'][[0, -1]].flatten()
flims = D['frequencies'][0, [0, -1]].flatten()

fig = plt.figure(figsize=[16, 7])
ax = plt.axes()

# spectrogram displayed in dB (20 * log10 of the power)
im = ax.imshow(20 * np.log10(D['power'].T), aspect='auto', cmap=plt.get_cmap('magma'),
               extent=np.concatenate((tlims, flims)), origin='lower')
# overlay go-cue onset times as black stars at a fixed 5 kHz height
ax.plot(cues['times_microphone'], cues['times_microphone'] * 0 + 5000, '*k')
ax.set_xlabel(r'Time (s)')
ax.set_ylabel(r'Frequency (Hz)')
plt.colorbar(im)
im.set_clim(-100, -60)

# summary of power across time: 10th/90th percentile band, median and mean per frequency
sns.set_style("whitegrid")
db_q = 20 * np.log10(np.percentile(D['power'], [10, 90], axis=0))
plt.figure()
ax = plt.axes()
ax.plot(D['frequencies'].flatten(), 20 * np.log10(np.median(D['power'], axis=0)), label='median')
ax.plot(D['frequencies'].flatten(), 20 * np.log10(np.mean(D['power'], axis=0)), label='average')
ax.fill_between(D['frequencies'].flatten(), db_q[0, :], db_q[1, :], alpha=0.5)
ax.set_ylabel(r'dBFS')
ax.set_xlabel(r'Frequency (Hz)')
46 |
--------------------------------------------------------------------------------
/examples/archive/ephys_qc_raw.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 | import matplotlib.pyplot as plt
4 | import numpy as np
5 | import seaborn as sns
6 |
7 | from ibllib.ephys import ephysqc
8 | import alf.io
9 |
10 |
def _plot_spectra(outpath, typ, savefig=True):
    """
    Plot the spectral density of an ephys recording, one grey trace per channel
    plus the across-channel median, and optionally save the figure.

    :param outpath: pathlib.Path of the folder containing the iblqc ALF objects
    :param typ: band name, 'ap' or 'lf' (selects the object suffix and y-limits)
    :param savefig: if True, save '<typ>_spec.png' at 150 dpi in outpath
    """
    spec = alf.io.load_object(outpath, 'ephysSpectralDensity' + typ.upper(), namespace='iblqc')

    sns.set_style("whitegrid")
    plt.figure(figsize=[9, 4.5])
    ax = plt.axes()
    # + 1e-14 avoids log10(0) for silent channels / bins
    ax.plot(spec['freqs'], 20 * np.log10(spec['power'] + 1e-14),
            linewidth=0.5, color=[0.5, 0.5, 0.5])
    ax.plot(spec['freqs'], 20 * np.log10(np.median(spec['power'] + 1e-14, axis=1)), label='median')
    ax.set_xlabel(r'Frequency (Hz)')
    ax.set_ylabel(r'dB rel to $V^2.$Hz$^{-1}$')
    # band-specific display ranges
    if typ == 'ap':
        ax.set_ylim([-275, -125])
    elif typ == 'lf':
        ax.set_ylim([-260, -60])
    ax.legend()
    if savefig:
        plt.savefig(outpath / (typ + '_spec.png'), dpi=150)
29 |
30 |
def _plot_rmsmap(outpath, typ, savefig=True):
    """
    Plot a heat map of RMS over time for each channel, with the median RMS per
    channel in a side panel, and optionally save the figure.

    Bug fix: the parameter was previously named ``outfil`` while the body read
    the module-level ``outpath`` global, so the argument was silently ignored.
    It is renamed ``outpath`` so the function actually uses its argument
    (the only known caller passes it positionally).

    :param outpath: pathlib.Path of the folder containing the iblqc ALF objects
    :param typ: band name, 'ap' or 'lf' (selects the object suffix and colour limits)
    :param savefig: if True, save '<typ>_rms.png' at 150 dpi in outpath
    """
    rmsmap = alf.io.load_object(outpath, 'ephysTimeRms' + typ.upper(), namespace='iblqc')
    plt.figure(figsize=[12, 4.5])
    # main image, side RMS-profile panel and colourbar axes
    axim = plt.axes([0.2, 0.1, 0.7, 0.8])
    axrms = plt.axes([0.05, 0.1, 0.15, 0.8])
    axcb = plt.axes([0.92, 0.1, 0.02, 0.8])

    # median RMS per channel (scaled by 1e6, presumably V -> uV; confirm units)
    axrms.plot(np.median(rmsmap['rms'], axis=0)[:-1] * 1e6, np.arange(1, rmsmap['rms'].shape[1]))
    axrms.set_ylim(0, rmsmap['rms'].shape[1])

    # RMS map in dB; + 1e-15 avoids log10(0)
    im = axim.imshow(20 * np.log10(rmsmap['rms'].T + 1e-15), aspect='auto', origin='lower',
                     extent=[rmsmap['timestamps'][0], rmsmap['timestamps'][-1],
                             0, rmsmap['rms'].shape[1]])
    axim.set_xlabel(r'Time (s)')
    axim.set_ylabel(r'Channel Number')
    plt.colorbar(im, cax=axcb)
    # band-specific display ranges
    if typ == 'ap':
        im.set_clim(-110, -90)
        axrms.set_xlim(100, 0)
    elif typ == 'lf':
        im.set_clim(-100, -60)
        axrms.set_xlim(500, 0)
    axim.set_xlim(0, 4000)
    if savefig:
        plt.savefig(outpath / (typ + '_rms.png'), dpi=150)
56 |
57 |
if __name__ == "__main__":
    # Example raw AP binary; extract_rmsmap writes the iblqc ALF objects next to it
    fbin = Path('/mnt/s1/Data/Subjects/ZM_1735/2019-08-01/001/raw_ephys_data/probe_left/'
                '_iblrig_ephysData.raw_g0_t0.imec.ap.bin')
    ephysqc.extract_rmsmap(fbin)  # make sure you send a path for the time being and not a string
    typ = 'lf'  # band to plot: 'lf' or 'ap'
    outpath = fbin.parent
    _plot_spectra(outpath, typ)
    _plot_rmsmap(outpath, typ)
66 |
--------------------------------------------------------------------------------
/examples/archive/ephys_synch.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from ibllib.ephys.sync_probes import apply_sync
3 |
# Sync model file produced by the probe synchronisation task
sync_file = "/path/to/my/probe01/_spikeglx_ephysData_g0_t0.imec.sync.npy"
# Timestamps (seconds) to convert between clocks
times_secs = np.arange(600)
# forward=True: convert from the probe clock to the main clock — TODO confirm direction
interp_times = apply_sync(sync_file, times_secs, forward=True)
7 |
--------------------------------------------------------------------------------
/examples/archive/ibllib/synchronisation_ephys.py:
--------------------------------------------------------------------------------
1 | import ibldsp.utils
2 | import spikeglx
3 | import ibllib.io.extractors.ephys_fpga
4 |
BATCH_SIZE_SAMPLES = 50000  # number of samples read per window

# full path to the raw ephys
raw_ephys_apfile = ('/datadisk/Data/Subjects/ZM_1150/2019-05-07/001/raw_ephys_data/probe_right/'
                    'ephysData_g0_t0.imec.ap.bin')
output_path = '/home/olivier/scratch'

# load reader object, and extract sync traces
sr = spikeglx.Reader(raw_ephys_apfile)
sync = ibllib.io.extractors.ephys_fpga._sync_to_alf(sr, output_path, save=False)

# if the data is needed as well, loop over the file
# raw data contains raw ephys traces, while raw_sync contains the 16 sync traces
# overlap=1 makes consecutive windows share one sample
wg = ibldsp.utils.WindowGenerator(sr.ns, BATCH_SIZE_SAMPLES, overlap=1)
for first, last in wg.firstlast:
    rawdata, rawsync = sr.read_samples(first, last)
    wg.print_progress()
22 |
--------------------------------------------------------------------------------
/examples/archive/one_plot_psychometric_curve.py:
--------------------------------------------------------------------------------
1 | '''
2 | TODO CHECK THIS IS THE REGULAR WAY TO PLOT PSYC CURVE
3 | Plot psychometric curve from behavior data downloaded via ONE.
4 |
5 | Uses the functions get_behavior() and plot_psychometric()
6 | from the module TODO
7 | '''
8 | # Author: Olivier Winter, Anne Urai
9 |
10 | import matplotlib.pyplot as plt
11 |
12 | from oneibl.one import ONE
13 |
14 | from load_mouse_data import get_behavior # TODO WRITE DEPENDENCY;
15 | from behavior_plots import plot_psychometric # TODO THESE MODULES ARE NOT IN IBLLIB
16 |
one = ONE()

# Use function to get behavioral information
# NOTE(review): get_behavior / plot_psychometric are not part of ibllib (see TODOs above)
df = get_behavior('IBL_14', date_range='2018-11-27')

# Use function to plot the psychometric curve
plt.figure()
plot_psychometric(df, ax=plt.axes(), color="orange")

# Get session information (FYI, not used for plotting)
# https://alyx.internationalbrainlab.org/admin/actions/session/e752b02d-b54d-4373-b51e-0b31be5f8ee5/change/
ses_ids = one.search(subjects='IBL_14', date_range='2018-11-27')
print(one.list(ses_ids[0]))
30 |
--------------------------------------------------------------------------------
/examples/archive/rest_water_restrictions.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/examples/archive/rest_water_restrictions.py
--------------------------------------------------------------------------------
/examples/archive/rest_weighings.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | import pandas as pd
3 |
4 | from oneibl.one import ONE
5 | from ibllib.time import isostr2date
6 |
one = ONE()

# this gets all weighings for all subjects
# NOTE(review): this value is overwritten by the per-subject query just below
wei = one.alyx.rest('weighings', 'list')

# this gets the weighings for one subject
subject = '437'
wei = pd.DataFrame(one.alyx.rest('weighings', 'list', '?nickname=' + subject))
plt.plot_date(wei['date_time'].apply(isostr2date), wei['weight'])

# to list administrations for one subject, it is better to use the subjects endpoint
sub_info = one.alyx.rest('subjects', 'read', '437')
wei = pd.DataFrame(sub_info['weighings'])
# convert ISO strings to datetime objects, then sort chronologically before plotting
wei['date_time'].apply(isostr2date)
wei.sort_values('date_time', inplace=True)
plt.plot(wei.date_time, wei.weight)
23 |
--------------------------------------------------------------------------------
/examples/ephys/example_amp_depth_scatter.py:
--------------------------------------------------------------------------------
1 | """
2 | Scatter plot of spike depth, amplitude and firing rate
3 | ==========================
4 | Example of how to plot scatter plot of spike depths vs spike times vs spike firing rate
5 | """
6 |
7 | from one.api import ONE
8 | from brainbox.io.one import SpikeSortingLoader
9 | from iblatlas.atlas import AllenAtlas
10 |
11 | import matplotlib.pyplot as plt
12 | import numpy as np
13 | from brainbox.ephys_plots import scatter_amp_depth_fr_plot, plot_brain_regions
14 |
one = ONE()
ba = AllenAtlas()
pid = 'da8dfec1-d265-44e8-84ce-6ae9c109b8bd'  # probe insertion ID

# Load in spikesorting data
sl = SpikeSortingLoader(pid=pid, one=one, atlas=ba)
spikes, clusters, channels = sl.load_spike_sorting()
clusters = sl.merge_clusters(spikes, clusters, channels)

# Find the index of good spikes
# label == 1 selects spikes from units labelled as good — confirm labelling convention
good_idx = np.isin(spikes['clusters'], clusters['cluster_id'][clusters['label'] == 1])


# Make plot: all units, good units, and the brain regions along the probe
fig, axs = plt.subplots(1, 3, gridspec_kw={'width_ratios': [3, 3, 1], 'wspace': 1})
_ = scatter_amp_depth_fr_plot(spikes['amps'], spikes['clusters'], spikes['depths'], spikes['times'], title='all units',
                              display=True, ax=axs[0])
_ = scatter_amp_depth_fr_plot(spikes['amps'][good_idx], spikes['clusters'][good_idx], spikes['depths'][good_idx],
                              spikes['times'][good_idx], title='good_units', display=True, ax=axs[1])
_ = plot_brain_regions(channels['atlas_id'], channel_depths=channels['axial_um'], ax=axs[2], label='right', display=True)

# Clean up plot: middle panel shares the depth axis with the first
axs[1].get_yaxis().set_visible(False)

plt.show()
40 |
--------------------------------------------------------------------------------
/examples/one/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/examples/one/__init__.py
--------------------------------------------------------------------------------
/examples/one/behavior/number_mice_inproject.py:
--------------------------------------------------------------------------------
1 | """
2 | Quick search through the Alyx database to see all mice/sessions
3 | ever used in training.
4 | """
5 | # Author: Gaelle Chapuis, Olivier
6 | from one.api import ONE
7 |
one = ONE()

# Get all sessions that contains 'world' in task protocol, and associate to specific project
ses = one.alyx.rest('sessions', 'list',
                    task_protocol='world',
                    project='ibl_neuropixel_brainwide_01')

# Do the same query, but get the subjects associated to sessions with given features
# (django filter syntax: comma-separated field lookups on the subjects endpoint)
subs = one.alyx.rest('subjects', 'list',
                     django=('actions_sessions__task_protocol__icontains,world,'
                             'actions_sessions__project__name,ibl_neuropixel_brainwide_01'))

print(f'N subjects: {len(subs)} - N sessions: {len(ses)}')
21 |
--------------------------------------------------------------------------------
/examples/one/behavior/plot_microphone_spectrogram.py:
--------------------------------------------------------------------------------
1 | """
2 | For a given session eid, plot spectrogram of sound recorded via the microphone.
3 | """
4 | # Author: Gaelle Chapuis, Miles Wells
5 |
6 | import numpy as np
7 | import matplotlib.pyplot as plt
8 | from one.api import ONE
9 |
one = ONE()

# NOTE(review): this list is informational only; it is not passed to the
# load_object call below, which fetches the whole object directly
dataset_types = [
    '_iblmic_audioSpectrogram.frequencies',
    '_iblmic_audioSpectrogram.power',
    '_iblmic_audioSpectrogram.times_mic']

eid = '098bdac5-0e25-4f51-ae63-995be7fe81c7'  # TEST EXAMPLE

TF = one.load_object(eid, 'audioSpectrogram', collection='raw_behavior_data')

# -- Plot spectrogram
# time / frequency limits used as the image extent
tlims = TF['times_mic'][[0, -1]]
flims = TF['frequencies'][0, [0, -1]]
fig = plt.figure(figsize=[16, 7])
ax = plt.axes()
# power shown in dB (20 * log10)
im = ax.imshow(20 * np.log10(TF['power'].T), aspect='auto', cmap=plt.get_cmap('magma'),
               extent=np.concatenate((tlims, flims)),
               origin='lower')
ax.set_xlabel(r'Time (s)')
ax.set_ylabel(r'Frequency (Hz)')
plt.colorbar(im)
im.set_clim(-100, -60)
33 |
--------------------------------------------------------------------------------
/examples/one/behavior/plot_weight_curve.py:
--------------------------------------------------------------------------------
1 | '''
2 | Plot weight curve from behavior data downloaded via ONE.
3 | '''
4 | # Author: Olivier Winter
5 |
6 | import matplotlib.pyplot as plt
7 | import pandas as pd
8 | from oneibl.one import ONE
9 |
10 | from ibllib.time import isostr2date
11 |
# Get the subject information.
# We want in particular weighings, that is only accessible through the rest endpoint.
subject_details = ONE().alyx.rest('subjects', 'read', 'IBL_14')

# Get and show list of keys, check 'weighings' is present
k = subject_details.keys()
print(k)

if 'weighings' in k:
    # Put the weighings data into a pandas dataframe
    wei = pd.DataFrame(subject_details['weighings'])
    # Convert ISO date strings to datetimes, then sort chronologically
    wei['date_time'].apply(isostr2date)
    wei.sort_values('date_time', inplace=True)

    # Plot the weight curve
    # https://alyx.internationalbrainlab.org/admin-actions/water-history/37c8f897-cbcc-4743-bad6-764ccbbfb190

    plt.plot(wei.date_time, wei.weight)
    plt.show()
31 |
--------------------------------------------------------------------------------
/examples/one/behavior/print_water_administrations.py:
--------------------------------------------------------------------------------
1 | """
2 | Print water administration values from behavior data downloaded via ONE.
3 | """
4 | # Author: Olivier Winter
5 |
6 | from pprint import pprint
7 | from one.api import ONE
8 |
one = ONE()

# -- Get saved water administration --
# List all water administrations
wa = one.alyx.rest('water-administrations', 'list')

# To list administrations for one subject, it is better to use the subjects endpoint
subject_details = one.alyx.rest('subjects', 'read', 'ZM_346')
pprint(subject_details['water_administrations'][0:2])  # Print the first 2 water admin.
18 |
--------------------------------------------------------------------------------
/examples/one/behavior/stream_video_frames.py:
--------------------------------------------------------------------------------
1 | from ibllib.io.video import VideoStreamer
2 |
FRAME_ID = 4000  # index of the video frame to fetch

# example 1: with URL directly
url = "http://ibl.flatironinstitute.org/mainenlab/Subjects/ZM_1743/2019" \
      "-06-17/001/raw_video_data/_iblrig_leftCamera.raw.00002677-a6d1-49fb-888b-66679184ee0e.mp4"
vs = VideoStreamer(url)
f, im = vs.get_frame(FRAME_ID)

# example 2: with a dataset record looked up via ONE
from one.api import ONE  # noqa
one = ONE()
eid = "a9fb578a-9d7d-42b4-8dbc-3b419ce9f424"
dset = one.alyx.rest('datasets', 'list', session=eid, name='_iblrig_leftCamera.raw.mp4')
vs = VideoStreamer(dset[0])
f, im = vs.get_frame(FRAME_ID)
18 |
--------------------------------------------------------------------------------
/examples/one/behavior/water_administrations_add_new.py:
--------------------------------------------------------------------------------
1 | '''
2 | Programmatically add a new water administration onto the Alyx database via ONE.
3 | '''
4 | # Author: Olivier Winter
5 |
6 | from one.api import ONE
7 |
one = ONE(base_url='https://dev.alyx.internationalbrainlab.org')

# This is how to programmatically create and add a water administration

wa_ = {
    'subject': 'ZM_346',
    'date_time': '2018-11-25T12:34',
    'water_administered': 25,
    'water_type': 'Water 10% Sucrose',
    'user': 'olivier',
    'session': 'f4b13ba2-1308-4673-820e-0e0a3e0f2d73',
    'adlib': True}

# Change the data on the database
# Do not use the example on anything else than alyx-dev !
# (guard: only writes when connected to the dev database)
if one.alyx.base_url == 'https://dev.alyx.internationalbrainlab.org':
    rep = one.alyx.rest('water-administrations', 'create', data=wa_)
25 |
--------------------------------------------------------------------------------
/examples/one/behavior/water_administrations_weekend.py:
--------------------------------------------------------------------------------
1 | '''
2 | Programmatically add new water administrations for the week-end onto the Alyx database via ONE.
3 | '''
4 | # Author: Olivier Winter
5 |
6 | from one.api import ONE
7 |
8 | one = ONE(base_url='https://dev.alyx.internationalbrainlab.org')
9 |
10 |
11 | # Define an example function to input 'Hydrogel 5% Citric Acid' with user 'valeria' on Alyx dev
def _example_change_wa(dates, sub):
    """Create a 'Hydrogel 5% Citric Acid' administration for every (date, subject)
    pair, as user 'valeria'. Records are only written when the module-level ``one``
    client is connected to the alyx-dev database."""
    for date_time in dates:
        for nickname in sub:
            record = {
                'subject': nickname,
                'date_time': date_time,
                'water_type': 'Hydrogel 5% Citric Acid',
                'user': 'valeria',
                'adlib': True}
            # Do not use the example on anything else than alyx-dev !
            if one.alyx.base_url == 'https://dev.alyx.internationalbrainlab.org':
                one.alyx.rest('water-administrations', 'create', data=record)
24 |
25 |
# Define date range
dates = ['2018-11-19T12:00', '2018-11-22T12:00', '2018-11-23T12:00']

# --Option 1-- You can either give manually a list of subject nicknames
sub_manual = ['IBL_1',
              'IBL_10',
              'IBL_47']

# Call function to execute change on Alyx
_example_change_wa(dates, sub_manual)

# --Option 2-- Or find all subject nicknames programmatically
# (all living, water-restricted subjects in the zadorlab)
subjects = one.alyx.rest('subjects', 'list', alive=True, lab='zadorlab', water_restricted=True)
sub_prog = [s['nickname'] for s in subjects]

# Call function to execute change on Alyx
_example_change_wa(dates, sub_prog)
43 |
--------------------------------------------------------------------------------
/examples/one/ephys/First_pass_progress/firstpass_scatterplot.py:
--------------------------------------------------------------------------------
1 | """
2 | Get all trajectories from given provenance,
3 | that have better QC than given status,
4 | plot as scatter
5 | """
6 |
7 | from ibl_pipeline import acquisition
8 | from ibl_pipeline.analyses import behavior as behavior_analysis
9 | import numpy as np
10 | import matplotlib.pyplot as plt
11 | import pandas as pd
12 | from one.api import ONE
13 |
14 |
# Planned trajectories from the brainwide-map project whose session QC is below
# CRITICAL (qc < 50, see the choices table below)
traj = ONE().alyx.rest('trajectories', 'list', provenance='Planned',
                       django='probe_insertion__session__project__name__'
                              'icontains,ibl_neuropixel_brainwide_01,'
                              'probe_insertion__session__qc__lt,50')

# Ephys aligned histology track, Histology track, Micro-manipulator, Planned

# QC_CHOICES = [
#     (50, 'CRITICAL',),
#     (40, 'FAIL',),
#     (30, 'WARNING',),
#     (0, 'NOT_SET',),
#     (10, 'PASS',),
# ]

eids_traj = [p['session']['id'] for p in traj]
print(f'N traj with QC : {len(eids_traj)}')

# DATAJOINT query to combine with behavioral criterion
data_all = (acquisition.Session & [{'session_uuid': i_e} for i_e in eids_traj] &
            (behavior_analysis.SessionTrainingStatus & 'good_enough_for_brainwide_map=1'))
data_eids = data_all.proj('session_uuid')
df = data_eids.fetch(format='frame').reset_index()

# session UUIDs come back as objects; stringify for dict lookups below
eids_good = df['session_uuid'].values.tolist()
eids_good = [str(_) for _ in eids_good]
print(f'N traj good : {len(eids_good)}')

# Get ml / ap of only those that are good
traj_dict = {str(p['session']['id']): (p['x'], p['y']) for p in traj}
ml = np.array([traj_dict[eid][0] for eid in eids_good])
ap = np.array([traj_dict[eid][1] for eid in eids_good])

# Read CSV containing all x / y positions to be done
# NOTE(review): hard-coded local path; adapt before running
data = pd.read_csv(
    "/Users/gaelle/Documents/Git/Scrapbook/Needles/Firstpassmap_x_y.csv")
ap_fm = data['ap_um']
ml_fm = data['ml_um']

# Plot: full first-pass map in black, completed (good) trajectories in green
fig, ax = plt.subplots()
ax.scatter(ap_fm, ml_fm, color='black', alpha=0.1)
ax.scatter(ap, ml, color='green', alpha=0.4)

ax.set_xlim(4000, -8000)  # decreasing x
60 |
--------------------------------------------------------------------------------
/examples/one/ephys/README.txt:
--------------------------------------------------------------------------------
One ephys examples
==================

Below is a gallery of example scripts using ONE to access ephys data.
--------------------------------------------------------------------------------
/examples/one/ephys/docs_compute_drift.py:
--------------------------------------------------------------------------------
"""
Download data and plot drift over the session
==============================================

Downloads the spike sorting output (times, amplitudes and depths) for a given
session and probe, and estimates the electrode drift over the session.
"""

# import modules
from one.api import ONE
from brainbox.metrics import electrode_drift

# instantiate one
one = ONE(base_url='https://openalyx.internationalbrainlab.org', silent=True)

# Specify subject, date and probe we are interested in
subject = 'CSHL049'
date = '2020-01-08'
sess_no = 1
probe_label = 'probe00'
eid = one.search(subject=subject, date=date, number=sess_no)[0]

# Download and load the spikes data
spikes = one.load_object(eid, 'spikes', collection=f'alf/{probe_label}')

# Use brainbox function to compute drift over session
drift = electrode_drift.estimate_drift(spikes['times'], spikes['amps'], spikes['depths'],
                                       display=True)
29 |
--------------------------------------------------------------------------------
/examples/one/ephys/docs_get_first_pass_map_sessions.py:
--------------------------------------------------------------------------------
"""
Get first pass map sessions
===========================
Use ONE to get information about sessions included in first pass map

"""
from one.api import ONE
one = ONE(base_url='https://openalyx.internationalbrainlab.org', silent=True)

# returns the list of session eids belonging to the first-pass-map project
first_pass_map_sessions = one.search(project='ibl_neuropixel_brainwide_01')
11 |
--------------------------------------------------------------------------------
/examples/one/ephys/docs_get_power_spectrum_data.py:
--------------------------------------------------------------------------------
"""
Download and plot power spectrum of raw data
============================================

Downloads LFP power spectrum for a given session and probe and plots a heatmap of power spectrum
on the channels along probe against frequency
"""

# import modules
from one.api import ONE
import matplotlib.pyplot as plt
import numpy as np

# instantiate ONE
one = ONE(base_url='https://openalyx.internationalbrainlab.org', silent=True)

# Specify subject, date and probe we are interested in
subject = 'CSHL049'
date = '2020-01-08'
sess_no = 1
probe_label = 'probe00'
eid = one.search(subject=subject, date=date, number=sess_no)[0]

# Download the data
# channels.rawInd: Index of good recording channels along probe
# channels.localCoordinates: Position of each recording channel along probe
channels = one.load_object(eid, 'channels', collection=f'alf/{probe_label}')
# Get range for y-axis
depth_range = [np.min(channels.localCoordinates[:, 1]),
               np.max(channels.localCoordinates[:, 1])]

# Load in power spectrum data
lfp_spectrum = one.load_object(eid, 'ephysSpectralDensityLF',
                               collection=f'raw_ephys_data/{probe_label}')
lfp_freq = lfp_spectrum['freqs']
lfp_power = lfp_spectrum['power'][:, channels.rawInd]

# Define a frequency range of interest
freq_range = [0, 300]
freq_idx = np.where((lfp_freq >= freq_range[0]) &
                    (lfp_freq < freq_range[1]))[0]

# Limit data to freq range of interest and also convert to dB
# NOTE(review): np.log is the natural log; dB conversion is conventionally
# 10 * log10 — confirm whether log10 was intended here
lfp_spectrum_data = 10 * np.log(lfp_power[freq_idx, :])
dB_levels = np.quantile(lfp_spectrum_data, [0.1, 0.9])

# Create figure
fig, ax = plt.subplots()
# Plot the LFP spectral data
spectrum_plot = ax.imshow(lfp_spectrum_data.T, extent=np.r_[freq_range, depth_range],
                          cmap='viridis', vmin=dB_levels[0], vmax=dB_levels[1], origin='lower',
                          aspect='auto')
cbar = fig.colorbar(spectrum_plot, ax=ax)
cbar.set_label('LFP power (dB)')
ax.set_xlabel('Frequency (Hz)')
ax.set_ylabel('Depth along probe (um)')
ax.set_title('Power Spectrum of LFP')

plt.show()
60 |
--------------------------------------------------------------------------------
/examples/one/ephys/docs_plot_sound_spectrogram_ephysrig.py:
--------------------------------------------------------------------------------
"""
Plot audio spectrogram
======================
For a given session eid (ephys session), plot spectrogram of sound recorded via the microphone.
Example of using soundfile to read in .flac file extensions
"""
# Author: Gaelle Chapuis

from ibllib.io.extractors.training_audio import welchogram
import soundfile as sf
import numpy as np
import matplotlib.pyplot as plt
from one.api import ONE

eid = '4ecb5d24-f5cc-402c-be28-9d0f7cb14b3a'  # TEST EXAMPLE
one = ONE(base_url='https://openalyx.internationalbrainlab.org', silent=True)
# -- Get raw data
filename = one.load_dataset(eid, '_iblrig_micData.raw.flac', download_only=True)
with open(filename, 'rb') as f:
    wav, fs = sf.read(f)

# -- Compute spectrogram over first 2 minutes
t_idx = 120 * fs  # number of samples in 120 seconds
tscale, fscale, W, detect = welchogram(fs, wav[:t_idx])

# -- Put data into single variable
TF = {}

TF['power'] = W.astype(np.single)
TF['frequencies'] = fscale[None, :].astype(np.single)
TF['onset_times'] = detect
TF['times_mic'] = tscale[:, None].astype(np.single)

# # -- Plot spectrogram
# time / frequency limits used as the image extent
tlims = TF['times_mic'][[0, -1]].flatten()
flims = TF['frequencies'][0, [0, -1]].flatten()
fig = plt.figure(figsize=[16, 7])
ax = plt.axes()
# power shown in dB (20 * log10)
im = ax.imshow(20 * np.log10(TF['power'].T), aspect='auto', cmap=plt.get_cmap('magma'),
               extent=np.concatenate((tlims, flims)),
               origin='lower')
ax.set_xlabel(r'Time (s)')
ax.set_ylabel(r'Frequency (Hz)')
plt.colorbar(im)
plt.show()
46 |
--------------------------------------------------------------------------------
/examples/one/ephys/docs_raw_data_decompress.py:
--------------------------------------------------------------------------------
1 | """
2 | Download and decompress raw ephys data
3 | ======================================
4 |
5 | This code demonstrates how to decompress raw ephys (binary) data - This is necessary for some
6 | client codes (such as Matlab spike sorting KS2 algorithm) to run
7 |
8 | (example taken for nidq.cbin file, but also applicable for lf.cbin and ap.cbin files)
9 | """
10 |
11 | # Author: Olivier, Gaelle, Mayo
12 | from pprint import pprint
13 |
14 | from ibllib.io import spikeglx
15 | from one.api import ONE
16 |
17 | one = ONE(base_url='https://openalyx.internationalbrainlab.org', silent=True)
18 | # Download a dataset of interest
19 | eid = one.search(subject='KS023', date_range='2019-12-10')[0]
20 |
21 | # Optionally list the raw ephys data for this session
22 | pprint([x for x in one.list_datasets(eid) if 'ephysData' in x])
23 |
24 | files = one.load_object(eid, 'ephysData_g0_t0',
25 | attribute='nidq', collection='raw_ephys_data', download_only=True)
26 |
27 | # Get file path of interest
28 | efile = next(x for x in files if str(x).endswith('.cbin'))
29 |
30 | # Read the files and get the data
31 | # Enough to do analysis
32 | sr = spikeglx.Reader(efile)
33 |
34 | # Decompress the data
35 | # Used by client code, e.g. Matlab for spike sorting
36 | # Give new path output name
37 | sr.decompress_file(keep_original=True, overwrite=True) # Keep the original file and overwrite any
38 | # previously decompressed file
39 |
40 | # For ap/lf data from a given probe
41 | # probe_label = 'probe00'
42 | # files = one.load_object(eid, 'ephysData_g0_t0', download_only=True,
43 | # attribute='nidq', collection=f'raw_ephys_data/{probe_label}')
44 |
--------------------------------------------------------------------------------
/examples/one/ephys/get_list_mice_certif.py:
--------------------------------------------------------------------------------
1 | """
2 | Find certification recording sessions
3 | =====================================
4 | Use ONE to get the training status of a chosen subject or all subjects within a lab.
5 | Training status is computed based on performance over latest 3 sessions (default) or last 3
6 | sessions before a specified date.
7 | """
8 | # Author: Gaelle Chapuis
9 |
10 | # import modules
11 | import numpy as np
12 | from one.api import ONE
13 | one = ONE()
14 |
15 | dataset_types = ['spikes.times',
16 | 'spikes.clusters']
17 |
18 | # eid1, det1 = one.search(project='ibl_certif_neuropix_recording',
19 | # dataset=dataset_types, details=True)
20 |
21 | eids, det = one.search(task_protocol='ephys_certification',
22 | dataset=dataset_types, details=True)
23 |
24 | sub = [p['subject'] for p in det]
25 | # sub_unique = list(set(sub))
26 |
27 | lab = [p['lab'] for p in det]
28 | # lab_unique = list(set(lab))
29 |
30 | # task = [p['task_protocol'] for p in det]
31 | # task_unique = list(set(task))
32 |
33 | # -- How many animals were used per lab
34 | su, ind_su = np.unique(sub, return_index=True)
35 | lab_arr = np.array(lab)
36 | lu = lab_arr[ind_su]
37 |
38 | for i_su in range(0, len(su)):
39 | # Find how many recordings were made with this animals
40 | sess_id = np.where(np.array(sub) == su[i_su])
41 | # Display
42 | tr_pl = one.alyx.rest('trajectories', 'list', subject=su[i_su], provenance='Planned')
43 | tr_tr = one.alyx.rest('trajectories', 'list', subject=su[i_su], provenance='Histology track')
44 | print(f'Subject: {su[i_su]} - Lab: {lu[i_su]} - N session: {len(sess_id[0])}'
45 | f' - N planned traces: {len(tr_pl)} - N tracked traces: {len(tr_tr)}')
46 |
47 | # TODO -- How many recording sessions were done per lab
48 | # prob_des = one.load_dataset(eids[0], 'probes.description.json')
49 | # n_probe = len(prob_des[0])
50 |
--------------------------------------------------------------------------------
/examples/one/ephys/get_list_mice_repeated_site.py:
--------------------------------------------------------------------------------
1 | """
2 | Get list of subjects associated to the repeated site probe trajectory from ONE.
3 | """
4 | # Author: Gaelle Chapuis
5 |
6 | from one.api import ONE
7 |
8 | one = ONE()
9 | # find projects: proj = one.alyx.rest('projects', 'list')
10 | # proj_list = [p['name'] for p in proj]
11 | traj = one.alyx.rest('trajectories', 'list', provenance='Planned',
12 | x=-2243, y=-2000, # repeated site coordinate
13 | project='ibl_neuropixel_brainwide_01')
14 |
15 | # Display subjects names
16 | sess = [p['session'] for p in traj]
17 | sub = [p['subject'] for p in sess]
18 | task = [p['task_protocol'] for p in sess]
19 |
20 | for i_su in range(0, len(sub)):
21 | tr_tr = one.alyx.rest('trajectories', 'list', subject=sub[i_su], provenance='Histology track')
22 | print(f'Subject: {sub[i_su]} - {task[i_su]} - N tracked traces: {len(tr_tr)}')
23 |
--------------------------------------------------------------------------------
/examples/one/ephys/get_probe_label_dir.py:
--------------------------------------------------------------------------------
1 | """
2 | Get single probe label and directory, using the probes description dataset.
3 | """
4 | # Author: Gaelle Chapuis, Miles Wells
5 |
6 | from one.api import ONE
7 | one = ONE()
8 |
9 | eid = 'da188f2c-553c-4e04-879b-c9ea2d1b9a93'
10 |
11 | # --- Get single probe directory filename either by
12 | # 1. getting probe description in alf
13 | # 2. using alyx rest end point
14 |
15 | # Option 1.
16 | prob_des = one.load_dataset(eid, 'probes.description.json')
17 | labels = [x['label'] for x in prob_des]
18 | # You can then use this label into dict, e.g. channels[label[0]]
19 |
20 | # -- Load single probe data with probe-level collection
21 | # List datsets for first probe
22 | collection = f'alf/{labels[0]}'
23 | datasets = one.list_datasets(eid, collection=collection)
24 |
--------------------------------------------------------------------------------
/examples/one/ephys/plot_raster_drift.py:
--------------------------------------------------------------------------------
1 | """
2 | Compute drift for example sessions using:
3 | https://github.com/int-brain-lab/ibllib/blob/master/brainbox/metrics/electrode_drift.py
4 | and display raster plot below
5 | """
6 | # Authors: Gaelle, Olivier
7 |
8 | from brainbox.metrics.electrode_drift import estimate_drift
9 | from one.api import ONE
10 | import brainbox.plot as bbplot
11 | import matplotlib.pyplot as plt
12 |
13 | one = ONE()
14 |
15 | # Find sessions
16 | dsets = ['spikes.times.npy',
17 | 'spikes.amps.npy',
18 | 'spikes.depths.npy']
19 |
20 | # eids = one.search(dataset=dsets,
21 | # project='ibl_neuropixel_brainwide_01',
22 | # task_protocol='_iblrig_tasks_ephysChoiceWorld')
23 | #
24 | # eid = eids[0] # Test with little drift: '7cdb71fb-928d-4eea-988f-0b655081f21c'
25 |
26 | eid = '89f0d6ff-69f4-45bc-b89e-72868abb042a' # Test with huge drift
27 |
28 | # Get dataset
29 |
30 | spikes = one.load_object(eid, 'spikes', collection='alf/probe00')
31 |
32 | drift = estimate_drift(spikes.times, spikes.amps, spikes.depths, display=False)
33 |
34 | # PLOT
35 | # Tight layout
36 | fig3 = plt.figure(constrained_layout=True)
37 | gs = fig3.add_gridspec(3, 3)
38 | f3_ax0 = fig3.add_subplot(gs[0, :])
39 | f3_ax0.plot(drift)
40 | f3_ax1 = fig3.add_subplot(gs[1:, :])
41 | bbplot.driftmap(spikes.times,
42 | spikes.depths,
43 | ax=f3_ax1, plot_style='bincount')
44 | f3_ax0.set_xlim(f3_ax1.get_xlim())
45 |
--------------------------------------------------------------------------------
/examples/one/ephys/raw_data_download.py:
--------------------------------------------------------------------------------
1 | """
2 | Download raw ephys datasets for all probes in a single session via ONE.
3 | (example written for the LFP, but the download can be done for AP
4 | files similarly by replacing 'lf' with 'ap')
5 | """
6 | # Author: Olivier, Gaelle
7 |
8 | from ibllib.io import spikeglx
9 | from one.api import ONE
10 |
11 | # === Option 1 === Download a dataset of interest
12 | one = ONE()
13 |
14 | # Get a specific session eID
15 | eid = one.search(subject='ZM_2240', date_range='2020-01-22')[0]
16 |
17 | # Define and load dataset types of interest
18 | # FIXME this doesn't work
19 | dtypes = ['*ephysData*.lf*', # lf : LFP
20 | '*ephysData*.sync*'] # Used for synchronisation
21 | one.load_datasets(eid, dataset_types=dtypes, download_only=True)
22 |
23 | # Get the files information
24 | session_path = one.eid2path(eid)
25 | efiles = [ef for ef in spikeglx.glob_ephys_files(session_path, bin_exists=False) if
26 | ef.get('lf', None)]
27 | efile = efiles[0]['lf'] # Example: access to the first file
28 |
--------------------------------------------------------------------------------
/examples/one/ephys/raw_data_sync_session_time.py:
--------------------------------------------------------------------------------
1 | """
2 | Reads in and display a chunk of raw LFP synchronized on session time.
3 | """
4 | # Author: Olivier, Gaelle
5 |
6 | import matplotlib.pyplot as plt
7 | import numpy as np
8 | import scipy.interpolate
9 | from oneibl.one import ONE
10 |
11 | from ibllib.io import spikeglx
12 |
13 |
14 | # === Option 1 === Download a dataset of interest
15 | one = ONE()
16 |
17 | # Get a specific session eID
18 | eid = one.search(subject='ZM_2240', date_range='2020-01-22')[0]
19 |
20 | # Define and load dataset types of interest
21 | dtypes = ['ephysData.raw.lf', 'ephysData.raw.meta', 'ephysData.raw.ch',
22 | 'ephysData.raw.sync']
23 | one.load(eid, dataset_types=dtypes, download_only=True)
24 |
25 | # Get the files information
26 | session_path = one.path_from_eid(eid)
27 | efiles = [ef for ef in spikeglx.glob_ephys_files(session_path, bin_exists=False) if
28 | ef.get('lf', None)]
29 | efile = efiles[0]['lf']
30 |
31 | # === Option 2 === You can also input a file locally, e.g.
32 | # efile = ('/datadisk/FlatIron/churchlandlab/Subjects/CSHL045/2020-02-26/001/'
33 | # 'raw_ephys_data/probe01/_spikeglx_ephysData_g0_t0.imec.lf.cbin')
34 |
35 | # === Read the files and get the data ===
36 | with spikeglx.Reader(efile) as sr:
37 | sync_file = sr.file_bin.parent.joinpath(sr.file_bin.stem.replace('.lf', '.sync.npy'))
38 | sync = np.load(sync_file)
39 | sample2time = scipy.interpolate.interp1d(sync[:, 0] * sr.fs, sync[:, 1])
40 |
41 | # Read and plot chunk of data
42 | data = sr[105000:109000, :-1]
43 | data = data - np.mean(data)
44 | tscale = sample2time(np.array([105000, 109000]))
45 |
46 | plt.figure()
47 | im = plt.imshow(data.transpose(), aspect='auto',
48 | extent=[*tscale, data.shape[1], 0])
49 | plt.xlabel('session time (sec)')
50 | plt.ylabel('channel')
51 |
--------------------------------------------------------------------------------
/examples/one/histology/brain_regions_navigation.py:
--------------------------------------------------------------------------------
"""Navigate the Allen atlas brain-region hierarchy for a given region id."""
from iblatlas.atlas import AllenAtlas

ba = AllenAtlas()

# Primary somatosensory area nose
region_id = 353

# Use the parameter defined above instead of repeating the magic number,
# so changing `region_id` changes both queries consistently.
ba.regions.descendants(region_id)
ba.regions.ancestors(region_id)
10 |
--------------------------------------------------------------------------------
/examples/one/histology/coverage_map.py:
--------------------------------------------------------------------------------
"""Plot the electrode coverage volume of micro-manipulator trajectories on atlas slices."""
import matplotlib.pyplot as plt
import numpy as np
from one.api import ONE
from ibldsp.utils import fcn_cosine

import iblatlas.atlas as atlas
from ibllib.pipes.histology import coverage

ba = atlas.AllenAtlas()
# trajs = ONE().alyx.rest('trajectories', 'list', provenance='Ephys aligned histology track')

# Micro-manipulator trajectories from the brainwide project, excluding sessions
# whose QC value is >= 40 (the django filter keeps qc < 40)
trajs = ONE().alyx.rest('trajectories', 'list', provenance='Micro-manipulator',
                        django='probe_insertion__session__project__name__icontains,'
                               'ibl_neuropixel_brainwide_01,'
                               'probe_insertion__session__qc__lt,40')

full_coverage = coverage(trajs)

# 2x2 figure: three orthogonal slices through the coverage volume plus the
# distance-weighting function used for coverage
fig, axs = plt.subplots(2, 2)
ax = ba.plot_hslice(-4000 * 1e-6, volume=full_coverage, ax=axs[0, 0])
ax.set_title("horizontal slice at dv=-4mm")
ax = ba.plot_sslice(ml_coordinate=-0.002, volume=full_coverage, ax=axs[0, 1])
ax.set_title("sagittal slice at ml=-2mm")
ax = ba.plot_cslice(ap_coordinate=-0.003, volume=full_coverage, ax=axs[1, 1])
ax.set_title("coronal slice at ap=-3mm")

# Weight decays from 1 to 0 between 100 and 150 um via a cosine taper
axs[1, 0].plot(np.linspace(0, 200), 1 - fcn_cosine([100, 150])(np.linspace(0, 200)))
axs[1, 0].set_xlabel('distance from nearest active site (um)')
axs[1, 0].set_ylabel('weight')
30 |
--------------------------------------------------------------------------------
/examples/one/histology/docs_visualize_session_coronal_tilted.py:
--------------------------------------------------------------------------------
1 | """
2 | Coronal Plot
3 | ============
4 | Plot a coronal slice (best fit) that contains a given probe track.
5 | As input, use an eID and probe label.
6 | environment installation guide https://github.com/int-brain-lab/iblenv
7 | """
8 | # Author: Olivier Winter
9 |
10 | import numpy as np
11 | from one.api import ONE
12 |
13 | import iblatlas.atlas as atlas
14 | import brainbox.io.one as bbone
15 |
16 | # === Parameters section (edit) ===
17 | eid = 'c7bd79c9-c47e-4ea5-aea3-74dda991b48e'
18 | probe_label = 'probe01'
19 | # === Code (do not edit) ===
20 | ba = atlas.AllenAtlas(25)
21 | one = ONE(base_url='https://openalyx.internationalbrainlab.org')
22 | traj = one.alyx.rest('trajectories', 'list', session=eid,
23 | provenance='Ephys aligned histology track', probe=probe_label)[0]
24 | channels = bbone.load_channel_locations(eid=eid, one=one, probe=probe_label)
25 |
26 | picks = one.alyx.rest('insertions', 'read', id=traj['probe_insertion'])['json']
27 | picks = np.array(picks['xyz_picks']) / 1e6
28 | ins = atlas.Insertion.from_dict(traj)
29 |
30 | cax = ba.plot_tilted_slice(xyz=picks, axis=1, volume='image')
31 | cax.plot(picks[:, 0] * 1e6, picks[:, 2] * 1e6)
32 | cax.plot(channels[probe_label]['x'] * 1e6, channels[probe_label]['z'] * 1e6, 'g*')
33 |
--------------------------------------------------------------------------------
/examples/one/histology/get_probe_trajectory.py:
--------------------------------------------------------------------------------
1 | """
2 | Get all probe trajectory, or filter by provenance, for a given session eID.
3 | """
4 | # Author: Olivier Winter
5 |
6 | from one.api import ONE
7 |
8 | one = ONE()
9 |
10 | eid = 'dda5fc59-f09a-4256-9fb5-66c67667a466'
11 |
12 | # Get all trajectories for the session
13 | trajs = one.alyx.rest('trajectories', 'list', session=eid)
14 | del trajs
15 |
16 | # Filter by provenance
17 | trajs = one.alyx.rest('trajectories', 'list', session=eid, provenance='micro-manipulator')
18 |
19 | # Transform into list for analysis
20 | trajs = list(trajs)
21 |
--------------------------------------------------------------------------------
/examples/one/histology/list_trajectory_more1alignment.py:
--------------------------------------------------------------------------------
1 | """
2 | List sessions with 2 or more histology-ephys alignment done.
3 | TODO: remove session with multiple alingment done by 1 user only.
4 | """
5 | # Author: Gaelle Chapuis
6 | import numpy as np
7 | import pandas as pd
8 | from one.api import ONE
9 |
10 | rec_with_hist = ONE().alyx.rest('trajectories', 'list', provenance='Ephys aligned histology track')
11 | eids = np.array([s['id'] for s in rec_with_hist])
12 |
13 | json = [s['json'] for s in rec_with_hist]
14 | idx_none = [i for i, val in enumerate(json) if val is None]
15 | json_val = np.delete(json, idx_none)
16 | keys = [list(s.keys()) for s in json_val]
17 |
18 | # user_key = [s[0][20:] for s in keys] # remove date-time todo
19 |
20 | # Find index of json fields with 2 or more keys
21 | len_key = [len(s) for s in keys]
22 | idx_several = [i for i, val in enumerate(len_key) if val >= 2]
23 | eid_several = eids[idx_several]
24 |
25 | # create dataframe
26 | frame = pd.DataFrame()
27 | frame['eid'] = eid_several
28 | # frame['user'] = np.array(user_key)[idx_several]
29 | frame['key'] = np.array(keys)[idx_several]
30 |
31 | print(f'{frame}')
32 |
--------------------------------------------------------------------------------
/examples/one/histology/register_lasagna_tracks_alyx.py:
--------------------------------------------------------------------------------
1 | """
2 | Register on Alyx the set of tracked traces (after histology) for a given mouse.
3 |
4 | All your tracks should be in a single folder, and the files names should follow the nomenclature
5 | {yyyy-mm-dd}_{SubjectName}_{SessionNumber}_{ProbeLabel}_pts.csv
6 |
7 | Edit the variable 'path_tracks'(set it either to your local directory [example given here],
8 | either to the Google folder if synched to your machine).
9 |
10 | To check if the registration worked, go to the admin interface
11 | > experiments > trajectory estimates > search for the subject
12 |
13 | If you want to test first, use ALYX_URL = "https://dev.alyx.internationalbrainlab.org"
14 | And check the data appears on:
15 | https://dev.alyx.internationalbrainlab.org/admin/experiments/trajectoryestimate/?
16 |
17 | When you feel confident you can upload without error,
18 | set EXAMPLE_OVERWRITE = False ,
19 | change to the ALYX_URL = "https://alyx.internationalbrainlab.org"
20 | and re-run.
21 |
22 | With EXAMPLE_OVERWRITE = True, the script downloads an example dataset and runs
23 | the registration (used for automatic testing of the example).
24 | """
25 | # Author: Olivier, Gaelle
26 | from pathlib import Path
27 | from iblatlas.atlas import AllenAtlas
28 | from one.api import ONE
29 |
30 | from ibllib.pipes import histology
31 |
32 | # ======== EDIT FOR USERS ====
33 |
34 | # Edit so as to reflect the directory containing your electrode tracks
35 | path_tracks = "/Users/gaelle/Downloads/Flatiron/examples/00_to_add"
36 |
37 |
38 | EXAMPLE_OVERWRITE = True # Put to False when wanting to run the script on your data
39 |
40 | ALYX_URL = "https://dev.alyx.internationalbrainlab.org" # FOR TESTING
41 | # ALYX_URL = "https://alyx.internationalbrainlab.org" # UNCOMMENT WHEN READY
42 |
43 | # ======== DO NOT EDIT BELOW ====
44 | one = ONE(base_url=ALYX_URL)
45 | ba = AllenAtlas()
46 |
47 | if EXAMPLE_OVERWRITE:
48 | # TODO Olivier : Function to download examples folder
49 | cachepath = Path(one.alyx.cache_dir)
50 | path_tracks = cachepath.joinpath('examples', 'histology', 'tracks_to_add')
51 |
52 | histology.register_track_files(path_tracks=path_tracks, one=one, overwrite=True, brain_atlas=ba)
53 | histology.detect_missing_histology_tracks(path_tracks=path_tracks, one=one, brain_atlas=ba)
54 |
--------------------------------------------------------------------------------
/examples/one/histology/visualization3D_rotating_gif_firstpassmap_plan.py:
--------------------------------------------------------------------------------
"""Render the first-pass-map planned insertions in 3D and export a rotating video."""
# Author: Olivier
# environment installation guide https://github.com/int-brain-lab/iblenv
# run "%qui qt" magic command from Ipython prompt for interactive mode

import pandas as pd
from mayavi import mlab
from atlaselectrophysiology import rendering

import iblatlas.atlas as atlas

# the csv file is available here:
# https://github.com/int-brain-lab/ibllib-matlab/blob/master/needles/maps/first_pass_map.csv
output_video = '/home/olivier/Videos/first_pass.webm'
csv_file = "/home/olivier/Documents/MATLAB/ibllib-matlab/needles/maps/first_pass_map.csv"

# start of the code
# BUGFIX: the original instantiated AllenAtlas(25) and immediately rebound the name
# to NeedlesAtlas(25); only the Needles atlas was ever used, so the dead (and
# expensive) Allen instantiation is removed.
brain_atlas = atlas.NeedlesAtlas(25)

df_map = pd.read_csv(csv_file)

fig = rendering.figure()

plt_trj = []
# One insertion per row of the first-pass map
for index, rec in df_map.iterrows():
    ins = atlas.Insertion.from_dict({'x': rec.ml_um, 'y': rec.ap_um, 'z': rec.dv_um,
                                     'phi': rec.phi, 'theta': rec.theta, 'depth': rec.depth_um})
    # Convert insertion endpoints to CCF coordinates for mayavi plotting
    mlapdv = brain_atlas.xyz2ccf(ins.xyz)
    plt = mlab.plot3d(mlapdv[:, 1], mlapdv[:, 2], mlapdv[:, 0],
                      line_width=3, color=(1, .6, .6), tube_radius=15)
    plt_trj.append(plt)

##
rendering.rotating_video(output_video, fig, fps=24, secs=16)
35 |
--------------------------------------------------------------------------------
/examples/one/histology/visualization3D_subject_histology.py:
--------------------------------------------------------------------------------
1 | """
2 | Generates 3D rendering of all probe trajectories for a single subject.
3 |
4 | The trajectory plotted are:
5 | 'Micro-manipulator': Green
6 | 'Histology track': Red
7 | 'Planned': Blue
8 | """
9 | # Author: Olivier
10 | # environment installation guide https://github.com/int-brain-lab/iblenv
11 | # run "%qui qt" magic command from Ipython prompt for interactive mode
12 | from mayavi import mlab
13 | from one.api import ONE
14 |
15 | from atlaselectrophysiology import rendering
16 | import iblatlas.atlas as atlas
17 |
18 | one = ONE(base_url="https://alyx.internationalbrainlab.org")
19 |
20 | fig = rendering.figure()
21 | subject = 'KS003'
22 | trajs = one.alyx.rest('trajectories', 'list', subject=subject)
23 |
24 | ba_allen = atlas.AllenAtlas(25)
25 | ba_needles = atlas.NeedlesAtlas(25)
26 |
27 | plt_trj = []
28 | for index, trj in enumerate(trajs):
29 | if trj['coordinate_system'] == 'IBL-Allen':
30 | brain_atlas = ba_allen
31 | elif trj['coordinate_system'] == 'Needles-Allen':
32 | brain_atlas = ba_needles
33 | ins = atlas.Insertion.from_dict(trj, brain_atlas=brain_atlas)
34 | ins = atlas.Insertion.from_dict(trj, brain_atlas=ba_allen)
35 |
36 | mlapdv = brain_atlas.xyz2ccf(ins.xyz)
37 | if trj['provenance'] == 'Micro-manipulator':
38 | color = (0., 1., 0.) # Green
39 | elif trj['provenance'] == 'Histology track':
40 | color = (1., 0., 0.) # Red
41 | elif trj['provenance'] == 'Planned':
42 | color = (0., 0., 1.) # Blue
43 |
44 | lab = f"{trj['session']['subject']}/{trj['session']['start_time'][:10]}/" \
45 | f"{str(trj['session']['number']).zfill(3)}"
46 | plt = mlab.plot3d(mlapdv[:, 1], mlapdv[:, 2], mlapdv[:, 0],
47 | line_width=3, color=color, tube_radius=20)
48 | # setup the labels at the top of the trajectories
49 | mlab.text3d(mlapdv[0, 1], mlapdv[0, 2], mlapdv[0, 0] - 500, lab,
50 | line_width=4, color=tuple(color), figure=fig, scale=150)
51 | plt_trj.append(plt)
52 |
--------------------------------------------------------------------------------
/examples/one/histology/visualize_alyx_channels_coronal.py:
--------------------------------------------------------------------------------
1 | """
2 | Plot a coronal slice (best fit) that contains a given probe track.
3 | As input, use an eID and probe label.
4 | environment installation guide https://github.com/int-brain-lab/iblenv
5 | """
6 | # Author: Olivier Winter
7 |
8 | import numpy as np
9 | from one.api import ONE
10 |
11 | import iblatlas.atlas as atlas
12 | import brainbox.io.one as bbone
13 |
14 | # === Parameters section (edit) ===
15 | eid = '614e1937-4b24-4ad3-9055-c8253d089919'
16 | probe_label = 'probe01'
17 | FULL_BLOWN_GUI = True # set to False for simple matplotlib view
18 |
19 | # === Code (do not edit) ===
20 | ba = atlas.AllenAtlas(25)
21 | one = ONE(base_url="https://alyx.internationalbrainlab.org")
22 | traj = one.alyx.rest('trajectories', 'list', session=eid,
23 | provenance='Histology track', probe=probe_label)[0]
24 | channels = bbone.load_channel_locations(eid=eid, one=one, probe=probe_label)
25 |
26 | ins = atlas.Insertion.from_dict(traj)
27 |
28 | if FULL_BLOWN_GUI:
29 | from iblapps.histology import atlas_mpl
30 | mw, cax = atlas_mpl.viewatlas(ba, ap_um=np.mean(ins.xyz[:, 1]) * 1e6)
31 | else:
32 | cax = ba.plot_cslice(ap_coordinate=np.mean(ins.xyz[:, 1]), volume='annotation')
33 |
34 | cax.plot(ins.xyz[:, 0] * 1e6, ins.xyz[:, 2] * 1e6)
35 | cax.plot(channels[probe_label].x * 1e6, channels[probe_label].z * 1e6, 'k*')
36 |
37 | if FULL_BLOWN_GUI:
38 | mw.mpl_widget.draw()
39 |
--------------------------------------------------------------------------------
/examples/one/histology/visualize_alyx_traj_coronal_sagittal_raster.py:
--------------------------------------------------------------------------------
1 | """
2 | For a given eID, plot the probe(s) track(s) and the corresponding raster(s).
3 | """
4 | # Author: Gaelle Chapuis
5 | import matplotlib.pyplot as plt
6 | from one.api import ONE
7 |
8 | import iblatlas.atlas as atlas
9 | import brainbox.io.one as bbone
10 | import brainbox.plot as bbplot
11 |
12 | # === Parameters section (edit) ===
13 | ba = atlas.AllenAtlas(25)
14 | one = ONE(base_url="https://alyx.internationalbrainlab.org")
15 |
16 | eid = 'aad23144-0e52-4eac-80c5-c4ee2decb198'
17 |
18 | prob_des = one.load_dataset(eid, 'probes.description.json')
19 | n_probe = len(prob_des)
20 |
21 | # Get information for the session
22 | spikes, clusters, channels = bbone.load_spike_sorting_with_channel(eid, one=one)
23 |
24 | # Loop over probes
25 | for i, probe in enumerate(prob_des):
26 | # Get single probe trajectory
27 | probe_label = probe['label']
28 | trajs = one.alyx.rest('trajectories', 'list', session=eid,
29 | provenance='Histology track', probe=probe_label)
30 |
31 | if len(trajs) == 0:
32 | print(f"No histology recorded for probe {probe_label}")
33 | continue
34 | else:
35 | traj = trajs[0]
36 |
37 | ins = atlas.Insertion.from_dict(traj)
38 |
39 | # Initialise fig subplots
40 | plt.figure(num=i)
41 | fig, axs = plt.subplots(1, 3)
42 | fig.suptitle(f'Probe {probe_label}', fontsize=16)
43 |
44 | # Sagittal view
45 | sax = ba.plot_tilted_slice(ins.xyz, axis=0, ax=axs[0])
46 | sax.plot(ins.xyz[:, 1] * 1e6, ins.xyz[:, 2] * 1e6)
47 | sax.plot(channels[probe_label].y * 1e6, channels[probe_label].z * 1e6, 'y.')
48 |
49 | # Coronal view
50 | cax = ba.plot_tilted_slice(ins.xyz, axis=1, ax=axs[1])
51 | cax.plot(ins.xyz[:, 0] * 1e6, ins.xyz[:, 2] * 1e6)
52 | cax.plot(channels[probe_label].x * 1e6, channels[probe_label].z * 1e6, 'y.')
53 |
54 | # Raster plot -- Brainbox
55 | bbplot.driftmap(spikes[probe_label].times,
56 | spikes[probe_label].depths,
57 | ax=axs[2], plot_style='bincount')
58 |
--------------------------------------------------------------------------------
/examples/one/histology/visualize_track_file_coronal_GUIoption.py:
--------------------------------------------------------------------------------
1 | """
2 | Plot a coronal slice (best fit) that contains a given probe track.
3 | As input, use the path to a probe track (_pts.csv).
4 | environment installation guide https://github.com/int-brain-lab/iblenv
5 | """
6 | # Author: Olivier Winter
7 | import numpy as np
8 |
9 | from ibllib.pipes import histology
10 | import iblatlas.atlas as atlas
11 |
12 | # === Parameters section (edit) ===
13 | track_file = "/Users/gaelle/Downloads/electrodetracks_lic3/2019-08-27_lic3_002_probe00_pts.csv"
14 | FULL_BLOWN_GUI = True # set to False for simple matplotlib view
15 |
16 | # === Code (do not edit) ===
17 | ba = atlas.AllenAtlas(res_um=25)
18 | xyz_picks = histology.load_track_csv(track_file)
19 | bl, ins = histology.get_brain_regions(xyz_picks)
20 |
21 | if FULL_BLOWN_GUI:
22 | from iblapps.histology import atlas_mpl
23 | mw, cax = atlas_mpl.viewatlas(ba, ap_um=np.mean(ins.xyz[:, 1]) * 1e6)
24 | else:
25 | cax = ba.plot_cslice(ap_coordinate=np.mean(ins.xyz[:, 1]))
26 |
27 | cax.plot(ins.xyz[:, 0] * 1e6, ins.xyz[:, 2] * 1e6)
28 | cax.plot(bl.xyz[:, 0] * 1e6, bl.xyz[:, 2] * 1e6, '*')
29 | # cax.plot(ba.bc.xscale * 1e6, ba.top[ba.bc.y2i(np.mean(ins.xyz[:, 1])), :] * 1e6)
30 |
31 | if FULL_BLOWN_GUI:
32 | mw.mpl_widget.draw()
33 |
--------------------------------------------------------------------------------
/examples/one/histology/visualize_track_file_coronal_sagittal_slice.py:
--------------------------------------------------------------------------------
1 | """
2 | For a given track file (pts.csv), plot the coronal and sagittal
3 | slices.
4 | Option: either plot the best-fitted slices (tilted), or the
5 | projection of the probe track onto non-tilted cor/sag slices.
6 |
7 | """
8 | # Author: Olivier Winter
9 | import numpy as np
10 | import matplotlib.pyplot as plt
11 |
12 | from ibllib.pipes import histology
13 | import iblatlas.atlas as atlas
14 |
15 | # === Parameters section (edit) ===
16 |
17 | track_file = "/Users/gaelle/Downloads/electrodetracks_lic3/2019-08-27_lic3_002_probe00_pts.csv"
18 |
19 | # === Code (do not edit) ===
20 | ba = atlas.AllenAtlas(res_um=25)
21 | xyz_picks = histology.load_track_csv(track_file)
22 | bl, ins = histology.get_brain_regions(xyz_picks)
23 |
24 | # --- Initialise figure containing 4 subplots ---
25 | fig, axs = plt.subplots(1, 4)
26 |
27 | # --- PLOT TILTED SLICES THAT BEST CONTAIN THE PROBE TRACK ---
28 | # Sagittal view
29 | sax = ba.plot_tilted_slice(ins.xyz, axis=0, ax=axs[0])
30 | sax.plot(ins.xyz[:, 1] * 1e6, ins.xyz[:, 2] * 1e6)
31 | sax.plot(bl.xyz[:, 1] * 1e6, bl.xyz[:, 2] * 1e6, '.')
32 |
33 | # Coronal view
34 | cax = ba.plot_tilted_slice(ins.xyz, axis=1, ax=axs[1])
35 | cax.plot(ins.xyz[:, 0] * 1e6, ins.xyz[:, 2] * 1e6)
36 | cax.plot(bl.xyz[:, 0] * 1e6, bl.xyz[:, 2] * 1e6, '.')
37 |
38 | # --- PLOT SLICES THAT ARE COR/SAG PLANES, AND PROBE TRACK PROJECTED ---
39 | # Sagittal view
40 | sax2 = ba.plot_sslice(ml_coordinate=np.mean(ins.xyz[:, 0]), ax=axs[2])
41 | sax2.plot(ins.xyz[:, 1] * 1e6, ins.xyz[:, 2] * 1e6)
42 | sax2.plot(bl.xyz[:, 1] * 1e6, bl.xyz[:, 2] * 1e6, '.')
43 |
44 | # Coronal view
45 | cax2 = ba.plot_cslice(ap_coordinate=np.mean(ins.xyz[:, 1]), ax=axs[3])
46 | cax2.plot(ins.xyz[:, 0] * 1e6, ins.xyz[:, 2] * 1e6)
47 | cax2.plot(bl.xyz[:, 0] * 1e6, bl.xyz[:, 2] * 1e6, '.')
48 |
49 | # # -- Test insertion
50 | # xyz = np.array([[0, 0, 0], [0.001, 0.005, -0.005]])
51 | # # SAG TILTED
52 | # sax_t = ba.plot_tilted_slice(xyz, axis=0)
53 | # sax_t.plot(xyz[:, 1] * 1e6, xyz[:, 2] * 1e6)
54 | # # COR TILTED
55 | # cax_t = ba.plot_tilted_slice(xyz, axis=1)
56 | # cax_t.plot(xyz[:, 1] * 1e6, xyz[:, 2] * 1e6)
57 |
--------------------------------------------------------------------------------
/examples/one/one_queries.py:
--------------------------------------------------------------------------------
"""Examples of Alyx REST queries through ONE (sessions and insertions)."""
from one.api import ONE

one = ONE()
# query session for several subjects
subjects = ['DY_003', 'DY_006']
# NOTE(review): the f-string interpolates the list's repr ("['DY_003', 'DY_006']")
# into the django __in filter — presumably the endpoint parses that form; verify.
ses = one.alyx.rest('sessions', 'list', django=f"subject__nickname__in,{subjects}")

# query sessions that have histology available
ses = one.alyx.rest('sessions', 'list', histology=True)
# the generic way
ses = one.alyx.rest('sessions', 'list',
                    django="subject__actions_sessions__procedures__name,Histology")

# query sessions having specific channel locations (hierarchical, will fetch everything below)
ses = one.alyx.rest('sessions', 'list', atlas_id=500)
ses = one.alyx.rest('sessions', 'list', atlas_acronym="MO")
ses = one.alyx.rest('sessions', 'list', atlas_name="Somatomotor areas")


# query sessions that do not have matlab in the project name
# (the leading ~ negates the django filter)
ses = one.alyx.rest('sessions', 'list', django='~project__name__icontains,matlab')

# query sessions that do not contain a given dataset type
ses = one.alyx.rest('sessions', 'list',
                    django='~data_dataset_session_related__dataset_type__name__icontains,wheel')

# query probe insertions for a given task protocol
one.alyx.rest('insertions', 'list', django='session__task_protocol__icontains,choiceworld')
29 |
--------------------------------------------------------------------------------
/gitflow_checklist.md:
--------------------------------------------------------------------------------
1 | # ibllib gitflow and git commands for releasing
2 | To use `git flow` install it by: `sudo apt install git-flow`
3 | ## Create a release branch
4 | git flow release start 0.4.35 | git checkout -b release/0.4.35 develop
5 |
6 | ## Change and commit locally:
7 | * ### Bump up version in setup.py
8 | ```python
9 | setup(
10 | name='ibllib',
11 | version='0.4.34', --> version='0.4.35'
12 | ...
13 | ```
14 | * ### Flakify
15 | * ### Docs if needed
16 | * ### Make sure tests pass
17 |
18 | **Commit changes normally to current release/0.4.35 branch**
19 | Normal push and pull for sharing an unfinished release branch apply
20 |
21 | ## Finalize a release branch
22 | git flow release finish 0.4.35 | git checkout master
23 | | git merge --no-ff release/0.4.35
24 | | git tag -a 0.4.35
25 | | git checkout develop
26 | | git merge --no-ff release/0.4.35
27 | | git branch -d release/0.4.35
28 |
29 | ## Push to repo
30 | git push origin master
31 | git push origin develop
32 | git push origin --tags
33 | .
34 | .
35 | .
36 | .
37 | # ibllib deploy to PYPI
38 | ## Build
39 | **First remove anything in ibllib/python/dist/***
40 | Then build
41 | ```shell
42 | rm -R dist
43 | python setup.py sdist bdist_wheel
44 | ```
45 |
46 | ## Test upload
47 | ```shell
48 | twine upload --repository-url https://test.pypi.org/legacy/ dist/*
49 | ```
50 |
51 | ## Upload
52 | ```shell
53 | twine upload dist/*
54 | ```
55 |
56 | ## Install
57 | As lib
58 | Activate environment and upgrade
59 | ```shell
60 | conda activate iblenv
61 | pip install ibllib --upgrade
62 | ```
63 | As code installed with `pip install -e .`
64 | ```shell
65 | cd /my/ibllib/repo/path
66 | git reset --hard
67 | git pull
68 | ```
--------------------------------------------------------------------------------
/ibllib/__init__.py:
--------------------------------------------------------------------------------
1 | """Library implementing the International Brain Laboratory data pipeline."""
2 | import logging
3 | import warnings
4 |
__version__ = '3.3.1'
# Always surface deprecation warnings raised from within ibllib modules.
warnings.filterwarnings('always', category=DeprecationWarning, module='ibllib')

# If this becomes a full-blown library we should leave the logging configuration to the
# discretion of the dev who uses the library. However since it can also be provided as an app,
# the end-users should be provided with a useful default logging in standard output without
# messing with the complex python logging system.
USE_LOGGING = True
# NOTE(review): leftover fragment of a logging format string ('%(asctime)s,%(msecs)d'); kept for reference.
if USE_LOGGING:
    from iblutil.util import setup_logger
    setup_logger(name='ibllib', level=logging.INFO)
else:
    # deactivate all log calls for use as a library
    logging.getLogger('ibllib').addHandler(logging.NullHandler())
19 |
--------------------------------------------------------------------------------
/ibllib/ephys/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/ephys/__init__.py
--------------------------------------------------------------------------------
/ibllib/exceptions.py:
--------------------------------------------------------------------------------
class IblError(Exception):
    """Base class for ibllib-specific errors.

    Subclasses override ``explanation`` with a human-readable description
    that is appended to the message when the exception is rendered as a
    string.
    """

    # Default explanation; concrete subclasses provide a specific one.
    explanation = ''

    def __init__(self, *args):
        # Keep the first positional argument as the message, if one was given.
        self.message = args[0] if args else None

    def __str__(self):
        # NB: the exact spacing/newline layout is part of the rendered output.
        return f"{self.message} \n {self.explanation} "
12 |
13 |
class SyncBpodWheelException(IblError):
    """Raised when Bpod data cannot be synchronized with the rotary encoder."""

    explanation = "The bpod can't be synchronized with the Rotary Encoder."
16 |
17 |
class SyncBpodFpgaException(IblError):
    """Raised when Bpod data cannot be synchronized with the FPGA."""

    explanation = "The bpod can't be synchronized with the FPGA."
20 |
21 |
class Neuropixel3BSyncFrontsNonMatching(IblError):
    """Raised when probe sync-pulse files do not match the nidq sync fronts (3B probes)."""

    explanation = (" When the npy files containing sync pulses for probes do not match with nidq."
                   "In 3B, this indicates that either the binary files is corrupt,"
                   "either the extracted sync files are corrupt.")
26 |
27 |
class NvidiaDriverNotReady(IblError):
    """Raised when the Nvidia driver does not respond (GPU inaccessible)."""

    explanation = ('Nvidia driver does not respond. This usually means the GPU is inaccessible '
                   'and needs to be recovered through a system reboot.')
31 |
32 |
class WidefieldWiringException(IblError):
    """Raised when the LED wiring found in widefield data is absent from the wiring map."""

    explanation = ("LED wiring in data is not found in the wiring map. Check that correct wiring map is being used.")
35 |
--------------------------------------------------------------------------------
/ibllib/io/__init__.py:
--------------------------------------------------------------------------------
1 | """Loaders for unprocessed IBL video and task data, and parameter files."""
2 |
--------------------------------------------------------------------------------
/ibllib/io/extractors/__init__.py:
--------------------------------------------------------------------------------
1 | """IBL rig data pre-processing functions.
2 |
3 | Extractor classes for loading raw rig data and returning ALF compliant pre-processed data.
4 | """
5 |
--------------------------------------------------------------------------------
/ibllib/io/extractors/bpod_trials.py:
--------------------------------------------------------------------------------
1 | """Trials data extraction from raw Bpod output.
2 |
3 | This module will extract the Bpod trials and wheel data based on the task protocol,
4 | i.e. habituation, training or biased.
5 | """
6 | import importlib
7 |
8 | from ibllib.io.extractors.base import get_bpod_extractor_class, protocol2extractor, BaseExtractor
9 | from ibllib.io.extractors.habituation_trials import HabituationTrials
10 | from ibllib.io.extractors.training_trials import TrainingTrials
11 | from ibllib.io.extractors.biased_trials import BiasedTrials, EphysTrials
12 | from ibllib.io.extractors.base import BaseBpodTrialsExtractor
13 |
14 |
def get_bpod_extractor(session_path, protocol=None, task_collection='raw_behavior_data') -> BaseBpodTrialsExtractor:
    """
    Returns an extractor for a given session.

    Parameters
    ----------
    session_path : str, pathlib.Path
        The path to the session to be extracted.
    protocol : str, optional
        The protocol name, otherwise uses the PYBPOD_PROTOCOL key in iblrig task settings files.
    task_collection : str
        The folder within the session that contains the raw task data.

    Returns
    -------
    BaseBpodTrialsExtractor
        An instance of the task extractor class, instantiated with the session path.

    Raises
    ------
    ValueError
        If the resolved extractor class cannot be found, or is not a BaseExtractor subclass.
    """
    # Extractor classes shipped with ibllib, keyed by class name.
    builtin_extractors = {
        'HabituationTrials': HabituationTrials,
        'TrainingTrials': TrainingTrials,
        'BiasedTrials': BiasedTrials,
        'EphysTrials': EphysTrials,
    }

    # Resolve the extractor class name from the explicit protocol, or from the task settings file.
    if protocol:
        class_name = protocol2extractor(protocol)
    else:
        class_name = get_bpod_extractor_class(session_path, task_collection=task_collection)

    extractor_class = builtin_extractors.get(class_name)
    if extractor_class is not None:
        return extractor_class(session_path)

    # Fall back to custom extractor classes defined in the personal projects repository.
    qualified_name = class_name if class_name.startswith('projects.') else 'projects.' + class_name
    module_name, class_name = qualified_name.rsplit('.', 1)
    extractor_class = getattr(importlib.import_module(module_name), class_name, None)
    if extractor_class is None:
        raise ValueError(f'extractor {class_name} not found')
    my_extractor = extractor_class(session_path)
    if not isinstance(my_extractor, BaseExtractor):
        raise ValueError(
            f"{my_extractor} should be an Extractor class inheriting from ibllib.io.extractors.base.BaseExtractor")
    return my_extractor
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_0_ephys_len_blocks.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_0_ephys_len_blocks.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_0_ephys_pcqs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_0_ephys_pcqs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_0_passive_pcs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_0_passive_pcs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_0_passive_stimDelays.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_0_passive_stimDelays.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_0_passive_stimIDs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_0_passive_stimIDs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_0_stim_phase.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_0_stim_phase.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_10_ephys_len_blocks.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_10_ephys_len_blocks.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_10_ephys_pcqs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_10_ephys_pcqs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_10_passive_pcs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_10_passive_pcs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_10_passive_stimDelays.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_10_passive_stimDelays.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_10_passive_stimIDs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_10_passive_stimIDs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_10_stim_phase.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_10_stim_phase.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_11_ephys_len_blocks.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_11_ephys_len_blocks.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_11_ephys_pcqs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_11_ephys_pcqs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_11_passive_pcs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_11_passive_pcs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_11_passive_stimDelays.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_11_passive_stimDelays.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_11_passive_stimIDs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_11_passive_stimIDs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_11_stim_phase.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_11_stim_phase.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_1_ephys_len_blocks.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_1_ephys_len_blocks.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_1_ephys_pcqs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_1_ephys_pcqs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_1_passive_pcs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_1_passive_pcs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_1_passive_stimDelays.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_1_passive_stimDelays.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_1_passive_stimIDs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_1_passive_stimIDs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_1_stim_phase.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_1_stim_phase.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_2_ephys_len_blocks.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_2_ephys_len_blocks.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_2_ephys_pcqs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_2_ephys_pcqs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_2_passive_pcs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_2_passive_pcs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_2_passive_stimDelays.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_2_passive_stimDelays.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_2_passive_stimIDs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_2_passive_stimIDs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_2_stim_phase.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_2_stim_phase.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_3_ephys_len_blocks.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_3_ephys_len_blocks.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_3_ephys_pcqs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_3_ephys_pcqs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_3_passive_pcs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_3_passive_pcs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_3_passive_stimDelays.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_3_passive_stimDelays.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_3_passive_stimIDs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_3_passive_stimIDs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_3_stim_phase.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_3_stim_phase.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_4_ephys_len_blocks.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_4_ephys_len_blocks.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_4_ephys_pcqs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_4_ephys_pcqs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_4_passive_pcs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_4_passive_pcs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_4_passive_stimDelays.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_4_passive_stimDelays.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_4_passive_stimIDs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_4_passive_stimIDs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_4_stim_phase.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_4_stim_phase.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_5_ephys_len_blocks.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_5_ephys_len_blocks.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_5_ephys_pcqs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_5_ephys_pcqs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_5_passive_pcs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_5_passive_pcs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_5_passive_stimDelays.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_5_passive_stimDelays.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_5_passive_stimIDs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_5_passive_stimIDs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_5_stim_phase.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_5_stim_phase.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_6_ephys_len_blocks.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_6_ephys_len_blocks.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_6_ephys_pcqs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_6_ephys_pcqs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_6_passive_pcs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_6_passive_pcs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_6_passive_stimDelays.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_6_passive_stimDelays.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_6_passive_stimIDs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_6_passive_stimIDs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_6_stim_phase.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_6_stim_phase.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_7_ephys_len_blocks.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_7_ephys_len_blocks.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_7_ephys_pcqs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_7_ephys_pcqs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_7_passive_pcs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_7_passive_pcs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_7_passive_stimDelays.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_7_passive_stimDelays.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_7_passive_stimIDs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_7_passive_stimIDs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_7_stim_phase.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_7_stim_phase.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_8_ephys_len_blocks.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_8_ephys_len_blocks.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_8_ephys_pcqs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_8_ephys_pcqs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_8_passive_pcs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_8_passive_pcs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_8_passive_stimDelays.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_8_passive_stimDelays.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_8_passive_stimIDs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_8_passive_stimIDs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_8_stim_phase.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_8_stim_phase.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_9_ephys_len_blocks.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_9_ephys_len_blocks.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_9_ephys_pcqs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_9_ephys_pcqs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_9_passive_pcs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_9_passive_pcs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_9_passive_stimDelays.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_9_passive_stimDelays.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_9_passive_stimIDs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_9_passive_stimIDs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_9_stim_phase.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_9_stim_phase.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_mock_ephys_len_blocks.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_mock_ephys_len_blocks.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_mock_ephys_pcqs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_mock_ephys_pcqs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_mock_passive_pcs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_mock_passive_pcs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_mock_passive_stimDelays.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_mock_passive_stimDelays.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_mock_passive_stimIDs.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_mock_passive_stimIDs.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/ephys_sessions/session_mock_stim_phase.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/ephys_sessions/session_mock_stim_phase.npy
--------------------------------------------------------------------------------
/ibllib/io/extractors/mesoscope/README.md:
--------------------------------------------------------------------------------
1 | # File fixtures
2 | ### surface_triangulation.npz
3 | A triangle mesh of the smoothed convex hull of the dorsal surface of the mouse brain, generated from
4 | the 2017 Allen 10um annotation volume.
5 |
- **points** - An N by 3 integer array of x-y-z vertices, defining all points of the triangle mesh. These are in um relative to the IBL bregma coordinates.
7 | - **connectivity_list** - An N by 3 integer array of vertex indices defining all points that form a triangle.
8 |
9 | This triangulation was generated in MATLAB.
10 |
--------------------------------------------------------------------------------
/ibllib/io/extractors/mesoscope/surface_triangulation.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/io/extractors/mesoscope/surface_triangulation.npz
--------------------------------------------------------------------------------
/ibllib/io/extractors/opto_trials.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import numpy as np
3 |
4 | from ibllib.io.extractors.base import BaseBpodTrialsExtractor
5 |
6 | _logger = logging.getLogger(__name__)
7 |
8 |
class LaserBool(BaseBpodTrialsExtractor):
    """
    Extracts the laser probabilities from the bpod jsonable
    """
    # Output dataset filenames, matched one-to-one with `var_names`; entries are
    # set to None in _extract when the corresponding data is entirely missing.
    save_names = ('_ibl_trials.laserStimulation.npy', '_ibl_trials.laserProbability.npy')
    var_names = ('laserStimulation', 'laserProbability')

    def _extract(self, **kwargs):
        """Return (laserStimulation, laserProbability), one float per bpod trial.

        Values are read from each trial dict in ``self.bpod_trials``; a trial
        missing the relevant key yields NaN. When a dataset is entirely NaN,
        its entry in ``save_names`` is set to None so no file is written.
        """
        _logger.info('Extracting laser datasets')
        # reference pybpod implementation: modern sessions carry per-trial keys
        lstim = np.array([float(t.get('laser_stimulation', np.nan)) for t in self.bpod_trials])
        lprob = np.array([float(t.get('laser_probability', np.nan)) for t in self.bpod_trials])

        # Karolina's choice world legacy implementation - from Slack message:
        # it is possible that some versions I have used:
        # 1) opto_ON_time (NaN - no laser or some number-laser)
        #    opto_ON_time=~isnan(opto_ON_time)
        #    laserON_trials=(opto_ON_time==1);
        #    laserOFF_trials=(opto_ON_time==0);
        # 2) optoOUT (0 - no laser or 255 - laser):
        #    laserON_trials=(optoOUT ==255);
        #    laserOFF_trials=(optoOUT ==0);
        # NOTE(review): `self.settings` is presumably the task settings dict
        # provided by BaseBpodTrialsExtractor — confirm against the base class.
        if 'PROBABILITY_OPTO' in self.settings.keys() and np.all(np.isnan(lstim)):
            # Legacy session: use the session-wide PROBABILITY_OPTO setting as a
            # constant per-trial probability and fall back to legacy trial keys.
            lprob = np.zeros_like(lprob) + self.settings['PROBABILITY_OPTO']
            lstim = np.array([float(t.get('opto_ON_time', np.nan)) for t in self.bpod_trials])
            if np.all(np.isnan(lstim)):
                # case 2): optoOUT is 0 (no laser) or 255 (laser); 255 -> 1,
                # zeros stay zero and NaNs stay NaN.
                lstim = np.array([float(t.get('optoOUT', np.nan)) for t in self.bpod_trials])
                lstim[lstim == 255] = 1
            else:
                # case 1): opto_ON_time is NaN (no laser) or a number (laser on)
                lstim[~np.isnan(lstim)] = 1
                lstim[np.isnan(lstim)] = 0

        if np.all(np.isnan(lprob)):
            # this prevents the file from being saved when no data
            self.save_names = ('_ibl_trials.laserStimulation.npy', None)
            _logger.warning('No laser probability found in bpod data')
        if np.all(np.isnan(lstim)):
            # this prevents the file from being saved when no data
            self.save_names = (None, '_ibl_trials.laserProbability.npy')
            _logger.warning('No laser stimulation found in bpod data')
        return lstim, lprob
50 |
--------------------------------------------------------------------------------
/ibllib/io/extractors/task_extractor_map.json:
--------------------------------------------------------------------------------
1 | {"!!THIS FILE": "SHOULD NOT BE EDITED...",
2 | "SEE": "PROJECT EXTRACTION REPO!!",
3 | "************": "**********************",
4 | "ephysChoiceWorld": "EphysTrials",
5 | "_biasedChoiceWorld": "BiasedTrials",
6 | "_habituationChoiceWorld": "HabituationTrials",
7 | "_trainingChoiceWorld": "TrainingTrials",
8 | "_trainingPhaseChoiceWorld": "TrainingTrials",
9 | "_imagingChoiceWorld": "BiasedTrials",
10 | "_advancedChoiceWorld": "TrainingTrials",
11 | "_neuromodulatorChoiceWorld": "TrainingTrials"
12 | }
13 |
--------------------------------------------------------------------------------
/ibllib/io/misc.py:
--------------------------------------------------------------------------------
from pathlib import Path
import logging

log = logging.getLogger(__name__)


# Remove empty folders
def delete_empty_folders(path, rglob_pattern='*', dry=True, recursive=False):
    """Delete empty folders inside `path`.

    If `recursive` is True, keep deleting until every remaining folder has a
    file inside (deleting a folder may empty its parent). `recursive` is
    ignored when `dry` is True, since a dry run deletes nothing and therefore
    can make no progress.

    :param path: path to check for empty folders
    :type path: str or pathlib.Path
    :param rglob_pattern: filter on folder names, defaults to '*'
    :type rglob_pattern: str, optional
    :param dry: dry run will simulate the action, defaults to True
    :type dry: bool, optional
    :param recursive: whether to recurse after the last level of empty folders
     is deleted, defaults to False
    :type recursive: bool, optional
    :return: None
    """
    path = Path(path)
    all_dirs = {p for p in path.rglob(rglob_pattern) if p.is_dir()}
    empty_dirs = {p for p in all_dirs if not list(p.glob('*'))}
    log.info(f'Empty folders: {len(empty_dirs)}')
    if dry:
        # Dry run: only report; never recurse (nothing was deleted, so a
        # recursive call would find the exact same folders again).
        log.info(f'Empty folder names: {empty_dirs}')
        return
    for d in empty_dirs:
        log.info(f'Deleting empty folder: {d}')
        d.rmdir()
    log.info(f'Deleted folders: {len(empty_dirs)}\n')
    # Deleting folders may have emptied their parents. Previously `recursive`
    # was not propagated to the recursive call, so only one extra level was
    # ever cleaned; now we recurse for as long as progress is being made.
    if recursive and empty_dirs:
        return delete_empty_folders(path, rglob_pattern=rglob_pattern, dry=dry,
                                    recursive=recursive)
38 |
--------------------------------------------------------------------------------
/ibllib/misc/__init__.py:
--------------------------------------------------------------------------------
1 | from .misc import structarr, check_nvidia_driver
2 |
--------------------------------------------------------------------------------
/ibllib/misc/misc.py:
--------------------------------------------------------------------------------
1 | # library of small functions
2 | import logging
3 | import subprocess
4 |
5 | import numpy as np
6 |
7 | from ibllib.exceptions import NvidiaDriverNotReady
8 |
9 | _logger = logging.getLogger(__name__)
10 |
11 |
def _parametrized(dec):
    """Meta-decorator: make ``dec(f, *args, **kwargs)`` usable as ``@dec(*args, **kwargs)``.

    ``dec`` must take the decorated function as its first argument followed by
    the decoration parameters; the returned factory binds those parameters.
    """
    def factory(*dec_args, **dec_kwargs):
        def apply(func):
            return dec(func, *dec_args, **dec_kwargs)
        return apply
    return factory
18 |
19 |
def structarr(names, shape=None, formats=None):
    """Return a zero-filled NumPy structured array.

    :param names: field names for the structured dtype
    :param shape: shape of the returned array (None gives a 0-d array)
    :param formats: per-field dtype strings; defaults to 'f8' for every field
    """
    if not formats:
        formats = ['f8'] * len(names)
    # Build the compound dtype from (name, format) pairs.
    dtype = np.dtype(list(zip(names, formats)))
    return np.zeros(shape, dtype=dtype)
25 |
26 |
def check_nvidia_driver():
    """
    Checks if the GPU driver reacts and otherwise raises a custom error.
    Useful to check before long GPU-dependent processes.
    """
    # Run `nvidia-smi` through bash, capturing both output streams.
    result = subprocess.run('nvidia-smi', shell=True, executable="/bin/bash",
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if result.returncode != 0:
        # Driver did not respond: surface stderr in the exception message.
        raise NvidiaDriverNotReady(f"{result.stderr.decode('utf-8')}")
    _logger.info("nvidia-smi command successful")
38 |
--------------------------------------------------------------------------------
/ibllib/misc/qt.py:
--------------------------------------------------------------------------------
1 | """PyQt5 helper functions."""
2 | import logging
3 | import sys
4 | from functools import wraps
5 |
6 | from PyQt5 import QtWidgets
7 |
8 | _logger = logging.getLogger(__name__)
9 |
10 |
def get_main_window():
    """Get the Main window of a QT application.

    Raises IndexError if the running application has no QMainWindow.
    """
    app = QtWidgets.QApplication.instance()
    main_windows = [w for w in app.topLevelWidgets()
                    if isinstance(w, QtWidgets.QMainWindow)]
    return main_windows[0]
15 |
16 |
def create_app():
    """Create a Qt application, reusing any already-running instance.

    The application is stored in the module-level ``QT_APP`` global so a
    reference is kept alive for the lifetime of the module.
    """
    global QT_APP
    existing = QtWidgets.QApplication.instance()
    if existing is None:  # pragma: no cover
        existing = QtWidgets.QApplication(sys.argv)
    QT_APP = existing
    return QT_APP
24 |
25 |
def require_qt(func):
    """Function decorator to specify that a function requires a Qt application.

    Use this decorator to specify that a function needs a running Qt application
    before it can run. If none is running, one is created (with a warning).
    """
    @wraps(func)
    def ensure_app(*args, **kwargs):
        # Lazily create the application if the caller has not done so.
        if not QtWidgets.QApplication.instance():
            _logger.warning('Creating a Qt application.')
            create_app()
        return func(*args, **kwargs)
    return ensure_app
39 |
40 |
@require_qt
def run_app():  # pragma: no cover
    """Run the Qt event loop until the application exits."""
    global QT_APP  # noqa F841
    exit_code = QT_APP.exec_()
    return QT_APP.exit(exit_code)
46 |
--------------------------------------------------------------------------------
/ibllib/oneibl/__init__.py:
--------------------------------------------------------------------------------
1 | """IBL pipeline-specific ONE functions"""
2 |
--------------------------------------------------------------------------------
/ibllib/plots/__init__.py:
--------------------------------------------------------------------------------
1 | from ibllib.plots.misc import *
2 |
--------------------------------------------------------------------------------
/ibllib/qc/__init__.py:
--------------------------------------------------------------------------------
1 | """Data quality control calculation and aggregation."""
2 |
--------------------------------------------------------------------------------
/ibllib/qc/reference/frame_src.json:
--------------------------------------------------------------------------------
1 | {
2 | "body": [
3 | "2019-07-25_1_ZM_1888#1000",
4 | "2019-07-19_1_ZM_1887#1000"
5 | ],
6 | "left": [
7 | "2019-07-19_1_ZM_1887#1000",
8 | "2019-07-25_1_ZM_1888#1000",
9 | "2020-12-03_1_ZFM-01576#1000"
10 | ],
11 | "right": [
12 | "2019-07-19_1_ZM_1887#1000"
13 | ]
14 | }
15 |
--------------------------------------------------------------------------------
/ibllib/qc/reference/frames_body.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/qc/reference/frames_body.npy
--------------------------------------------------------------------------------
/ibllib/qc/reference/frames_left.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/qc/reference/frames_left.npy
--------------------------------------------------------------------------------
/ibllib/qc/reference/frames_right.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/qc/reference/frames_right.npy
--------------------------------------------------------------------------------
/ibllib/qc/task_qc_viewer/README.md:
--------------------------------------------------------------------------------
1 | # Task QC Viewer
2 | This will download the TTL pulses and data collected on Bpod and/or FPGA and plot the results
3 | alongside an interactive table. The UUID is the session id.
4 |
5 | ## Usage: command line
6 |
7 | Launch the Viewer by typing `task_qc session-uuid` , example:
8 | ```shell
9 | task_qc c9fec76e-7a20-4da4-93ad-04510a89473b
10 | ```
11 |
12 | Or just using a local path (on a local server for example):
13 | ```shell
14 | task_qc /mnt/s0/Subjects/KS022/2019-12-10/001 --local
15 | ```
16 |
17 | On behaviour rigs, use the Bpod only flag:
18 | ```shell
19 | task_qc C:\iblrigv8_data\cortexlab\Subjects\KS022\2019-12-10\001 --local --bpod
20 | ```
21 |
22 | ## Usage: from ipython prompt
23 | ```python
24 | from ibllib.qc.task_qc_viewer.task_qc import show_session_task_qc
25 | session_path = r"/datadisk/Data/IntegrationTests/ephys/choice_world_init/KS022/2019-12-10/001"
26 | show_session_task_qc(session_path, local=True)
27 | ```
28 |
29 | ## Plots
30 | 1) Sync pulse display:
- TTL sync pulses (as recorded on the Bpod or FPGA for ephys sessions) for some key apparatus
  (i.e. frame2TTL, audio signal). TTL pulse trains are displayed in black (time on x-axis, voltage on y-axis), offset by an increment of 1 each time (e.g. audio signal is on line 3, cf legend).
33 | - trial event types, vertical lines (marked in different colours)
34 |
35 | 2) Wheel display:
36 | - the wheel position in radians
37 | - trial event types, vertical lines (marked in different colours)
38 |
39 | 3) Interactive table:
40 | Each row is a trial entry. Each column is a trial event
41 |
42 | When double-clicking on any field of that table, the Sync pulse display time (x-) axis is adjusted so as to visualise the corresponding trial selected.
43 |
44 | ### What to look for
45 | Tests are defined in the SINGLE METRICS section of ibllib/qc/task_metrics.py: https://github.com/int-brain-lab/ibllib/blob/master/ibllib/qc/task_metrics.py#L420
46 |
47 | ### Exit
48 | Close the GUI window containing the interactive table to exit.
49 |
--------------------------------------------------------------------------------
/ibllib/qc/task_qc_viewer/__init__.py:
--------------------------------------------------------------------------------
1 | """Interactive task QC viewer."""
2 |
--------------------------------------------------------------------------------
/ibllib/tests/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 |
4 |
def _get_test_db():
    """Return the Alyx test-database connection parameters as a dict.

    If the ``TEST_DB_CONFIG`` environment variable points at a JSON file, its
    contents are used; otherwise a built-in public test configuration is
    returned.
    """
    config_path = os.getenv('TEST_DB_CONFIG', None)
    if not config_path:
        return {
            'base_url': 'https://test.alyx.internationalbrainlab.org',
            'username': 'test_user',
            'password': 'TapetesBloc18',
            'silent': True
        }
    with open(config_path, 'r') as config_file:
        return json.load(config_file)


# Resolved once at import time and shared by all test modules.
TEST_DB = _get_test_db()
20 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/extractors/__init__.py
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_biased_ge5/raw_behavior_data/_iblrig_ambientSensorData.raw.jsonable:
--------------------------------------------------------------------------------
1 | {"Temperature_C": [24.389999389648438], "AirPressure_mb": [1035.1611328125], "RelativeHumidity": [50.1953125]}
2 | {"Temperature_C": [24.389999389648438], "AirPressure_mb": [1035.138916015625], "RelativeHumidity": [50.15234375]}
3 | {"Temperature_C": [24.389999389648438], "AirPressure_mb": [1035.14697265625], "RelativeHumidity": [50.173828125]}
4 | {"Temperature_C": [24.389999389648438], "AirPressure_mb": [1035.1751708984375], "RelativeHumidity": [50.1953125]}
5 | {"Temperature_C": [24.389999389648438], "AirPressure_mb": [1035.171142578125], "RelativeHumidity": [50.2275390625]}
6 | {"Temperature_C": [24.389999389648438], "AirPressure_mb": [1035.167236328125], "RelativeHumidity": [50.2275390625]}
7 | {"Temperature_C": [24.40999984741211], "AirPressure_mb": [1035.1365966796875], "RelativeHumidity": [50.228515625]}
8 | {"Temperature_C": [24.399999618530273], "AirPressure_mb": [1035.1103515625], "RelativeHumidity": [50.21875]}
9 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_biased_ge5/raw_behavior_data/_iblrig_encoderEvents.raw.ssv:
--------------------------------------------------------------------------------
1 | 3963997 2
2 | 4064896 3
3 | 5582679 1
4 | 7022962 2
5 | 7093164 3
6 | 11870909 1
7 | 13333491 2
8 | 13397992 3
9 | 15782765 1
10 | 17229749 2
11 | 17292148 3
12 | 18652832 1
13 | 20154216 2
14 | 20230117 3
15 | 21558501 1
16 | 22980885 2
17 | 23041684 3
18 | 85040992 1
19 | 86700273 2
20 | 86770073 3
21 | 88224957 1
22 | 89772439 2
23 | 89836339 3
24 | 92503609 1
25 | 93931893 2
26 | 94008194 3
27 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_biased_ge5/raw_behavior_data/_iblrig_encoderTrialInfo.raw.ssv:
--------------------------------------------------------------------------------
1 | 0 -35 1 0.1 0 4 49 0 2019-07-01T12:18:20.7942912+01:00
2 | 1 35 0.125 0.1 0 4 49 0 2019-07-01T12:18:22.9904256+01:00
3 | 2 35 1 0.1 0 4 49 1.102179 2019-07-01T12:18:27.0239616+01:00
4 | 3 35 0.125 0.1 0 4 49 0.5217499 2019-07-01T12:18:33.3576576+01:00
5 | 4 -35 0.125 0.1 0 4 49 2.239181 2019-07-01T12:18:37.2245504+01:00
6 | 5 35 0.125 0.1 0 4 49 2.720602 2019-07-01T12:18:40.1080704+01:00
7 | 6 -35 0.125 0.1 0 4 49 0.5806797 2019-07-01T12:18:43.0249088+01:00
8 | 7 35 0.125 0.1 0 4 49 0.6424892 2019-07-01T12:19:46.6119040+01:00
9 | 8 -35 0.125 0.1 0 4 49 2.694518 2019-07-01T12:19:49.6953856+01:00
10 | 9 -35 0.125 0.1 0 4 49 2.018845 2019-07-01T12:19:53.9789312+01:00
11 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_biased_lt5/raw_behavior_data/_iblrig_encoderEvents.raw.ssv:
--------------------------------------------------------------------------------
1 | Event 1002228 StateMachine 1 2018-12-11T19:39:06.7438592+00:00
2 | Event 1002228 StateMachine 2 2018-12-11T19:39:07.7443456+00:00
3 | Event 1002228 StateMachine 3 2018-12-11T19:39:07.8443520+00:00
4 | Event 1005200 StateMachine 1 2018-12-11T19:39:27.3099904+00:00
5 | Event 1005200 StateMachine 2 2018-12-11T19:39:28.2120960+00:00
6 | Event 1005200 StateMachine 3 2018-12-11T19:39:28.3121024+00:00
7 | Event 1010532 StateMachine 1 2018-12-11T19:39:06.7438592+00:00
8 | Event 1010532 StateMachine 2 2018-12-11T19:39:07.7443456+00:00
9 | Event 1010532 StateMachine 3 2018-12-11T19:39:07.8443520+00:00
10 | Event 1016998 StateMachine 1 2018-12-11T19:39:27.3099904+00:00
11 | Event 1016998 StateMachine 2 2018-12-11T19:39:28.2120960+00:00
12 | Event 1016998 StateMachine 3 2018-12-11T19:39:28.3121024+00:00
13 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_biased_lt5/raw_behavior_data/_iblrig_encoderPositions.raw.ssv:
--------------------------------------------------------------------------------
1 | Position 1708862576 -80 2019-02-13T14:24:59.3946368+00:00
2 | Position 4192349 -53 2019-02-13T14:25:03.5792896+00:00
3 | Position 4196191 -52 2019-02-13T14:25:03.5830528+00:00
4 | Position 4199991 -51 2019-02-13T14:25:03.5868416+00:00
5 | Position 4203771 -50 2019-02-13T14:25:03.5906560+00:00
6 | Position 4207553 -49 2019-02-13T14:25:03.5944704+00:00
7 | Position 4211351 -48 2019-02-13T14:25:03.5982336+00:00
8 | Position 4215156 -47 2019-02-13T14:25:03.6019712+00:00
9 | Position 4218975 -46 2019-02-13T14:25:03.6057600+00:00
10 | Position 4224751 0 2019-02-13T14:25:03.6341248+00:00
11 | Position 4224747 -45 2019-02-13T14:25:03.6100352+00:00
12 | Position 4224747 -45 2019-02-13T14:25:03.6100352+00:00
13 | Position 4226719 1 2019-02-13T14:25:03.6341888+00:00
14 | Position 4230621 2 2019-02-13T14:25:03.6477696+00:00
15 | Position 4234532 3 2019-02-13T14:25:03.6477824+00:00
16 | Position 4234651 0 2019-02-13T14:25:03.6477952+00:00
17 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_ephys/raw_behavior_data/_iblrig_taskCodeFiles.raw.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/extractors/data/session_ephys/raw_behavior_data/_iblrig_taskCodeFiles.raw.zip
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_ephys/raw_behavior_data/_iblrig_taskDataCodeFiles.raw.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/extractors/data/session_ephys/raw_behavior_data/_iblrig_taskDataCodeFiles.raw.zip
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_ephys/raw_video_data/_iblrig_bodyCamera.GPIO.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/extractors/data/session_ephys/raw_video_data/_iblrig_bodyCamera.GPIO.bin
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_ephys/raw_video_data/_iblrig_bodyCamera.frame_counter.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/extractors/data/session_ephys/raw_video_data/_iblrig_bodyCamera.frame_counter.bin
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_training_ge5/raw_behavior_data/_iblrig_ambientSensorData.raw.jsonable:
--------------------------------------------------------------------------------
1 | {"Temperature_C": [24.389999389648438], "AirPressure_mb": [1035.1329345703125], "RelativeHumidity": [49.6865234375]}
2 | {"Temperature_C": [24.360000610351562], "AirPressure_mb": [1035.1595458984375], "RelativeHumidity": [49.9384765625]}
3 | {"Temperature_C": [24.3700008392334], "AirPressure_mb": [1035.1534423828125], "RelativeHumidity": [49.9921875]}
4 | {"Temperature_C": [24.3799991607666], "AirPressure_mb": [1035.1087646484375], "RelativeHumidity": [49.9931640625]}
5 | {"Temperature_C": [24.3700008392334], "AirPressure_mb": [1035.1473388671875], "RelativeHumidity": [50.013671875]}
6 | {"Temperature_C": [24.3700008392334], "AirPressure_mb": [1035.143310546875], "RelativeHumidity": [50.0341796875]}
7 | {"Temperature_C": [24.3700008392334], "AirPressure_mb": [1035.1151123046875], "RelativeHumidity": [50.0556640625]}
8 | {"Temperature_C": [24.3700008392334], "AirPressure_mb": [1035.1292724609375], "RelativeHumidity": [50.076171875]}
9 | {"Temperature_C": [24.3799991607666], "AirPressure_mb": [1035.1087646484375], "RelativeHumidity": [50.0986328125]}
10 | {"Temperature_C": [24.3799991607666], "AirPressure_mb": [1035.0643310546875], "RelativeHumidity": [50.0576171875]}
11 | {"Temperature_C": [24.3799991607666], "AirPressure_mb": [1035.1490478515625], "RelativeHumidity": [50.078125]}
12 | {"Temperature_C": [24.389999389648438], "AirPressure_mb": [1035.1932373046875], "RelativeHumidity": [50.1005859375]}
13 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_training_ge5/raw_behavior_data/_iblrig_encoderEvents.raw.ssv:
--------------------------------------------------------------------------------
1 | 4469833 2
2 | 4555033 3
3 | 6721407 1
4 | 8106292 2
5 | 8172192 3
6 | 11750550 1
7 | 13263935 2
8 | 13338134 3
9 | 16143602 1
10 | 17766585 2
11 | 17838284 3
12 | 20451956 1
13 | 21829939 2
14 | 21904538 3
15 | 23350522 1
16 | 24931904 2
17 | 25004204 3
18 | 26570885 1
19 | 28130169 2
20 | 28203768 3
21 | 29761751 1
22 | 31282934 2
23 | 31353633 3
24 | 34135202 1
25 | 35728084 2
26 | 35803585 3
27 | 97802893 1
28 | 99489675 2
29 | 99564974 3
30 | 101907248 1
31 | 103452030 2
32 | 103509531 3
33 | 106382898 1
34 | 107766182 2
35 | 107825683 3
36 | 110669550 1
37 | 112063535 2
38 | 112125334 3
39 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_training_ge5/raw_behavior_data/_iblrig_encoderTrialInfo.raw.ssv:
--------------------------------------------------------------------------------
1 | 0 -35 1 0.1 0 4 49 0 2019-07-01T12:15:23.5891968+01:00
2 | 1 35 0.5 0.1 0 8 49 0 2019-07-01T12:15:26.2697600+01:00
3 | 2 35 0.5 0.1 0 8 49 1.334349 2019-07-01T12:15:30.9197824+01:00
4 | 3 -35 0.5 0.1 0 8 49 1.232429 2019-07-01T12:15:35.9867008+01:00
5 | 4 -35 0.5 0.1 0 8 49 2.025711 2019-07-01T12:15:40.3536000+01:00
6 | 5 -35 0.5 0.1 0 8 49 1.184817 2019-07-01T12:15:44.6705408+01:00
7 | 6 -35 0.5 0.1 0 8 49 0.5892029 2019-07-01T12:15:47.5707264+01:00
8 | 7 -35 1 0.1 0 8 49 1.368757 2019-07-01T12:15:50.7875840+01:00
9 | 8 35 1 0.1 0 8 49 1.013087 2019-07-01T12:15:53.9710976+01:00
10 | 9 35 1 0.1 0 8 49 0.6657558 2019-07-01T12:15:58.3880192+01:00
11 | 10 35 1 0.1 0 8 49 2.347584 2019-07-01T12:17:02.1084800+01:00
12 | 11 35 0.5 0.1 0 8 49 3.12149 2019-07-01T12:17:06.1920256+01:00
13 | 12 35 0.5 0.1 0 8 49 0.5976557 2019-07-01T12:17:10.6089600+01:00
14 | 13 35 1 0.1 0 8 49 1.392785 2019-07-01T12:17:14.9091840+01:00
15 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_training_lt5/raw_behavior_data/_iblrig_encoderEvents.raw.ssv:
--------------------------------------------------------------------------------
1 | Event 1000574 StateMachine 1 2018-12-11T19:39:06.7438592+00:00
2 | Event 1000574 StateMachine 2 2018-12-11T19:39:07.7443456+00:00
3 | Event 1000574 StateMachine 3 2018-12-11T19:39:07.8443520+00:00
4 | Event 1698630 StateMachine 1 2018-12-11T19:39:27.3099904+00:00
5 | Event 1698630 StateMachine 2 2018-12-11T19:39:28.2120960+00:00
6 | Event 1698630 StateMachine 3 2018-12-11T19:39:28.3121024+00:00
7 | Event 2327471 StateMachine 1 2018-12-11T19:39:06.7438592+00:00
8 | Event 2327471 StateMachine 2 2018-12-11T19:39:07.7443456+00:00
9 | Event 2327471 StateMachine 3 2018-12-11T19:39:07.8443520+00:00
10 | Event 2496044 StateMachine 1 2018-12-11T19:39:27.3099904+00:00
11 | Event 2496044 StateMachine 2 2018-12-11T19:39:28.2120960+00:00
12 | Event 2496044 StateMachine 3 2018-12-11T19:39:28.3121024+00:00
13 |
14 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/session_training_lt5/raw_behavior_data/_iblrig_encoderPositions.raw.ssv:
--------------------------------------------------------------------------------
1 | Position 4294361553 2 2019-02-07T16:49:20.5192064+00:00
2 | Position 4294410128 3 2019-02-07T16:49:20.5677696+00:00
3 | Position 4294425725 4 2019-02-07T16:49:20.5833472+00:00
4 | Position 4294446023 5 2019-02-07T16:49:20.6036736+00:00
5 | Position 4294510642 4 2019-02-07T16:49:20.6683136+00:00
6 | Position 4294832523 3 2019-02-07T16:49:20.9903232+00:00
7 | Position 849736 0 2019-02-07T16:49:21.9795456+00:00
8 | Position 1532230 0 2019-02-07T16:49:22.6626944+00:00
9 | Position 1822449 1 2019-02-07T16:49:22.9478272+00:00
10 | Position 1833514 2 2019-02-07T16:49:22.9589248+00:00
11 | Position 1841566 3 2019-02-07T16:49:22.9669504+00:00
12 | Position 1848206 4 2019-02-07T16:49:22.9735936+00:00
13 | Position 1853979 5 2019-02-07T16:49:22.9793664+00:00
14 | Position 1859144 6 2019-02-07T16:49:22.9845504+00:00
15 | Position
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/wheel/ge5/_iblrig_encoderEvents.raw.ssv:
--------------------------------------------------------------------------------
1 | 4412 2
2 | 4487 3
3 | 6877 1
4 | 8298 2
5 | 8364 3
6 | 9606 1
7 | 11006 2
8 | 11071 3
9 | 13403 1
10 | 14975 2
11 | 15046 3
12 | 16347 1
13 | 19146 2
14 | 19220 3
15 | 20453 1
16 | 21772 2
17 | 21830 3
18 | 24034 1
19 | 25566 2
20 | 25637 3
21 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/wheel/ge5/_iblrig_encoderPositions.raw.ssv:
--------------------------------------------------------------------------------
1 | 1597 -1
2 | 1606 -2
3 | 1612 -3
4 | 1618 -4
5 | 1622 -5
6 | 1627 -6
7 | 1630 -7
8 | 1634 -8
9 | 1638 -9
10 | 1641 -10
11 | 1645 -11
12 | 1648 -12
13 | 1652 -13
14 | 1656 -14
15 | 1660 -15
16 | 1664 -16
17 | 1668 -17
18 | 1672 -18
19 | 1678 -19
20 | 1684 -20
21 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/wheel/lt5/_iblrig_encoderEvents.raw.00.ssv:
--------------------------------------------------------------------------------
1 | Event 1349173 StateMachine 1 2018-12-11T19:39:06.7438592+00:00
2 | Event 1350170 StateMachine 2 2018-12-11T19:39:07.7443456+00:00
3 | Event 1350270 StateMachine 3 2018-12-11T19:39:07.8443520+00:00
4 | Event 1369737 StateMachine 1 2018-12-11T19:39:27.3099904+00:00
5 | Event 1370636 StateMachine 2 2018-12-11T19:39:28.2120960+00:00
6 | Event 1370736 StateMachine 3 2018-12-11T19:39:28.3121024+00:00
7 | Event 1389582 StateMachine 1 2018-12-11T19:39:47.1606528+00:00
8 | Event 1389582 StateMachine 1 2018-12-11T19:39:52.1606528
9 | Event 1389586 StateMachine 1 2018-12-11T19:39:5
10 | Event 1390259 Sta
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/wheel/lt5/_iblrig_encoderEvents.raw.CorruptMiddle.ssv:
--------------------------------------------------------------------------------
1 | Event 3817978560 StateMachine 3 2019-03-05T15:49:41.0348160-05:00
2 | Event 3827564346 StateMachine 1 2019-03-05T15:49:50.6218880-05:00
3 | Event 3827965141 StateMachine 2 2019-03-05T15:49:51.0228736-05:00
4 | Event 3827965342 StateMachine 3 2019-03-05T15:49:51.0231552-05:00
5 | Event 3830431012 StateMachine 1 2019-03-05T15:49:53.4865792-05:00
6 | Event 3830924207 StateMachine 2 2019-03-05T15:49:53.9822336-05:00
7 | Event 3830924407 StateMachine 3 2019-03-05T15:49:53.9824000-05:00
8 | Event 3841298783 StateMachine 1 2019-03-05T15:50:04.3585152-05:00
9 | ateMachine 3 2019-03-05T15:49:19.9862528-05:00
10 | Event 3817373267 StateMachine 1 2019-03-05T15:49:40.4295808-05:00
11 | Event 3817978360 StateMachine 2 2019-03-05T15:49:41.0346496-05:00
12 | Event 3817978560 StateMachine 3 2019-03-05T15:49:41.0348160-05:00
13 | Event 3827564346 StateMachine 1 2019-03-05T15:49:50.6218880-05:00
14 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/wheel/lt5/_iblrig_encoderEvents.raw.CorruptTimestamp.ssv:
--------------------------------------------------------------------------------
1 | Event 3285495093 StateMachine 3 2019-07-11T14:40:14.1270656-04:00
2 | Event 3287346976 StateMachine 1 2019-07-11T14:40:15.9789056-04:00
3 | Event 3287860271 StateMachine 2 2019-07-11T14:40:16.4929920-04:00
4 | Event 3287860471 StateMachine 3 2019-07-11T14:40:16.4931584-04:00
5 | Event 328965pshhhh
6 | Event 3290201050 StateMachine 2 2019-07-11T14:40:18.8332672-04:00
7 | Event 3290201251 StateMachine 3 2019-07-11T14:40:18.8334336-04:00
8 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/wheel/lt5/_iblrig_encoderPositions.raw.00.ssv:
--------------------------------------------------------------------------------
1 | Position 1708862576 -80 2019-02-13T14:24:59.3946368+00:00
2 | Position 4192349 -53 2019-02-13T14:25:03.5792896+00:00
3 | Position 4196191 -52 2019-02-13T14:25:03.5830528+00:00
4 | Position 4199991 -51 2019-02-13T14:25:03.5868416+00:00
5 | Position 4203771 -50 2019-02-13T14:25:03.5906560+00:00
6 | Position 4207553 -49 2019-02-13T14:25:03.5944704+00:00
7 | Position 4211351 -48 2019-02-13T14:25:03.5982336+00:00
8 | Position 4215156 -47 2019-02-13T14:25:03.6019712+00:00
9 | Position 4218975 -46 2019-02-13T14:25:03.6057600+00:00
10 | Position 4224751 0 2019-02-13T14:25:03.6341248+00:00
11 | Position 4224747 -45 2019-02-13T14:25:03.6100352+00:00
12 | Position 4224747 -45 2019-02-13T14:25:03.6100352+00:00 asdf sdf
13 | Position 4226719 1 2019-02-13T14:25:03.6341888+00:00
14 | Position 4230621 2 2019-02-13T14:25:03.6477696+00:00
15 | Position 4234532 3 2019-02-13T14:25:03.6477824+00:00
16 | Position 4234651 0 2019-02-13T14:25:03.6477952+00:00
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/wheel/lt5/_iblrig_encoderPositions.raw.01.ssv:
--------------------------------------------------------------------------------
1 | Position 4294361553 2 2019-02-07T16:49:20.5192064+00:00
2 | Position 4294410128 3 2019-02-07T16:49:20.5677696+00:00
3 | Position 4294425725 4 2019-02-07T16:49:20.5833472+00:00
4 | Position 4294446023 5 2019-02-07T16:49:20.6036736+00:00
5 | Position 4294510642 4 2019-02-07T16:49:20.6683136+00:00
6 | Position 4294832523 3 2019-02-07T16:49:20.9903232+00:00
7 | Position 849736 0 2019-02-07T16:49:21.9795456+00:00
8 | Position 1532230 0 2019-02-07T16:49:22.6626944+00:00
9 | Position 1822449 1 2019-02-07T16:49:22.9478272+00:00
10 | Position 1833514 2 2019-02-07T16:49:22.9589248+00:00
11 | Position 1841566 3 2019-02-07T16:49:22.9669504+00:00
12 | Position 1848206 4 2019-02-07T16:49:22.9735936+00:00
13 | Position 1853979 5 2019-02-07T16:49:22.9793664+00:00
14 | Position 1859144 6 2019-02-07T16:49:22.9845504+00:00
15 | Position
--------------------------------------------------------------------------------
/ibllib/tests/extractors/data/wheel/lt5/_iblrig_encoderPositions.raw.2firstsamples.ssv:
--------------------------------------------------------------------------------
1 | Position 1708861234 -78 2019-02-13T14:24:59.3945123+00:00
2 | Position 1708862576 -80 2019-02-13T14:24:59.3946368+00:00
3 | Position 4192349 -53 2019-02-13T14:25:03.5792896+00:00
4 | Position 4196191 -52 2019-02-13T14:25:03.5830528+00:00
5 | Position 4199991 -51 2019-02-13T14:25:03.5868416+00:00
6 | Position 4203771 -50 2019-02-13T14:25:03.5906560+00:00
7 | Position 4207553 -49 2019-02-13T14:25:03.5944704+00:00
8 | Position 4211351 -48 2019-02-13T14:25:03.5982336+00:00
9 | Position 4215156 -47 2019-02-13T14:25:03.6019712+00:00
10 | Position 4218975 -46 2019-02-13T14:25:03.6057600+00:00
11 | Position 4224751 0 2019-02-13T14:25:03.6341248+00:00
12 | Position 4224747 -45 2019-02-13T14:25:03.6100352+00:00
13 | Position 4226719 1 2019-02-13T14:25:03.6341888+00:00
14 | Position 4230621 2 2019-02-13T14:25:03.6477696+00:00
15 | Position 4234532 3 2019-02-13T14:25:03.6477824+00:00
16 | Position 4234651 0 2019-02-13T14:25:03.6477952+00:00
17 |
--------------------------------------------------------------------------------
/ibllib/tests/extractors/test_ephys_passive.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Author: Niccolò Bonacchi
# @Date: Friday, October 30th 2020, 10:42:49 am
import unittest

import ibllib.io.extractors.ephys_passive as passive
import numpy as np


class TestsPassiveExtractor(unittest.TestCase):
    """Unit tests for the passive-protocol extractor helpers."""

    def setUp(self):
        pass

    def test_load_passive_stim_meta(self):
        # the bundled stimulus metadata fixture should load as a plain dict
        self.assertTrue(isinstance(passive._load_passive_stim_meta(), dict))

    def test_interpolate_rf_mapping_stimulus(self):
        rising = np.array([0, 4, 8])
        falling = np.array([1, 5, 9])
        frame_times = np.array([0, 1, 4, 5, 8, 9])
        query = np.arange(15)
        # with t_bin=1 the interpolated times can be compared to the query directly
        interpolated = passive._interpolate_rf_mapping_stimulus(
            idxs_up=rising, idxs_dn=falling, times=frame_times, Xq=query, t_bin=1
        )
        self.assertTrue(np.array_equal(interpolated, query))

    def tearDown(self):
        pass


if __name__ == "__main__":
    unittest.main(exit=False, verbosity=2)
--------------------------------------------------------------------------------
/ibllib/tests/fixtures/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/fixtures/__init__.py
--------------------------------------------------------------------------------
/ibllib/tests/fixtures/ephysalignment/alignment_data.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/fixtures/ephysalignment/alignment_data.npz
--------------------------------------------------------------------------------
/ibllib/tests/fixtures/histology/tracks/2019-12-04_KS014_001_probe00_pts.csv:
--------------------------------------------------------------------------------
1 | 200,279,165
2 | 201,279,164
3 | 201,279,163
4 | 200,279,161
5 | 200,279,159
6 | 200,279,158
7 | 200,280,157
8 | 200,280,156
9 | 200,280,154
10 | 200,280,153
11 | 200,280,151
12 | 200,280,150
13 | 200,280,149
14 | 200,280,148
15 | 200,281,147
16 | 200,281,146
17 | 200,281,145
18 | 200,281,143
19 | 200,281,143
20 | 200,281,141
21 | 200,281,140
22 | 200,281,138
23 | 199,281,137
24 | 200,282,135
25 | 200,282,134
26 | 200,282,132
27 | 200,282,130
28 | 200,282,129
29 | 200,283,127
30 | 200,283,126
31 | 200,283,125
32 | 199,283,123
33 | 199,283,122
34 | 200,284,121
35 | 199,284,119
36 | 199,284,118
37 | 199,284,116
38 | 199,284,114
39 | 198,284,113
40 | 199,285,111
41 | 198,285,110
42 | 198,285,108
43 | 199,285,107
44 | 198,286,106
45 | 198,286,104
46 | 198,286,103
47 | 198,287,101
48 | 198,287,100
49 | 198,287,98
50 | 198,288,96
51 | 198,287,94
52 | 197,288,93
53 | 197,288,91
54 | 197,288,89
55 | 196,288,87
56 | 196,289,84
57 | 196,289,83
58 | 196,289,81
59 | 195,289,78
60 | 195,289,76
61 | 196,289,74
62 | 195,290,74
63 | 196,290,72
64 | 195,291,70
65 | 195,292,68
66 | 195,292,66
67 | 195,292,64
68 | 195,292,62
69 | 194,293,60
70 | 194,293,58
71 | 194,292,54
72 | 193,293,49
73 | 191,294,44
74 | 191,295,40
75 | 189,295,34
76 | 188,295,29
77 | 187,295,24
78 | 186,295,20
79 | 185,296,15
80 | 184,296,12
81 | 184,296,9
82 | 183,296,5
83 |
--------------------------------------------------------------------------------
/ibllib/tests/fixtures/io/_ibl_experiment.description.yaml:
--------------------------------------------------------------------------------
1 | devices:
2 | widefield:
3 | widefield:
4 | collection: raw_widefield_data
5 | sync_label: frame_trigger
6 | microphone:
7 | microphone:
8 | collection: raw_behavior_data
9 | sync_label: null
10 | neuropixel:
11 | probe00:
12 | collection: raw_ephys_data/probe00
13 | sync_label: imec_sync
14 | probe01:
15 | collection: raw_ephys_data/probe01
16 | sync_label: imec_sync
17 | procedures:
18 | - Imaging
19 | - Behavior training/tasks
20 | projects:
21 | - ibl_neuropixel_brainwide_01
22 | - ibl_cortexlab
23 | sync:
24 | nidq:
25 | collection: raw_ephys_data
26 | extension: bin
27 | acquisition_software: spikeglx
28 | tasks:
29 | - passiveChoiceWorld:
30 | collection: raw_passive_data
31 | sync_label: bpod
32 | - ephysChoiceWorld:
33 | collection: raw_behavior_data
34 | sync_label: bpod
35 | version: 1.0.0
36 |
--------------------------------------------------------------------------------
/ibllib/tests/fixtures/io/data_loaders/_iblrig_test_mouse_2020-01-01_001/raw_video_data/_iblrig_leftCamera.frameData.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/fixtures/io/data_loaders/_iblrig_test_mouse_2020-01-01_001/raw_video_data/_iblrig_leftCamera.frameData.bin
--------------------------------------------------------------------------------
/ibllib/tests/fixtures/pipes/sample3B_g0_t0.nidq.meta:
--------------------------------------------------------------------------------
1 | acqMnMaXaDw=0,0,1,1
2 | appVersion=20190327
3 | fileCreateTime=2019-08-15T17:37:20
4 | fileName=D:/Testing Data/test4olivier_g0/test4olivier_g0_t0.nidq.bin
5 | fileSHA1=62B0989A934AE4A9C8FA9254CE828BEAFCE31364
6 | fileSizeBytes=98945268
7 | fileTimeSecs=824.4614456108245
8 | firstSample=1738164
9 | gateMode=Immediate
10 | nSavedChans=2
11 | niAiRangeMax=5
12 | niAiRangeMin=-5
13 | niAiTermination=Default
14 | niClockLine1=Internal
15 | niClockSource=PXI1Slot2_1ch_Int : 30003.000300
16 | niDev1=PXI1Slot2
17 | niDev1ProductName=PXIe-6341
18 | niMAChans1=
19 | niMAGain=1
20 | niMNChans1=
21 | niMNGain=200
22 | niMuxFactor=1
23 | niSampRate=30003.0003
24 | niStartEnable=false
25 | niStartLine=PXI1Slot2/port0/line0
26 | niXAChans1=0
27 | niXDBytes1=1
28 | niXDChans1=0:7
29 | snsMnMaXaDw=0,0,1,1
30 | snsSaveChanSubset=all
31 | syncNiChan=3
32 | syncNiChanType=0
33 | syncNiThresh=1.1
34 | syncSourceIdx=3
35 | syncSourcePeriod=1
36 | trigMode=Immediate
37 | typeImEnabled=2
38 | typeNiEnabled=1
39 | typeThis=nidq
40 | userNotes=
41 | ~snsChanMap=(0,0,1,1,1)(XA0;0:0)(XD0;1:1)
42 | ~snsShankMap=(1,2,0)
43 |
--------------------------------------------------------------------------------
/ibllib/tests/fixtures/qc/camera_times.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/fixtures/qc/camera_times.npy
--------------------------------------------------------------------------------
/ibllib/tests/fixtures/qc/data_alignmentqc_existing.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/fixtures/qc/data_alignmentqc_existing.npz
--------------------------------------------------------------------------------
/ibllib/tests/fixtures/qc/data_alignmentqc_manual.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/fixtures/qc/data_alignmentqc_manual.npz
--------------------------------------------------------------------------------
/ibllib/tests/fixtures/qc/pupil_diameter.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/fixtures/qc/pupil_diameter.npy
--------------------------------------------------------------------------------
/ibllib/tests/fixtures/qc/stimOn_times.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/fixtures/qc/stimOn_times.npy
--------------------------------------------------------------------------------
/ibllib/tests/fixtures/qc/wheel.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/fixtures/qc/wheel.npy
--------------------------------------------------------------------------------
/ibllib/tests/fixtures/sync_ephys_fpga/sample3B_g0_t0.nidq.meta:
--------------------------------------------------------------------------------
1 | acqMnMaXaDw=0,0,1,1
2 | appVersion=20190327
3 | fileCreateTime=2019-08-15T17:37:20
4 | fileName=D:/Testing Data/test4olivier_g0/test4olivier_g0_t0.nidq.bin
5 | fileSHA1=62B0989A934AE4A9C8FA9254CE828BEAFCE31364
6 | fileSizeBytes=98945268
7 | fileTimeSecs=824.4614456108245
8 | firstSample=1738164
9 | gateMode=Immediate
10 | nSavedChans=2
11 | niAiRangeMax=5
12 | niAiRangeMin=-5
13 | niAiTermination=Default
14 | niClockLine1=Internal
15 | niClockSource=PXI1Slot2_1ch_Int : 30003.000300
16 | niDev1=PXI1Slot2
17 | niDev1ProductName=PXIe-6341
18 | niMAChans1=
19 | niMAGain=1
20 | niMNChans1=
21 | niMNGain=200
22 | niMuxFactor=1
23 | niSampRate=30003.0003
24 | niStartEnable=false
25 | niStartLine=PXI1Slot2/port0/line0
26 | niXAChans1=0
27 | niXDBytes1=1
28 | niXDChans1=0:7
29 | snsMnMaXaDw=0,0,1,1
30 | snsSaveChanSubset=all
31 | syncNiChan=3
32 | syncNiChanType=0
33 | syncNiThresh=1.1
34 | syncSourceIdx=3
35 | syncSourcePeriod=1
36 | trigMode=Immediate
37 | typeImEnabled=2
38 | typeNiEnabled=1
39 | typeThis=nidq
40 | userNotes=
41 | ~snsChanMap=(0,0,1,1,1)(XA0;0:0)(XD0;1:1)
42 | ~snsShankMap=(1,2,0)
43 |
--------------------------------------------------------------------------------
/ibllib/tests/qc/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/int-brain-lab/ibllib/88d39b1cae30428ab5e8c995a2ac5aa57358eb2e/ibllib/tests/qc/__init__.py
--------------------------------------------------------------------------------
/ibllib/tests/test_time.py:
--------------------------------------------------------------------------------
import unittest
import ibllib.time
import datetime
import pandas as pd


class TestUtils(unittest.TestCase):
    """Unit tests for the small time-conversion helpers in ibllib.time."""

    def test_isostr2date(self):
        # full string with fractional seconds
        with_frac = ibllib.time.isostr2date('2018-03-01T12:34:56.99999')
        self.assertTrue(with_frac == datetime.datetime(2018, 3, 1, 12, 34, 56, 999990))
        # test UTC offset
        # a = ibllib.time.isostr2date('2018-03-01T12:34:56+02:00') # FAILS!
        # same instant without the fractional-second field
        no_frac = ibllib.time.isostr2date('2018-03-01T12:34:56')
        self.assertTrue(no_frac == datetime.datetime(2018, 3, 1, 12, 34, 56))
        # a mixed list input is converted element-wise
        mixed = ['2018-03-01T12:34:56.99999', '2018-03-01T12:34:56']
        from_list = ibllib.time.isostr2date(mixed)
        self.assertTrue((from_list[0] == with_frac) and (from_list[1] == no_frac))
        # a pandas Series input behaves like a list
        from_series = ibllib.time.isostr2date(pd.Series(mixed))
        self.assertTrue((from_series[0] == with_frac) and (from_series[1] == no_frac))

    def test_date2isostr(self):
        expected = '2018-08-14T00:00:00'
        # both a bare date and a midnight datetime should serialize identically
        for value in (datetime.date(2018, 8, 14), datetime.datetime(2018, 8, 14)):
            self.assertEqual(expected, ibllib.time.date2isostr(value))


if __name__ == "__main__":
    unittest.main(exit=False, verbosity=2)
36 |
--------------------------------------------------------------------------------
/ibllib/time.py:
--------------------------------------------------------------------------------
1 | # library of small functions
2 | import datetime
3 | import numpy as np
4 |
5 |
def isostr2date(isostr):
    """
    Convert strings representing dates into datetime.datetime objects aimed ad Django REST API
    ISO 8601: '2018-05-22T14:35:22.99585', '2018-05-22T14:35:22' or with a UTC offset
    such as '2018-05-22T14:35:22+02:00' (the latter returns an aware datetime).

    :param isostr: a string, list of strings or panda Series / numpy arrays containing strings
    :return: a datetime.datetime scalar, or a list of them for iterable input
    """
    # NB this is intended for scalars or small list. See the ciso8601 pypi module instead for
    # a performance implementation
    if not isinstance(isostr, str):
        return [isostr2date(el) for el in isostr]

    # build the strptime format incrementally ('fmt' avoids shadowing the builtin 'format')
    fmt = '%Y-%m-%dT%H:%M:%S'
    if '.' in isostr:
        fmt += '.%f'  # fractional seconds present
    if '+' in isostr:
        # bugfix: the original appended '.%f' a second time here, so any string carrying
        # a UTC offset (e.g. '2018-03-01T12:34:56+02:00') failed to parse; '%z' consumes
        # the '+HH:MM' offset and produces a timezone-aware datetime
        fmt += '%z'
    return datetime.datetime.strptime(isostr, fmt)
25 |
26 |
def date2isostr(adate):
    """Return the ISO 8601 string for a date or datetime object.

    Plain ``datetime.date`` inputs are promoted to midnight datetimes first, so the
    output always carries a time component.
    """
    # NB this is intended for scalars or small list. See the ciso8601 pypi module instead for
    # a performance implementation
    # exact type check on purpose: datetime is a subclass of date and must not be converted
    if type(adate) is datetime.date:
        adate = datetime.datetime.fromordinal(adate.toordinal())
    return adate.isoformat()
33 |
34 |
def convert_pgts(time):
    """Convert PointGray cameras timestamps to seconds.
    Use convert then uncycle"""
    # The raw stamp packs three fields: bits 0-11 are a sub-cycle offset (unused
    # here), bits 12-24 count 8 kHz ticks, and bits 25-31 count whole seconds.
    ticks_8khz = (time >> 12) & 0x1FFF
    whole_seconds = (time >> 25) & 0x7F
    return whole_seconds + ticks_8khz / 8000.
43 |
44 |
def uncycle_pgts(time):
    """Unwrap the converted seconds of a PointGray camera timestamp series."""
    # The 7-bit seconds counter wraps every 128 s; every backwards jump in the
    # series marks one wrap, so add 128 s per wrap observed so far.
    wrapped = np.concatenate(([False], np.diff(time) < 0))
    return time + np.cumsum(wrapped) * 128
50 |
--------------------------------------------------------------------------------
/readthedocs.yml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yml
2 |
3 | build:
4 | image: latest
5 |
6 | python:
7 | version: 3.6
8 |
--------------------------------------------------------------------------------
/requirements-analysis.txt:
--------------------------------------------------------------------------------
1 | click>=7.0.0
2 | colorlog>=4.0.2
3 | flake8>=3.7.8
4 | globus-sdk>=1.7.1
5 | jupyter>=1.0
6 | jupyterlab>=1.0
7 | matplotlib>=3.0.3
8 | mtscomp>=1.0.1
9 | numpy>=1.16.4
10 | opencv-python
11 | pandas>=0.24.2
12 | phylib>=2.2
13 | pynrrd>=0.4.0
14 | requests>=2.22.0
15 | scikit-learn>=0.22.1
16 | scipy>=1.3.0
17 | seaborn>=0.9.0
18 | torch>=1.6.0
19 | tqdm>=4.32.1
20 | pyqt5
21 | pyqtgraph
22 | ipython
23 | datajoint
24 | psychofit
25 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | boto3
2 | click>=7.0.0
3 | colorlog>=4.0.2
4 | flake8>=3.7.8
5 | globus-sdk
6 | graphviz
7 | matplotlib>=3.0.3
8 | numba>=0.56
9 | numpy>=1.18
10 | nptdms
11 | opencv-python-headless
12 | pandas
13 | pyarrow
14 | pynrrd>=0.4.0
15 | pytest
16 | requests>=2.22.0
17 | scikit-learn>=0.22.1
18 | scipy>=1.7.0
19 | scikit-image # this is a widefield requirement missing as of July 2023, we may remove it once wfield has this figured out
20 | imagecodecs # used to convert tif snapshots to png when registering mesoscope snapshots (also requires skimage)
21 | sparse
22 | seaborn>=0.9.0
23 | tqdm>=4.32.1
24 | # ibl libraries
25 | iblatlas>=0.5.3
26 | ibl-neuropixel>=1.6.2
27 | iblutil>=1.13.0
28 | iblqt>=0.4.2
29 | mtscomp>=1.0.1
30 | ONE-api>=3.0.0
31 | phylib>=2.6.0
32 | psychofit
33 | slidingRP>=1.1.1 # steinmetz lab refractory period metrics
34 | pyqt5
35 | ibl-style
36 |
--------------------------------------------------------------------------------
/ruff.toml:
--------------------------------------------------------------------------------
1 | line-length = 130
2 |
3 | [format]
4 | quote-style = "single"
5 |
--------------------------------------------------------------------------------
/run_tests:
--------------------------------------------------------------------------------
1 | python -m unittest discover
2 |
--------------------------------------------------------------------------------
/run_tests.bat:
--------------------------------------------------------------------------------
1 | python -m unittest discover
2 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
import sys
from pathlib import Path

from setuptools import find_packages, setup

CURRENT_DIRECTORY = Path(__file__).parent.absolute()

CURRENT_PYTHON = sys.version_info[:2]
REQUIRED_PYTHON = (3, 10)
VER_ERR_MSG = """
==========================
Unsupported Python version
==========================
This version of ibllib requires Python {}.{}, but you're trying to
install it on Python {}.{}.
"""
# refuse to install on interpreters older than the supported minimum
if CURRENT_PYTHON < REQUIRED_PYTHON:
    sys.stderr.write(VER_ERR_MSG.format(*REQUIRED_PYTHON + CURRENT_PYTHON))
    sys.exit(1)

# the PyPI long description comes straight from the README
with open("README.md", "r") as readme:
    long_description = readme.read()

# runtime requirements, excluding any direct git references
with open("requirements.txt") as reqs:
    require = [line.strip() for line in reqs if not line.startswith("git+")]
26 |
27 |
def read(rel_path):
    """Return the text content of *rel_path*, resolved against this file's directory."""
    base_dir = Path(__file__).parent.absolute()
    with open(base_dir / rel_path, "r") as fp:
        return fp.read()
32 |
33 |
def get_version(rel_path):
    """Return the value assigned to ``__version__`` in the file at *rel_path*.

    Raises a RuntimeError when no ``__version__`` line is found.
    """
    for line in read(rel_path).splitlines():
        if not line.startswith("__version__"):
            continue
        # the version may be quoted with either quote character
        quote = '"' if '"' in line else "'"
        return line.split(quote)[1]
    raise RuntimeError("Unable to find version string.")
41 |
42 |
# Package metadata and installation configuration for ibllib.
setup(
    name="ibllib",
    # version is read from ibllib/__init__.py's __version__ attribute
    version=get_version(Path("ibllib").joinpath("__init__.py")),
    python_requires=">={}.{}".format(*REQUIRED_PYTHON),
    description="IBL libraries",
    license="MIT",
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="IBL Staff",
    url="https://www.internationalbrainlab.com/",
    packages=find_packages(exclude=["scratch"]),  # same as name
    include_package_data=True,
    # external packages as dependencies
    install_requires=require,
    entry_points={
        'console_scripts': [
            # exposes the task QC viewer GUI as a command-line tool
            'task_qc = ibllib.qc.task_qc_viewer.task_qc:qc_gui_cli',
        ],
    },
    extras_require={
        # optional widefield-imaging support: pip install ibllib[wfield]
        'wfield': ['wfield==0.3.7', 'labcams'],
    },
    scripts=[],
)
--------------------------------------------------------------------------------