├── .all-contributorsrc ├── .binder └── Dockerfile ├── .codecov.yml ├── .coveragerc ├── .gitattributes ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── config.yml │ ├── doc_improvement.yml │ ├── feature_request.yml │ └── other_issue.yml ├── PULL_REQUEST_TEMPLATE.md ├── actions │ ├── cpu_all_extras │ │ └── action.yml │ └── numba_cache │ │ └── action.yml ├── dependabot.yml ├── utilities │ ├── ai_spam.py │ ├── changelog_generator.py │ ├── codespell_ignore_words.txt │ ├── generate_developer_tables.py │ ├── issue_assign.py │ ├── pr_labeler.py │ ├── pr_open_commenter.py │ ├── pr_welcome_edited.py │ ├── remove_good_first_issue.py │ └── run_examples.sh └── workflows │ ├── ai_spam.yml │ ├── fast_release.yml │ ├── issue_assigned.yml │ ├── issue_comment_edited.yml │ ├── issue_comment_posted.yml │ ├── periodic_tests.yml │ ├── pr_core_dep_import.yml │ ├── pr_examples.yml │ ├── pr_opened.yml │ ├── pr_precommit.yml │ ├── pr_pytest.yml │ ├── pr_typecheck.yml │ ├── release.yml │ ├── update_contributors.yml │ └── weekly_github_maintenance.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yml ├── CHANGELOG.md ├── CODEOWNERS ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── CONTRIBUTORS.md ├── GOVERNANCE.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── aeon ├── __init__.py ├── anomaly_detection │ ├── __init__.py │ ├── base.py │ ├── collection │ │ ├── __init__.py │ │ ├── _classification.py │ │ ├── _outlier_detection.py │ │ └── base.py │ └── series │ │ ├── __init__.py │ │ ├── _pyodadapter.py │ │ ├── base.py │ │ ├── distance_based │ │ ├── __init__.py │ │ ├── _cblof.py │ │ ├── _kmeans.py │ │ ├── _left_stampi.py │ │ ├── _lof.py │ │ ├── _merlin.py │ │ ├── _rockad.py │ │ ├── _stomp.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_cblof.py │ │ │ ├── test_kmeans.py │ │ │ ├── test_left_stampi.py │ │ │ ├── test_lof.py │ │ │ ├── test_merlin.py │ │ │ ├── test_rockad.py │ │ │ └── test_stomp.py │ │ ├── distribution_based │ │ ├── __init__.py │ │ ├── _copod.py │ │ ├── _dwt_mlead.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_copod.py │ │ │ └── test_dwt_mlead.py │ │ ├── outlier_detection │ │ ├── __init__.py │ │ ├── _iforest.py │ │ ├── _one_class_svm.py │ │ ├── _stray.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_iforest.py │ │ │ ├── test_one_class_svm.py │ │ │ └── test_stray.py │ │ └── tests │ │ ├── __init__.py │ │ ├── test_base.py │ │ └── test_pyod_adapter.py ├── base │ ├── __init__.py │ ├── _base.py │ ├── _base_collection.py │ ├── _base_series.py │ ├── _compose.py │ ├── _estimators │ │ ├── __init__.py │ │ ├── compose │ │ │ ├── __init__.py │ │ │ ├── _commons.py │ │ │ ├── collection_channel_ensemble.py │ │ │ ├── collection_ensemble.py │ │ │ ├── collection_pipeline.py │ │ │ └── series_pipeline.py │ │ ├── hybrid │ │ │ ├── __init__.py │ │ │ ├── base_rist.py │ │ │ └── tests │ │ │ │ ├── __init__.py │ │ │ │ └── test_base_rist.py │ │ └── interval_based │ │ │ ├── __init__.py │ │ │ ├── base_interval_forest.py │ │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_base_interval_forest.py │ └── tests │ │ ├── __init__.py │ │ ├── test_base.py │ │ ├── test_base_collection.py │ │ ├── test_base_series.py │ │ └── test_compose.py ├── benchmarking │ ├── __init__.py │ ├── metrics │ │ ├── __init__.py │ │ ├── anomaly_detection │ │ │ ├── __init__.py │ │ │ ├── _continuous.py │ │ │ ├── _range_metrics.py │ │ │ ├── _range_ts_metrics.py │ │ │ ├── _util.py │ │ │ ├── _vus_metrics.py │ │ │ ├── tests │ │ │ │ ├── __init__.py │ │ │ │ ├── test_ad_metrics.py │ │ │ │ ├── test_range_ts_metrics.py │ │ │ │ └── 
test_thresholding.py │ │ │ └── thresholding.py │ │ ├── clustering.py │ │ ├── segmentation.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_clustering.py │ │ │ └── test_segmentation.py │ ├── published_results.py │ ├── resampling.py │ ├── results_loaders.py │ ├── stats.py │ └── tests │ │ ├── __init__.py │ │ ├── test_published_results.py │ │ ├── test_resampling.py │ │ ├── test_results_loaders.py │ │ └── test_stats.py ├── classification │ ├── __init__.py │ ├── base.py │ ├── compose │ │ ├── __init__.py │ │ ├── _channel_ensemble.py │ │ ├── _ensemble.py │ │ ├── _pipeline.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_ensemble.py │ │ │ └── test_pipeline.py │ ├── convolution_based │ │ ├── __init__.py │ │ ├── _arsenal.py │ │ ├── _hydra.py │ │ ├── _minirocket.py │ │ ├── _mr_hydra.py │ │ ├── _multirocket.py │ │ ├── _rocket.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_arsenal.py │ ├── deep_learning │ │ ├── __init__.py │ │ ├── _cnn.py │ │ ├── _disjoint_cnn.py │ │ ├── _encoder.py │ │ ├── _fcn.py │ │ ├── _inception_time.py │ │ ├── _lite_time.py │ │ ├── _mlp.py │ │ ├── _resnet.py │ │ ├── base.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_deep_classifier_base.py │ │ │ ├── test_inception_time.py │ │ │ └── test_lite_time.py │ ├── dictionary_based │ │ ├── __init__.py │ │ ├── _boss.py │ │ ├── _cboss.py │ │ ├── _mrseql.py │ │ ├── _mrsqm.py │ │ ├── _muse.py │ │ ├── _redcomets.py │ │ ├── _tde.py │ │ ├── _weasel.py │ │ ├── _weasel_v2.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_boss.py │ │ │ ├── test_muse.py │ │ │ ├── test_redcomets.py │ │ │ ├── test_tde.py │ │ │ └── test_weasel.py │ ├── distance_based │ │ ├── __init__.py │ │ ├── _elastic_ensemble.py │ │ ├── _proximity_forest.py │ │ ├── _proximity_tree.py │ │ ├── _time_series_neighbors.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_elastic_ensemble.py │ │ │ ├── test_proximity_tree.py │ │ │ └── test_time_series_neighbors.py │ ├── dummy.py │ ├── early_classification │ │ ├── __init__.py │ │ ├── _probability_threshold.py │ │ ├── _teaser.py │ │ ├── base.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_probability_threshold.py │ │ │ └── test_teaser.py │ ├── feature_based │ │ ├── __init__.py │ │ ├── _catch22.py │ │ ├── _fresh_prince.py │ │ ├── _signature_classifier.py │ │ ├── _summary.py │ │ ├── _tdmvdc.py │ │ ├── _tsfresh.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_catch22.py │ │ │ ├── test_signature.py │ │ │ ├── test_summary.py │ │ │ ├── test_tdmvdc.py │ │ │ └── test_tsfresh.py │ ├── hybrid │ │ ├── __init__.py │ │ ├── _hivecote_v1.py │ │ ├── _hivecote_v2.py │ │ ├── _rist.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_hc.py │ ├── interval_based │ │ ├── __init__.py │ │ ├── _cif.py │ │ ├── _drcif.py │ │ ├── _interval_forest.py │ │ ├── _interval_pipelines.py │ │ ├── _quant.py │ │ ├── _rise.py │ │ ├── _rstsf.py │ │ ├── _stsf.py │ │ ├── _tsf.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_interval_forests.py │ │ │ ├── test_interval_pipelines.py │ │ │ └── test_quant.py │ ├── ordinal_classification │ │ ├── __init__.py │ │ ├── _ordinal_tde.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_ordinal_tde.py │ ├── shapelet_based │ │ ├── __init__.py │ │ ├── _ls.py │ │ ├── _rdst.py │ │ ├── _rsast.py │ │ ├── _sast.py │ │ ├── _stc.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_ls.py │ │ │ ├── test_rdst.py │ │ │ ├── test_sast.py │ │ │ └── test_stc.py │ ├── sklearn │ │ ├── __init__.py │ │ ├── _continuous_interval_tree.py │ │ ├── _rotation_forest_classifier.py │ │ ├── _wrapper.py │ │ └── tests │ │ │ ├── 
__init__.py │ │ │ ├── test_all_classifiers.py │ │ │ ├── test_continuous_interval_tree.py │ │ │ └── test_rotation_forest_classifier.py │ └── tests │ │ ├── __init__.py │ │ ├── test_base.py │ │ ├── test_dummy.py │ │ └── test_sklearn_compatability.py ├── clustering │ ├── __init__.py │ ├── _clara.py │ ├── _clarans.py │ ├── _elastic_som.py │ ├── _k_means.py │ ├── _k_medoids.py │ ├── _k_sc.py │ ├── _k_shape.py │ ├── _kasba.py │ ├── _kernel_k_means.py │ ├── averaging │ │ ├── __init__.py │ │ ├── _averaging.py │ │ ├── _ba_petitjean.py │ │ ├── _ba_subgradient.py │ │ ├── _ba_utils.py │ │ ├── _barycenter_averaging.py │ │ ├── _kasba_average.py │ │ ├── _shift_scale_invariant_averaging.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_dba.py │ │ │ └── test_shift_scale_invariant.py │ ├── base.py │ ├── compose │ │ ├── __init__.py │ │ ├── _pipeline.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_pipeline.py │ ├── deep_learning │ │ ├── __init__.py │ │ ├── _ae_abgru.py │ │ ├── _ae_bgru.py │ │ ├── _ae_dcnn.py │ │ ├── _ae_drnn.py │ │ ├── _ae_fcn.py │ │ ├── _ae_resnet.py │ │ ├── base.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_deep_clusterer_base.py │ │ │ └── test_deep_clusterer_features.py │ ├── dummy.py │ ├── feature_based │ │ ├── __init__.py │ │ ├── _catch22.py │ │ ├── _summary.py │ │ ├── _tsfresh.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_catch22.py │ │ │ ├── test_summary.py │ │ │ └── test_tsfresh.py │ └── tests │ │ ├── __init__.py │ │ ├── test_base.py │ │ ├── test_clara.py │ │ ├── test_clarans.py │ │ ├── test_dummy.py │ │ ├── test_elastic_som.py │ │ ├── test_k_means.py │ │ ├── test_k_medoids.py │ │ ├── test_k_sc.py │ │ ├── test_k_shape.py │ │ ├── test_kasba.py │ │ └── test_kernel_k_means.py ├── datasets │ ├── __init__.py │ ├── _data_loaders.py │ ├── _data_writers.py │ ├── _single_problem_loaders.py │ ├── _tsad_data_loaders.py │ ├── _tss_data_loaders.py │ ├── data │ │ ├── ACSF1 │ │ │ ├── ACSF1_TEST.ts │ │ │ └── ACSF1_TRAIN.ts │ │ ├── Airline │ │ │ └── Airline.csv │ │ ├── ArrowHead │ │ │ ├── ArrowHead_TEST.arff │ │ │ ├── ArrowHead_TEST.ts │ │ │ ├── ArrowHead_TRAIN.arff │ │ │ ├── ArrowHead_TRAIN.ts │ │ │ └── ArrowHead_TRAIN.tsv │ │ ├── BasicMotions │ │ │ ├── BasicMotions_TEST.arff │ │ │ ├── BasicMotions_TEST.ts │ │ │ ├── BasicMotions_TRAIN.arff │ │ │ └── BasicMotions_TRAIN.ts │ │ ├── CardanoSentiment │ │ │ ├── CardanoSentiment_TEST.ts │ │ │ └── CardanoSentiment_TRAIN.ts │ │ ├── Covid3Month │ │ │ ├── Covid3Month_TEST.ts │ │ │ ├── Covid3Month_TRAIN.ts │ │ │ ├── Covid3Month_disc_TEST.ts │ │ │ └── Covid3Month_disc_TRAIN.ts │ │ ├── Daphnet_S06R02E0 │ │ │ └── S06R02E0.csv │ │ ├── GunPoint │ │ │ ├── GunPoint_TEST.ts │ │ │ ├── GunPoint_TRAIN.arff │ │ │ ├── GunPoint_TRAIN.ts │ │ │ └── GunPoint_TRAIN.tsv │ │ ├── ItalyPowerDemand │ │ │ ├── ItalyPowerDemand_TEST.arff │ │ │ ├── ItalyPowerDemand_TEST.ts │ │ │ ├── ItalyPowerDemand_TRAIN.arff │ │ │ └── ItalyPowerDemand_TRAIN.ts │ │ ├── JapaneseVowels │ │ │ ├── JapaneseVowels_TEST.ts │ │ │ ├── JapaneseVowels_TRAIN.ts │ │ │ ├── JapaneseVowels_eq_TEST.ts │ │ │ └── JapaneseVowels_eq_TRAIN.ts │ │ ├── KDD-TSAD_135 │ │ │ ├── 135_UCR_Anomaly_InternalBleeding16_TEST.csv │ │ │ └── 135_UCR_Anomaly_InternalBleeding16_TRAIN.csv │ │ ├── Longley │ │ │ └── Longley.csv │ │ ├── Lynx │ │ │ └── Lynx.csv │ │ ├── OSULeaf │ │ │ ├── OSULeaf_TEST.ts │ │ │ └── OSULeaf_TRAIN.ts │ │ ├── PBS_dataset │ │ │ └── PBS_dataset.csv │ │ ├── PLAID │ │ │ ├── PLAID_TEST.arff │ │ │ ├── PLAID_TEST.ts │ │ │ ├── PLAID_TRAIN.arff │ │ │ └── PLAID_TRAIN.ts │ │ ├── PickupGestureWiimoteZ │ │ │ 
├── PickupGestureWiimoteZ_TEST.ts │ │ │ ├── PickupGestureWiimoteZ_TRAIN.ts │ │ │ ├── PickupGestureWiimoteZ_eq_TEST.ts │ │ │ └── PickupGestureWiimoteZ_eq_TRAIN.ts │ │ ├── ShampooSales │ │ │ └── ShampooSales.csv │ │ ├── UnitTest │ │ │ ├── UnitTestTimeStamps_TRAIN.ts │ │ │ ├── UnitTest_TEST.ts │ │ │ ├── UnitTest_TRAIN.ts │ │ │ ├── UnitTest_Tsf_Loader.tsf │ │ │ ├── UnitTest_Tsf_Loader_hierarchical.tsf │ │ │ ├── UnitTest_Tsf_Loader_no_start_timestamp.tsf │ │ │ ├── ecg-diff-count-3_TEST.csv │ │ │ ├── ecg-diff-count-3_TRAIN_A.csv │ │ │ └── ecg-diff-count-3_TRAIN_NA.csv │ │ ├── Uschange │ │ │ └── Uschange.csv │ │ ├── m1_yearly_dataset │ │ │ └── m1_yearly_dataset.tsf │ │ ├── segmentation │ │ │ ├── ElectricDevices.csv │ │ │ └── GunPoint.csv │ │ └── solar │ │ │ └── solar.csv │ ├── dataset_collections.py │ ├── tests │ │ ├── __init__.py │ │ ├── test_data_loaders.py │ │ ├── test_data_writers.py │ │ ├── test_dataset_collections.py │ │ ├── test_load_forecasting.py │ │ ├── test_single_problem_loaders.py │ │ ├── test_tsad_data_loader.py │ │ ├── test_tsad_datasets.py │ │ └── test_tss_data_loader.py │ ├── tsad_datasets.py │ ├── tsc_datasets.py │ ├── tser_datasets.py │ └── tsf_datasets.py ├── distances │ ├── __init__.py │ ├── _distance.py │ ├── _mpdist.py │ ├── _sbd.py │ ├── _shift_scale_invariant.py │ ├── elastic │ │ ├── __init__.py │ │ ├── _adtw.py │ │ ├── _alignment_paths.py │ │ ├── _bounding_matrix.py │ │ ├── _ddtw.py │ │ ├── _dtw.py │ │ ├── _dtw_gi.py │ │ ├── _edr.py │ │ ├── _erp.py │ │ ├── _lcss.py │ │ ├── _msm.py │ │ ├── _shape_dtw.py │ │ ├── _soft_dtw.py │ │ ├── _twe.py │ │ ├── _wddtw.py │ │ ├── _wdtw.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_alignment_path.py │ │ │ ├── test_bounding.py │ │ │ ├── test_cost_matrix.py │ │ │ └── test_distance_correctness.py │ ├── mindist │ │ ├── __init__.py │ │ ├── _dft_sfa.py │ │ ├── _paa_sax.py │ │ ├── _sax.py │ │ └── _sfa.py │ ├── pointwise │ │ ├── __init__.py │ │ ├── _euclidean.py │ │ ├── _manhattan.py │ │ ├── _minkowski.py │ │ └── _squared.py │ └── tests │ │ ├── __init__.py │ │ ├── test_distances.py │ │ ├── test_miscellaneous_distances.py │ │ ├── test_mpdist.py │ │ ├── test_numba_distance_parameters.py │ │ ├── test_pairwise.py │ │ ├── test_sklearn_compatibility.py │ │ └── test_symbolic_mindist.py ├── forecasting │ ├── __init__.py │ ├── _ets.py │ ├── _naive.py │ ├── _regression.py │ ├── _tvp.py │ ├── base.py │ └── tests │ │ ├── __init__.py │ │ ├── test_base.py │ │ ├── test_ets.py │ │ ├── test_naive.py │ │ ├── test_regressor.py │ │ └── test_tvp.py ├── networks │ ├── __init__.py │ ├── _ae_abgru.py │ ├── _ae_bgru.py │ ├── _ae_dcnn.py │ ├── _ae_drnn.py │ ├── _ae_fcn.py │ ├── _ae_resnet.py │ ├── _cnn.py │ ├── _dcnn.py │ ├── _disjoint_cnn.py │ ├── _encoder.py │ ├── _fcn.py │ ├── _inception.py │ ├── _lite.py │ ├── _mlp.py │ ├── _resnet.py │ ├── _rnn.py │ ├── base.py │ └── tests │ │ ├── __init__.py │ │ ├── test_ae_abgru.py │ │ ├── test_ae_bgru.py │ │ ├── test_ae_dcnn.py │ │ ├── test_ae_drnn.py │ │ ├── test_ae_fcn.py │ │ ├── test_ae_resnet.py │ │ ├── test_all_networks.py │ │ ├── test_dcnn.py │ │ ├── test_disjoint_cnn.py │ │ ├── test_fcn.py │ │ ├── test_inception.py │ │ ├── test_mlp.py │ │ ├── test_network_base.py │ │ ├── test_resnet.py │ │ ├── test_rnn.py │ │ └── test_time_cnn.py ├── pipeline │ ├── __init__.py │ ├── _make_pipeline.py │ ├── _sklearn_to_aeon.py │ └── tests │ │ ├── __init__.py │ │ ├── test_make_pipeline.py │ │ └── test_sklearn_to_aeon.py ├── regression │ ├── __init__.py │ ├── _dummy.py │ ├── base.py │ ├── compose │ │ ├── __init__.py │ │ ├── 
_ensemble.py │ │ ├── _pipeline.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_ensemble.py │ │ │ └── test_pipeline.py │ ├── convolution_based │ │ ├── __init__.py │ │ ├── _hydra.py │ │ ├── _minirocket.py │ │ ├── _mr_hydra.py │ │ ├── _multirocket.py │ │ └── _rocket.py │ ├── deep_learning │ │ ├── __init__.py │ │ ├── _cnn.py │ │ ├── _disjoint_cnn.py │ │ ├── _encoder.py │ │ ├── _fcn.py │ │ ├── _inception_time.py │ │ ├── _lite_time.py │ │ ├── _mlp.py │ │ ├── _resnet.py │ │ ├── _rnn.py │ │ ├── base.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_deep_regressor_base.py │ ├── distance_based │ │ ├── __init__.py │ │ ├── _time_series_neighbors.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_time_series_neighbors.py │ ├── feature_based │ │ ├── __init__.py │ │ ├── _catch22.py │ │ ├── _fresh_prince.py │ │ ├── _summary.py │ │ └── _tsfresh.py │ ├── hybrid │ │ ├── __init__.py │ │ └── _rist.py │ ├── interval_based │ │ ├── __init__.py │ │ ├── _cif.py │ │ ├── _drcif.py │ │ ├── _interval_forest.py │ │ ├── _interval_pipelines.py │ │ ├── _quant.py │ │ ├── _rise.py │ │ ├── _tsf.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_interval_forests.py │ │ │ └── test_interval_pipelines.py │ ├── shapelet_based │ │ ├── __init__.py │ │ └── _rdst.py │ ├── sklearn │ │ ├── __init__.py │ │ ├── _rotation_forest_regressor.py │ │ ├── _wrapper.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_all_regressors.py │ │ │ └── test_rotation_forest_regressor.py │ └── tests │ │ ├── __init__.py │ │ ├── test_base.py │ │ └── test_dummy.py ├── segmentation │ ├── __init__.py │ ├── _binseg.py │ ├── _clasp.py │ ├── _eagglo.py │ ├── _fluss.py │ ├── _ggs.py │ ├── _hidalgo.py │ ├── _hmm.py │ ├── _igts.py │ ├── _random.py │ ├── base.py │ └── tests │ │ ├── __init__.py │ │ ├── test_base.py │ │ ├── test_binseg.py │ │ ├── test_clasp.py │ │ ├── test_eagglo.py │ │ ├── test_fluss.py │ │ ├── test_ggs.py │ │ ├── test_hidalgo.py │ │ ├── test_hmm.py │ │ ├── test_igts.py │ │ └── test_random.py ├── similarity_search │ ├── __init__.py │ ├── _base.py │ ├── collection │ │ ├── __init__.py │ │ ├── _base.py │ │ ├── motifs │ │ │ └── __init__.py │ │ ├── neighbors │ │ │ ├── __init__.py │ │ │ ├── _rp_cosine_lsh.py │ │ │ └── tests │ │ │ │ ├── __init__.py │ │ │ │ └── test_rp_cosine_lsh.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_base.py │ ├── series │ │ ├── __init__.py │ │ ├── _base.py │ │ ├── _commons.py │ │ ├── motifs │ │ │ ├── __init__.py │ │ │ ├── _stomp.py │ │ │ └── tests │ │ │ │ ├── __init__.py │ │ │ │ └── test_stomp.py │ │ ├── neighbors │ │ │ ├── __init__.py │ │ │ ├── _dummy.py │ │ │ ├── _mass.py │ │ │ └── tests │ │ │ │ ├── __init__.py │ │ │ │ ├── test_dummy.py │ │ │ │ └── test_mass.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_base.py │ │ │ └── test_commons.py │ └── tests │ │ └── __init__.py ├── testing │ ├── __init__.py │ ├── _cicd_numba_caching.py │ ├── data_generation │ │ ├── __init__.py │ │ ├── _collection.py │ │ ├── _series.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_collection.py │ │ │ └── test_series.py │ ├── estimator_checking │ │ ├── __init__.py │ │ ├── _estimator_checking.py │ │ ├── _yield_anomaly_detection_checks.py │ │ ├── _yield_classification_checks.py │ │ ├── _yield_clustering_checks.py │ │ ├── _yield_collection_anomaly_detection_checks.py │ │ ├── _yield_early_classification_checks.py │ │ ├── _yield_estimator_checks.py │ │ ├── _yield_forecasting_checks.py │ │ ├── _yield_multithreading_checks.py │ │ ├── _yield_regression_checks.py │ │ ├── _yield_segmentation_checks.py │ │ ├── 
_yield_series_anomaly_detection_checks.py │ │ ├── _yield_soft_dependency_checks.py │ │ ├── _yield_transformation_checks.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_check_estimator.py │ ├── example_results_files │ │ └── classification │ │ │ └── accuracy │ │ │ ├── FreshPRINCE_accuracy.csv │ │ │ ├── HC2_accuracy.csv │ │ │ ├── InceptionTime_accuracy.csv │ │ │ └── WEASEL-2.0_accuracy.csv │ ├── expected_results │ │ ├── __init__.py │ │ ├── expected_classifier_outputs.py │ │ ├── expected_distance_results.py │ │ ├── expected_regressor_outputs.py │ │ ├── expected_transform_outputs.py │ │ ├── results_reproduction │ │ │ ├── __init__.py │ │ │ ├── classifier_results_reproduction.py │ │ │ ├── regressor_results_reproduction.py │ │ │ └── transform_results_reproduction.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_expected_outputs.py │ ├── mock_estimators │ │ ├── __init__.py │ │ ├── _mock_anomaly_detectors.py │ │ ├── _mock_classifiers.py │ │ ├── _mock_clusterers.py │ │ ├── _mock_collection_transformers.py │ │ ├── _mock_forecasters.py │ │ ├── _mock_regressors.py │ │ ├── _mock_segmenters.py │ │ ├── _mock_series_transformers.py │ │ └── _mock_similarity_searchers.py │ ├── testing_config.py │ ├── testing_data.py │ ├── tests │ │ ├── __init__.py │ │ ├── test_all_estimators.py │ │ ├── test_core_imports.py │ │ ├── test_softdeps.py │ │ └── test_testing_data.py │ └── utils │ │ ├── __init__.py │ │ ├── deep_equals.py │ │ ├── estimator_checks.py │ │ ├── output_suppression.py │ │ └── tests │ │ ├── __init__.py │ │ ├── test_deep_equals.py │ │ └── test_output_supression.py ├── transformations │ ├── __init__.py │ ├── base.py │ ├── collection │ │ ├── __init__.py │ │ ├── _acf.py │ │ ├── _ar_coefficient.py │ │ ├── _broadcaster.py │ │ ├── _downsample.py │ │ ├── _dwt.py │ │ ├── _hog1d.py │ │ ├── _impute.py │ │ ├── _matrix_profile.py │ │ ├── _pad.py │ │ ├── _periodogram.py │ │ ├── _reduce.py │ │ ├── _rescale.py │ │ ├── _resize.py │ │ ├── _slope.py │ │ ├── _truncate.py │ │ ├── base.py │ │ ├── channel_selection │ │ │ ├── __init__.py │ │ │ ├── _channel_scorer.py │ │ │ ├── _elbow_class.py │ │ │ ├── _random.py │ │ │ ├── base.py │ │ │ └── tests │ │ │ │ ├── __init__.py │ │ │ │ ├── test_channel_scorer.py │ │ │ │ ├── test_elbow_class.py │ │ │ │ └── test_random.py │ │ ├── compose │ │ │ ├── __init__.py │ │ │ ├── _identity.py │ │ │ ├── _pipeline.py │ │ │ └── tests │ │ │ │ ├── __init__.py │ │ │ │ └── test_pipeline.py │ │ ├── convolution_based │ │ │ ├── __init__.py │ │ │ ├── _hydra.py │ │ │ ├── _minirocket.py │ │ │ ├── _multirocket.py │ │ │ ├── _rocket.py │ │ │ ├── rocketGPU │ │ │ │ ├── __init__.py │ │ │ │ ├── _rocket_gpu.py │ │ │ │ ├── base.py │ │ │ │ └── tests │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── test_base_rocketGPU.py │ │ │ └── tests │ │ │ │ ├── __init__.py │ │ │ │ ├── test_all_rockets.py │ │ │ │ ├── test_hydra.py │ │ │ │ └── test_minirocket.py │ │ ├── dictionary_based │ │ │ ├── __init__.py │ │ │ ├── _borf.py │ │ │ ├── _paa.py │ │ │ ├── _sax.py │ │ │ ├── _sfa.py │ │ │ ├── _sfa_fast.py │ │ │ ├── _sfa_whole.py │ │ │ └── tests │ │ │ │ ├── __init__.py │ │ │ │ ├── test_paa.py │ │ │ │ ├── test_sax.py │ │ │ │ └── test_sfa.py │ │ ├── feature_based │ │ │ ├── __init__.py │ │ │ ├── _catch22.py │ │ │ ├── _summary.py │ │ │ ├── _tsfresh.py │ │ │ └── tests │ │ │ │ ├── __init__.py │ │ │ │ ├── test_catch22.py │ │ │ │ ├── test_summary.py │ │ │ │ └── test_tsfresh.py │ │ ├── interval_based │ │ │ ├── __init__.py │ │ │ ├── _quant.py │ │ │ ├── _random_intervals.py │ │ │ ├── _supervised_intervals.py │ │ │ └── tests │ │ │ │ ├── __init__.py │ │ │ 
│ └── test_intervals.py │ │ ├── self_supervised │ │ │ ├── __init__.py │ │ │ ├── _trilite.py │ │ │ └── tests │ │ │ │ ├── __init__.py │ │ │ │ └── test_trilite.py │ │ ├── shapelet_based │ │ │ ├── __init__.py │ │ │ ├── _dilated_shapelet_transform.py │ │ │ ├── _rsast.py │ │ │ ├── _sast.py │ │ │ ├── _shapelet_transform.py │ │ │ └── tests │ │ │ │ ├── __init__.py │ │ │ │ ├── test_dilated_shapelet_transform.py │ │ │ │ └── test_shapelet_transform.py │ │ ├── signature_based │ │ │ ├── __init__.py │ │ │ ├── _augmentations.py │ │ │ ├── _compute.py │ │ │ ├── _rescaling.py │ │ │ ├── _signature_method.py │ │ │ ├── _window.py │ │ │ └── tests │ │ │ │ ├── __init__.py │ │ │ │ ├── test_augmentations.py │ │ │ │ └── test_method.py │ │ ├── tests │ │ │ ├── __init__.py │ │ │ ├── test_acf.py │ │ │ ├── test_ar_coefficient.py │ │ │ ├── test_base.py │ │ │ ├── test_broadcaster.py │ │ │ ├── test_downsample.py │ │ │ ├── test_dwt.py │ │ │ ├── test_hog1d_transformer.py │ │ │ ├── test_rescaler.py │ │ │ ├── test_simple_imputer.py │ │ │ ├── test_slope_transformer.py │ │ │ └── test_tabularizer.py │ │ └── unequal_length │ │ │ ├── __init__.py │ │ │ ├── _commons.py │ │ │ ├── _pad.py │ │ │ ├── _resize.py │ │ │ ├── _truncate.py │ │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_pad.py │ │ │ ├── test_resize.py │ │ │ └── test_truncate.py │ └── series │ │ ├── __init__.py │ │ ├── _acf.py │ │ ├── _bkfilter.py │ │ ├── _boxcox.py │ │ ├── _clasp.py │ │ ├── _dft.py │ │ ├── _diff.py │ │ ├── _dobin.py │ │ ├── _exp_smoothing.py │ │ ├── _gauss.py │ │ ├── _log.py │ │ ├── _matrix_profile.py │ │ ├── _moving_average.py │ │ ├── _pca.py │ │ ├── _pla.py │ │ ├── _scaled_logit.py │ │ ├── _sg.py │ │ ├── _siv.py │ │ ├── _warping.py │ │ ├── _yeojohnson.py │ │ ├── base.py │ │ ├── compose │ │ ├── __init__.py │ │ ├── _identity.py │ │ ├── _pipeline.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_pipeline.py │ │ ├── smoothing │ │ ├── __init__.py │ │ ├── _dfa.py │ │ ├── _exp_smoothing.py │ │ ├── _gauss.py │ │ ├── _moving_average.py │ │ ├── _rms.py │ │ ├── _sg.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_dft.py │ │ │ ├── test_exp_smoothing.py │ │ │ ├── test_gauss.py │ │ │ ├── test_moving_average.py │ │ │ ├── test_rms.py │ │ │ └── test_sg.py │ │ └── tests │ │ ├── __init__.py │ │ ├── test_acf.py │ │ ├── test_base.py │ │ ├── test_bkfilter.py │ │ ├── test_boxcox.py │ │ ├── test_clasp.py │ │ ├── test_diff.py │ │ ├── test_dobin.py │ │ ├── test_matrix_profile.py │ │ ├── test_pca.py │ │ ├── test_pla.py │ │ ├── test_scaled_logit.py │ │ ├── test_warping.py │ │ └── test_yeojohnson.py ├── utils │ ├── __init__.py │ ├── base │ │ ├── __init__.py │ │ ├── _identifier.py │ │ ├── _register.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_identifier.py │ │ │ └── test_register.py │ ├── conversion │ │ ├── __init__.py │ │ ├── _convert_collection.py │ │ ├── _convert_series.py │ │ ├── _convert_tsf.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_convert_collection.py │ │ │ ├── test_convert_series.py │ │ │ └── test_tsf_converter.py │ ├── data_types.py │ ├── discovery.py │ ├── networks │ │ ├── __init__.py │ │ └── weight_norm.py │ ├── numba │ │ ├── __init__.py │ │ ├── general.py │ │ ├── stats.py │ │ ├── tests │ │ │ ├── __init__.py │ │ │ ├── test_general.py │ │ │ ├── test_stats.py │ │ │ └── test_wavelets.py │ │ └── wavelets.py │ ├── repr.py │ ├── self_supervised │ │ ├── __init__.py │ │ └── general.py │ ├── show_versions.py │ ├── sklearn.py │ ├── split.py │ ├── tags │ │ ├── __init__.py │ │ ├── _discovery.py │ │ ├── _tags.py │ │ ├── _validate.py │ │ └── tests │ │ │ ├── 
__init__.py │ │ │ ├── test_discovery.py │ │ │ ├── test_tags.py │ │ │ └── test_validate.py │ ├── tests │ │ ├── __init__.py │ │ ├── test_discovery.py │ │ ├── test_show_versions.py │ │ ├── test_sklearn_typing.py │ │ ├── test_split.py │ │ ├── test_weightnorm.py │ │ └── test_windowing.py │ ├── validation │ │ ├── __init__.py │ │ ├── _dependencies.py │ │ ├── collection.py │ │ ├── labels.py │ │ ├── series.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ ├── test_check_imports.py │ │ │ ├── test_collection.py │ │ │ ├── test_dependencies.py │ │ │ ├── test_init.py │ │ │ └── test_series.py │ └── windowing.py └── visualisation │ ├── __init__.py │ ├── distances │ ├── __init__.py │ ├── _pairwise_distance_matrix.py │ └── tests │ │ ├── __init__.py │ │ └── test_pairwise_distance_matrix.py │ ├── estimator │ ├── __init__.py │ ├── _clasp.py │ ├── _clustering.py │ ├── _network_plot.py │ ├── _shapelets.py │ ├── _temporal_importance_curves.py │ └── tests │ │ ├── __init__.py │ │ ├── test_clasp_plotting.py │ │ ├── test_cluster_plotting.py │ │ ├── test_shapelet_plotting.py │ │ └── test_tic_plotting.py │ ├── learning_task │ ├── __init__.py │ ├── _forecasting.py │ ├── _segmentation.py │ └── tests │ │ ├── __init__.py │ │ ├── test_forecasting_plotting.py │ │ └── test_segmentation_plotting.py │ ├── results │ ├── __init__.py │ ├── _boxplot.py │ ├── _critical_difference.py │ ├── _mcm.py │ ├── _scatter.py │ ├── _significance.py │ └── tests │ │ ├── __init__.py │ │ ├── test_boxplot.py │ │ ├── test_critical_difference.py │ │ ├── test_mcm.py │ │ ├── test_scatter.py │ │ └── test_significance.py │ └── series │ ├── __init__.py │ ├── _collections.py │ ├── _series.py │ └── tests │ ├── __init__.py │ ├── test_collection_plotting.py │ └── test_series_plotting.py ├── conftest.py ├── docs ├── Makefile ├── _sphinxext │ └── sphinx_remove_toctrees.py ├── _static │ └── css │ │ └── custom.css ├── _templates │ ├── class.rst │ ├── function.rst │ └── numpydoc_docstring.rst ├── about.md ├── about │ ├── code_of_conduct_moderators.md │ ├── communications_workgroup.md │ ├── core_developers.md │ ├── finance_workgroup.md │ ├── infrastructure_workgroup.md │ ├── release_management_workgroup.md │ └── supporting_developers.md ├── api_reference.md ├── api_reference │ ├── anomaly_detection.rst │ ├── base.md │ ├── benchmarking.rst │ ├── classification.rst │ ├── clustering.rst │ ├── data_format.md │ ├── datasets.md │ ├── distances.rst │ ├── forecasting.md │ ├── networks.rst │ ├── regression.rst │ ├── segmentation.rst │ ├── similarity_search.rst │ ├── transformations.md │ ├── utils.rst │ └── visualisation.rst ├── changelog.md ├── changelogs │ ├── v0 │ │ ├── v0.1.md │ │ ├── v0.10.md │ │ ├── v0.11.md │ │ ├── v0.2.md │ │ ├── v0.3.md │ │ ├── v0.4.md │ │ ├── v0.5.md │ │ ├── v0.6.md │ │ ├── v0.7.md │ │ ├── v0.8.md │ │ └── v0.9.md │ ├── v1.0.md │ ├── v1.1.md │ └── v1.2.md ├── code_of_conduct.md ├── conf.py ├── contributing.md ├── contributing │ └── issues.md ├── contributors.md ├── developer_guide.md ├── developer_guide │ ├── adding_typehints.md │ ├── aep.md │ ├── coding_standards.md │ ├── dependencies.md │ ├── deprecation.md │ ├── dev_installation.md │ ├── documentation.md │ ├── release.md │ └── testing.md ├── estimator_overview.md ├── examples ├── examples.md ├── getting_started.md ├── governance.md ├── images │ ├── funder_logos │ │ ├── ati-logo.png │ │ ├── cdrc-logo.png │ │ ├── mercedes-benz-logo.png │ │ ├── ukri-epsrc-logo.png │ │ └── ukri-esrc-logo.png │ ├── logo │ │ ├── aeon-favicon.ico │ │ ├── aeon-logo-blue-2-compact.png │ │ ├── aeon-logo-blue-2-transparent.png │ 
│ ├── aeon-logo-blue-compact.png │ │ └── source │ │ │ ├── aeon-logo-black-2.eps │ │ │ ├── aeon-logo-black-2.jpg │ │ │ ├── aeon-logo-black-2.png │ │ │ ├── aeon-logo-black.eps │ │ │ ├── aeon-logo-black.jpg │ │ │ ├── aeon-logo-black.png │ │ │ ├── aeon-logo-blue-2.eps │ │ │ ├── aeon-logo-blue-2.jpg │ │ │ ├── aeon-logo-blue-2.png │ │ │ ├── aeon-logo-blue.eps │ │ │ ├── aeon-logo-blue.jpg │ │ │ ├── aeon-logo-blue.png │ │ │ ├── aeon-logo-gradient-2.eps │ │ │ ├── aeon-logo-gradient-2.jpg │ │ │ ├── aeon-logo-gradient-2.png │ │ │ ├── aeon-logo-gradient.eps │ │ │ ├── aeon-logo-gradient.jpg │ │ │ ├── aeon-logo-gradient.png │ │ │ ├── aeon-logo-green-2.eps │ │ │ ├── aeon-logo-green-2.jpg │ │ │ ├── aeon-logo-green-2.png │ │ │ ├── aeon-logo-green.eps │ │ │ ├── aeon-logo-green.jpg │ │ │ ├── aeon-logo-green.png │ │ │ ├── aeon-logo-no-text-black.png │ │ │ ├── aeon-logo-no-text-white.png │ │ │ ├── aeon-logo-no-text.png │ │ │ ├── aeon-logo-white-2.eps │ │ │ ├── aeon-logo-white-2.jpg │ │ │ ├── aeon-logo-white-2.png │ │ │ ├── aeon-logo-white.eps │ │ │ ├── aeon-logo-white.jpg │ │ │ ├── aeon-logo-white.png │ │ │ └── proof_sheet.pdf │ └── other_logos │ │ └── numfocus-logo.png ├── index.md ├── installation.md ├── make.bat ├── papers_using_aeon.md ├── projects.md └── projects │ └── previous_projects.md ├── examples ├── anomaly_detection │ ├── anomaly_detection.ipynb │ └── img │ │ └── anomaly_detection.png ├── base │ ├── base_classes.ipynb │ ├── img │ │ ├── base_annotator.png │ │ ├── base_classifier.png │ │ ├── base_clusterer.png │ │ ├── base_collection_estimator.png │ │ ├── base_collection_transformer.png │ │ ├── base_deep_network.png │ │ ├── base_estimator.png │ │ ├── base_forecaster.png │ │ ├── base_metric.png │ │ ├── base_object.png │ │ ├── base_regressor.png │ │ ├── base_transformer.png │ │ ├── sklearn_base.png │ │ ├── uml.png │ │ ├── uml1.png │ │ ├── uml2.png │ │ ├── uml3.png │ │ └── uml_base_classes.pdf │ └── series_estimator.ipynb ├── benchmarking │ ├── benchmarking.ipynb │ ├── img │ │ ├── bakeoff2015.png │ │ ├── bakeoff2021.png │ │ └── benchmarking.png │ ├── published_results.ipynb │ ├── reference_results.ipynb │ └── regression.ipynb ├── classification │ ├── classification.ipynb │ ├── convolution_based.ipynb │ ├── deep_learning.ipynb │ ├── dictionary_based.ipynb │ ├── distance_based.ipynb │ ├── early_classification.ipynb │ ├── feature_based.ipynb │ ├── hybrid.ipynb │ ├── img │ │ ├── arrow-heads.png │ │ ├── class_diff.png │ │ ├── convolution.png │ │ ├── dictionary.png │ │ ├── dtw.png │ │ ├── dtw2.png │ │ ├── early_classification.png │ │ ├── ensemble.png │ │ ├── ex_alpha_sim.png │ │ ├── ex_dil_shp_slide.png │ │ ├── ex_shp_simple.png │ │ ├── from-shapes-to-time-series.png │ │ ├── hc2.png │ │ ├── hist.png │ │ ├── inception.png │ │ ├── inception_module.png │ │ ├── interval.png │ │ ├── it.png │ │ ├── pipeline.png │ │ ├── resnet.png │ │ ├── rocket.png │ │ ├── rocket2.png │ │ ├── rotation_forest.png │ │ ├── shape_dtw.PNG │ │ ├── shapelet.png │ │ ├── shp_general.png │ │ ├── spatial_pyramids.png │ │ └── tsc.png │ ├── interval_based.ipynb │ ├── rotation_forest.ipynb │ └── shapelet_based.ipynb ├── clustering │ ├── clustering.ipynb │ ├── feature_based_clustering.ipynb │ ├── img │ │ ├── clst_cd.png │ │ ├── partitional.png │ │ ├── taxonomy.png │ │ └── tscl.png │ └── partitional_clustering.ipynb ├── datasets │ ├── data_loading.ipynb │ ├── data_unequal.ipynb │ ├── datasets.ipynb │ ├── img │ │ ├── data.png │ │ ├── download1.png │ │ ├── download2.png │ │ └── download3.png │ ├── load_data_from_web.ipynb │ └── provided_data.ipynb 
├── distances │ ├── distances.ipynb │ ├── img │ │ ├── DTW_GUI_warped.gif │ │ ├── bounding_window.png │ │ ├── distances.png │ │ ├── dtw_algo.png │ │ ├── dtw_alignment.png │ │ ├── dtw_path_through_costmatrix.png │ │ ├── dtw_vis.png │ │ ├── euclidean_path_through_pairwise.png │ │ └── svm.png │ └── sklearn_distances.ipynb ├── forecasting │ ├── direct.ipynb │ ├── ets.ipynb │ ├── forecasting.ipynb │ ├── img │ │ ├── direct.png │ │ ├── forecasting.png │ │ ├── iterative.png │ │ └── window.png │ ├── iterative.ipynb │ └── regression.ipynb ├── networks │ ├── deep_learning.ipynb │ └── img │ │ ├── H-Inception-archi.png │ │ ├── H-Inception.png │ │ ├── Inception-archi.png │ │ ├── Inception.png │ │ └── legend.png ├── regression │ ├── img │ │ ├── spectra.png │ │ └── tser.png │ └── regression.ipynb ├── segmentation │ ├── hidalgo_segmentation.ipynb │ ├── img │ │ ├── clasp.png │ │ ├── hidalgo.png │ │ ├── segmentation.png │ │ └── segmentation_use_cases.png │ ├── segmentation.ipynb │ └── segmentation_with_clasp.ipynb ├── similarity_search │ ├── code_speed.ipynb │ ├── distance_profiles.ipynb │ ├── img │ │ ├── code_speed.png │ │ ├── distance_profile.png │ │ └── sim_search.png │ └── similarity_search.ipynb ├── transformations │ ├── catch22.ipynb │ ├── channel_selection.ipynb │ ├── img │ │ ├── catch22.png │ │ ├── hidalgo.png │ │ ├── interpolation.png │ │ ├── leaves_shapelets.png │ │ ├── mini_rocket.png │ │ ├── preprocessing.png │ │ ├── rocket.png │ │ ├── sast.png │ │ ├── segmentation.png │ │ ├── signature.png │ │ ├── signatures_generalised_method.png │ │ ├── theta.png │ │ ├── transformations.png │ │ ├── tsfresh.png │ │ └── verdena_shapelet.png │ ├── minirocket.ipynb │ ├── preprocessing.ipynb │ ├── rocket.ipynb │ ├── sast.ipynb │ ├── signature_method.ipynb │ ├── smoothing_filters.ipynb │ ├── transformations.ipynb │ └── tsfresh.ipynb └── visualisation │ ├── plotting_for_learning_tasks.ipynb │ ├── plotting_results.ipynb │ └── plotting_series.ipynb └── pyproject.toml /.binder/Dockerfile: -------------------------------------------------------------------------------- 1 | # This Dockerfile is used to build aeon when launching binder. 2 | # Find out more at: https://mybinder.readthedocs.io/en/latest/index.html 3 | 4 | # Load jupyter python 3.11 image 5 | FROM jupyter/scipy-notebook:python-3.11 6 | 7 | # Set up user to avoid running as root 8 | ARG NB_USER=user 9 | ARG NB_UID=1000 10 | ENV USER ${NB_USER} 11 | ENV HOME /home/${NB_USER} 12 | 13 | # Binder will automatically clone the repo, but we need to make sure the 14 | # contents of our repo are in the ${HOME} directory 15 | COPY . 
${HOME} 16 | USER root 17 | RUN chown -R ${NB_UID} ${HOME} 18 | 19 | # Switch user and directory 20 | USER ${USER} 21 | WORKDIR ${HOME} 22 | 23 | # Install extra requirements and aeon based on master branch 24 | RUN pip install --upgrade pip --no-cache-dir 25 | RUN pip install llvmlite --ignore-installed 26 | RUN pip install .[binder] 27 | -------------------------------------------------------------------------------- /.codecov.yml: -------------------------------------------------------------------------------- 1 | # PR status check 2 | coverage: 3 | status: 4 | project: 5 | default: 6 | target: auto 7 | threshold: 1% 8 | # If true PRs will always pass and reports are just for information 9 | informational: true 10 | patch: 11 | default: 12 | target: auto 13 | threshold: 1% 14 | # If true PRs will always pass and reports are just for information 15 | informational: true 16 | 17 | # post coverage report as comment on PR 18 | comment: false 19 | 20 | # enable codecov to report to GitHub status checks 21 | github_checks: 22 | annotations: false 23 | 24 | # paths to ignore 25 | ignore: 26 | - ".github/" 27 | - ".binder/" 28 | - "aeon/testing/expected_results/results_reproduction/" 29 | - "docs/" 30 | - "examples/" 31 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [report] 2 | show_missing = True 3 | exclude_lines = 4 | pragma: no cover 5 | @abstractmethod 6 | @abc.abstractmethod 7 | 8 | [run] 9 | branch = True 10 | source = aeon 11 | parallel = True 12 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.ts linguist-detectable=false 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | contact_links: 2 | - name: "\U0001F4AC Discussions" 3 | url: https://github.com/aeon-toolkit/aeon/discussions/new/choose 4 | about: If none of these options fit, your question or topic may be better suited to a discussion. 5 | - name: "\U0001F4AC Slack" 6 | url: https://join.slack.com/t/aeon-toolkit/shared_invite/zt-36dlmbouu-vajTShUYAHopSXUUVtHGzw 7 | about: For all other questions and general chat, come chat with the aeon community on Slack! 8 | - name: "\u2709\uFE0F Email" 9 | url: https://mailxto.com/jbp3ave49x 10 | about: Send an email to the aeon developers at contact@aeon-toolkit.org. Checked periodically. 11 | - name: "\u2709\uFE0F CoC Report" 12 | url: https://www.aeon-toolkit.org/en/stable/code_of_conduct.html 13 | about: Report an incident to the Code of Conduct Moderators. 14 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/doc_improvement.yml: -------------------------------------------------------------------------------- 1 | name: 📖 Documentation improvement 2 | description: Create a report to help us improve the aeon documentation. 3 | title: "[DOC] " 4 | labels: ["documentation"] 5 | 6 | body: 7 | - type: textarea 8 | attributes: 9 | label: Describe the issue linked to the documentation 10 | description: > 11 | Tell us about the confusion introduced in the documentation. 
12 | validations: 13 | required: true 14 | - type: textarea 15 | attributes: 16 | label: Suggest a potential alternative/fix 17 | description: > 18 | Tell us how we could improve the documentation in this regard. 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: ✨ Feature request 2 | description: Suggest a new algorithm or idea for aeon. 3 | title: "[ENH] " 4 | labels: ["enhancement"] 5 | 6 | body: 7 | - type: textarea 8 | attributes: 9 | label: Describe the feature or idea you want to propose 10 | description: > 11 | Is your feature request related to a problem? Please describe. If it is a new estimator, you should link to the paper or other reference. 12 | validations: 13 | required: true 14 | - type: textarea 15 | attributes: 16 | label: Describe your proposed solution 17 | description: > 18 | A clear and concise description of what you want to happen, ideally taking into consideration the existing toolbox design, classes and methods. 19 | validations: 20 | required: true 21 | - type: textarea 22 | attributes: 23 | label: Describe alternatives you've considered, if relevant 24 | description: > 25 | A clear and concise description of any alternative solutions or features you've considered. 26 | - type: textarea 27 | attributes: 28 | label: Additional context 29 | description: > 30 | Add any other context about the problem here. 31 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/other_issue.yml: -------------------------------------------------------------------------------- 1 | name: 🔧 Other issue 2 | description: Other issue categories do not fit. You may wish to consider a Discussion. 3 | 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: > 8 | If the issue is governance related, please start the title with [GOV] and add the governance tag. If the issue is maintenance related (i.e. CI), please start the title with [MNT] and add the maintenance tag. 9 | - type: textarea 10 | attributes: 11 | label: Describe the issue 12 | validations: 13 | required: true 14 | - type: textarea 15 | attributes: 16 | label: Suggest a potential alternative/fix 17 | - type: textarea 18 | attributes: 19 | label: Additional context 20 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # update GitHub actions versions 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | interval: "weekly" 8 | time: "01:00" 9 | commit-message: 10 | prefix: "[MNT] " 11 | labels: 12 | - "maintenance" 13 | - "dependencies" 14 | - "no changelog" 15 | groups: 16 | github-actions: 17 | patterns: 18 | - "*" 19 | # update Python dependencies 20 | - package-ecosystem: "pip" 21 | directory: "/" 22 | schedule: 23 | interval: "weekly" 24 | time: "01:00" 25 | commit-message: 26 | prefix: "[MNT] " 27 | labels: 28 | - "maintenance" 29 | - "dependencies" 30 | - "full pytest actions" 31 | - "no changelog" 32 | groups: 33 | python-packages: 34 | patterns: 35 | - "*" 36 | -------------------------------------------------------------------------------- /.github/utilities/ai_spam.py: -------------------------------------------------------------------------------- 1 | """Script for handling AI Spam label on pull requests. 
2 | 3 | Triggered when AI Spam label is added to a PR, 4 | it adds a comment and closes the PR. 5 | """ 6 | 7 | import json 8 | import os 9 | 10 | from github import Github 11 | 12 | context_dict = json.loads(os.getenv("CONTEXT_GITHUB")) 13 | 14 | repo_name = context_dict["repository"] 15 | g = Github(os.getenv("GITHUB_TOKEN")) 16 | repo = g.get_repo(repo_name) 17 | pr_number = context_dict["event"]["pull_request"]["number"] 18 | pr = repo.get_pull(pr_number) 19 | label_name = context_dict["event"]["label"]["name"] 20 | 21 | if label_name == "AI Spam": 22 | comment_body = ( 23 | "This pull request has been flagged with the **AI Spam** label.\n\n" 24 | "This PR is being closed." 25 | ) 26 | pr.create_issue_comment(comment_body) 27 | pr.edit(state="closed") 28 | -------------------------------------------------------------------------------- /.github/utilities/codespell_ignore_words.txt: -------------------------------------------------------------------------------- 1 | fpr 2 | mape 3 | recuse 4 | strat 5 | -------------------------------------------------------------------------------- /.github/utilities/remove_good_first_issue.py: -------------------------------------------------------------------------------- 1 | """Removes the good first issue tag when an issue has been assigned.""" 2 | 3 | import json 4 | import os 5 | 6 | from github import Github 7 | 8 | context_dict = json.loads(os.getenv("CONTEXT_GITHUB")) 9 | 10 | repo = context_dict["repository"] 11 | g = Github(os.getenv("GITHUB_TOKEN")) 12 | repo = g.get_repo(repo) 13 | issue_number = context_dict["event"]["issue"]["number"] 14 | issue = repo.get_issue(number=issue_number) 15 | 16 | issue.remove_from_labels("good first issue") 17 | -------------------------------------------------------------------------------- /.github/workflows/ai_spam.yml: -------------------------------------------------------------------------------- 1 | name: AI Spam Detection On PR 2 | 3 | on: 4 | pull_request: 5 | types: [labeled] 6 | 7 | concurrency: 8 | group: ${{ github.workflow }}-${{ github.event.pull_request.number }} 9 | cancel-in-progress: true 10 | 11 | jobs: 12 | ai-spam-present: 13 | if: ${{ github.event.label.name == 'AI Spam' }} 14 | runs-on: ubuntu-24.04 15 | 16 | steps: 17 | - name: Create app token 18 | uses: actions/create-github-app-token@v2 19 | id: app-token 20 | with: 21 | app-id: ${{ vars.PR_APP_ID }} 22 | private-key: ${{ secrets.PR_APP_KEY }} 23 | 24 | - name: Checkout main 25 | uses: actions/checkout@v4 26 | with: 27 | sparse-checkout: .github/utilities 28 | 29 | - name: Setup Python 3.11 30 | uses: actions/setup-python@v5 31 | with: 32 | python-version: "3.11" 33 | 34 | - name: Install PyGithub 35 | run: pip install -Uq PyGithub 36 | 37 | - name: Process AI Spam 38 | id: handle_spam 39 | run: python .github/utilities/ai_spam.py 40 | env: 41 | CONTEXT_GITHUB: ${{ toJson(github) }} 42 | GITHUB_TOKEN: ${{ steps.app-token.outputs.token }} 43 | -------------------------------------------------------------------------------- /.github/workflows/fast_release.yml: -------------------------------------------------------------------------------- 1 | # Makes a release without testing. Don't run this unless you have to. 
2 | name: Fast release 3 | 4 | on: 5 | workflow_dispatch: 6 | 7 | jobs: 8 | build-project: 9 | runs-on: ubuntu-24.04 10 | 11 | steps: 12 | - uses: actions/checkout@v4 13 | 14 | - name: Setup Python 3.12 15 | uses: actions/setup-python@v5 16 | with: 17 | python-version: "3.12" 18 | 19 | - name: Build project 20 | run: | 21 | python -m pip install build 22 | python -m build 23 | 24 | - name: Store build files 25 | uses: actions/upload-artifact@v4 26 | with: 27 | name: dist 28 | path: dist/* 29 | retention-days: 5 30 | 31 | upload-wheels: 32 | runs-on: ubuntu-24.04 33 | 34 | environment: 35 | name: release 36 | url: https://pypi.org/p/aeon/ 37 | permissions: 38 | id-token: write 39 | 40 | steps: 41 | - uses: actions/download-artifact@v4 42 | with: 43 | name: dist 44 | path: dist 45 | 46 | - name: Publish package to PyPI 47 | uses: pypa/gh-action-pypi-publish@release/v1 48 | -------------------------------------------------------------------------------- /.github/workflows/issue_assigned.yml: -------------------------------------------------------------------------------- 1 | name: Issue Assigned 2 | 3 | on: 4 | issues: 5 | types: [assigned] 6 | 7 | concurrency: 8 | group: ${{ github.workflow }}-${{ github.event.issue.id }} 9 | cancel-in-progress: true 10 | 11 | jobs: 12 | remove-good-first-issue: 13 | runs-on: ubuntu-24.04 14 | 15 | steps: 16 | - name: Create app token 17 | uses: actions/create-github-app-token@v2 18 | id: app-token 19 | with: 20 | app-id: ${{ vars.PR_APP_ID }} 21 | private-key: ${{ secrets.PR_APP_KEY }} 22 | 23 | - name: Checkout main 24 | uses: actions/checkout@v4 25 | with: 26 | sparse-checkout: .github/utilities 27 | 28 | - name: Setup Python 3.12 29 | uses: actions/setup-python@v5 30 | with: 31 | python-version: "3.12" 32 | 33 | - name: Install PyGithub 34 | run: pip install -Uq PyGithub 35 | 36 | - name: Remove good first issue label 37 | id: label_out 38 | run: python .github/utilities/remove_good_first_issue.py 39 | env: 40 | CONTEXT_GITHUB: ${{ toJson(github) }} 41 | GITHUB_TOKEN: ${{ steps.app-token.outputs.token }} 42 | -------------------------------------------------------------------------------- /.github/workflows/issue_comment_posted.yml: -------------------------------------------------------------------------------- 1 | name: Issue Comment Posted 2 | 3 | on: 4 | issue_comment: 5 | types: [created] 6 | 7 | jobs: 8 | self-assign: 9 | runs-on: ubuntu-24.04 10 | 11 | steps: 12 | - name: Checkout 13 | uses: actions/checkout@v4 14 | with: 15 | sparse-checkout: .github/utilities 16 | 17 | - name: Setup Python 3.12 18 | uses: actions/setup-python@v5 19 | with: 20 | python-version: "3.12" 21 | 22 | - name: Install PyGithub 23 | run: pip install -Uq PyGithub 24 | 25 | - name: Create app token 26 | uses: actions/create-github-app-token@v2 27 | id: app-token 28 | with: 29 | app-id: ${{ vars.PR_APP_ID }} 30 | private-key: ${{ secrets.PR_APP_KEY }} 31 | 32 | - name: Assign issue 33 | run: python .github/utilities/issue_assign.py 34 | env: 35 | CONTEXT_GITHUB: ${{ toJson(github) }} 36 | GITHUB_TOKEN: ${{ steps.app-token.outputs.token }} 37 | -------------------------------------------------------------------------------- /.github/workflows/pr_core_dep_import.yml: -------------------------------------------------------------------------------- 1 | name: PR module imports 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | paths: 9 | - "aeon/**" 10 | - ".github/workflows/**" 11 | - "pyproject.toml" 12 | 13 | concurrency: 14 | group: ${{ github.workflow }}-${{ 
github.head_ref || github.ref }} 15 | cancel-in-progress: true 16 | 17 | jobs: 18 | test-core-imports: 19 | runs-on: ubuntu-24.04 20 | 21 | steps: 22 | - name: Checkout 23 | uses: actions/checkout@v4 24 | 25 | - name: Setup Python 3.12 26 | uses: actions/setup-python@v5 27 | with: 28 | python-version: "3.12" 29 | 30 | - name: Install aeon and dependencies 31 | uses: nick-fields/retry@v3 32 | with: 33 | timeout_minutes: 30 34 | max_attempts: 3 35 | command: python -m pip install . 36 | 37 | - name: Show dependencies 38 | run: python -m pip list 39 | 40 | - name: Run import test 41 | run: python aeon/testing/tests/test_core_imports.py 42 | -------------------------------------------------------------------------------- /.github/workflows/pr_typecheck.yml: -------------------------------------------------------------------------------- 1 | name: PR Typecheck 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | paths: 9 | - "aeon/**" 10 | 11 | concurrency: 12 | group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} 13 | cancel-in-progress: true 14 | 15 | jobs: 16 | typecheck: 17 | # only run the typecheck job if the PR has the 'run typecheck test' label 18 | if: ${{ github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'run typecheck test') }} 19 | runs-on: ubuntu-24.04 20 | 21 | steps: 22 | - name: Checkout 23 | uses: actions/checkout@v4 24 | 25 | - name: Setup Python 3.12 26 | uses: actions/setup-python@v5 27 | with: 28 | python-version: "3.12" 29 | 30 | - name: Install aeon, dependencies and mypy 31 | uses: nick-fields/retry@v3 32 | with: 33 | timeout_minutes: 30 34 | max_attempts: 3 35 | command: python -m pip install .[all_extras,unstable_extras,dev] mypy 36 | 37 | - name: Show dependencies 38 | run: python -m pip list 39 | 40 | - name: Run mypy typecheck 41 | run: mypy aeon/ 42 | -------------------------------------------------------------------------------- /.github/workflows/update_contributors.yml: -------------------------------------------------------------------------------- 1 | name: Update Contributors 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - '.all-contributorsrc' 9 | 10 | jobs: 11 | generate-markdown-and-commit: 12 | runs-on: ubuntu-24.04 13 | 14 | steps: 15 | - uses: actions/checkout@v4 16 | 17 | - uses: actions/setup-node@v4 18 | with: 19 | node-version: 18 20 | 21 | - name: Setup all-contributors-cli 22 | run: npm install -g all-contributors-cli 23 | 24 | - name: Generate CONTRIBUTORS.md 25 | id: generate 26 | run: npx all-contributors generate 27 | 28 | - uses: actions/create-github-app-token@v2 29 | id: app-token 30 | with: 31 | app-id: ${{ vars.PR_APP_ID }} 32 | private-key: ${{ secrets.PR_APP_KEY }} 33 | 34 | - uses: peter-evans/create-pull-request@v7 35 | with: 36 | token: ${{ steps.app-token.outputs.token }} 37 | commit-message: "Automated `CONTRIBUTORS.md` update" 38 | branch: update_contributors 39 | title: "[MNT] Automated `CONTRIBUTORS.md` update" 40 | body: "Automated update to CONTRIBUTORS.md caused by an update to the `.all-contributorsrc` file." 41 | labels: maintenance, no changelog 42 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | 4 | version: 2 5 | 6 | python: 7 | install: 8 | - method: pip 9 | path: . 
10 | extra_requirements: 11 | - docs 12 | 13 | build: 14 | os: ubuntu-24.04 15 | tools: 16 | python: "3.11" 17 | 18 | sphinx: 19 | configuration: docs/conf.py 20 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | You can find the `aeon` changelog pages on our [website](https://www.aeon-toolkit.org/en/stable/changelog.html). 4 | 5 | `aeon` adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html) and the source code for 6 | all [releases](https://github.com/aeon-toolkit/aeon/releases) is available on GitHub. 7 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guide 2 | 3 | `aeon` is a community-driven project and contributions are most welcome. We value 4 | all kinds of contributions, not just code. Improvements to docs, bug reports, and 5 | taking on communications or code of conduct responsibilities are all examples of 6 | valuable contributions beyond code which help make aeon a great package. 7 | 8 | You can find our full contributing guide on our [website](https://www.aeon-toolkit.org/en/stable/contributing.html). 9 | -------------------------------------------------------------------------------- /GOVERNANCE.md: -------------------------------------------------------------------------------- 1 | # Governance 2 | 3 | You can find our full governance guidelines on our [website](https://www.aeon-toolkit.org/en/stable/governance.html). 4 | 5 | `aeon` welcomes all contributors, and anyone can involve themselves in the community 6 | and contribute code, documentation, and ideas. The project is maintained by a group 7 | of core developers, who are responsible for reviewing and merging contributions, 8 | engaging with the community, and steering the project. Tasks are delegated to 9 | workgroups, which are groups of core developers and contributors assigned by 10 | core developers to fulfil a specific role. 11 | 12 | `aeon`'s decision-making process is transparent and consensus-based. 13 | Most decisions involve seeking consensus among contributors and core developers. 14 | If consensus is not reached, a vote can be called, requiring a two-thirds majority 15 | of core developers for approval. The process emphasises transparency and open 16 | discussion, with some sensitive topics discussed privately. 
17 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include aeon *.py 2 | recursive-include aeon/datasets *.csv *.arff *.txt *.ts *.tsv *.tsf 3 | recursive-include aeon/testing/example_results_files *.csv 4 | include aeon/registry/README.md 5 | include .coveragerc 6 | include conftest.py 7 | include LICENSE 8 | include MANIFEST.in 9 | include pyproject.toml 10 | include README.md 11 | 12 | recursive-exclude .binder * 13 | recursive-exclude .github * 14 | recursive-exclude docs * 15 | recursive-exclude examples * 16 | exclude .all-contributorsrc 17 | exclude .codecov.yml 18 | exclude .gitattributes 19 | exclude .gitignore 20 | exclude .pre-commit-config.yaml 21 | exclude .readthedocs.yml 22 | exclude CHANGELOG.md 23 | exclude CODE_OF_CONDUCT.md 24 | exclude CODEOWNERS 25 | exclude CONTRIBUTING.md 26 | exclude CONTRIBUTORS.md 27 | exclude GOVERNANCE.md 28 | -------------------------------------------------------------------------------- /aeon/__init__.py: -------------------------------------------------------------------------------- 1 | """aeon toolkit.""" 2 | 3 | __version__ = "1.2.0" 4 | -------------------------------------------------------------------------------- /aeon/anomaly_detection/__init__.py: -------------------------------------------------------------------------------- 1 | """Time Series Anomaly Detection.""" 2 | 3 | __all__ = [ 4 | "BaseAnomalyDetector", 5 | ] 6 | 7 | from aeon.anomaly_detection.base import BaseAnomalyDetector 8 | -------------------------------------------------------------------------------- /aeon/anomaly_detection/collection/__init__.py: -------------------------------------------------------------------------------- 1 | """Whole-series anomaly detection methods.""" 2 | 3 | __all__ = [ 4 | "BaseCollectionAnomalyDetector", 5 | "ClassificationAdapter", 6 | "OutlierDetectionAdapter", 7 | ] 8 | 9 | from aeon.anomaly_detection.collection._classification import ClassificationAdapter 10 | from aeon.anomaly_detection.collection._outlier_detection import OutlierDetectionAdapter 11 | from aeon.anomaly_detection.collection.base import BaseCollectionAnomalyDetector 12 | -------------------------------------------------------------------------------- /aeon/anomaly_detection/series/__init__.py: -------------------------------------------------------------------------------- 1 | """Single series Time Series Anomaly Detection.""" 2 | 3 | __all__ = [ 4 | "BaseSeriesAnomalyDetector", 5 | "PyODAdapter", 6 | ] 7 | 8 | from aeon.anomaly_detection.series._pyodadapter import PyODAdapter 9 | from aeon.anomaly_detection.series.base import BaseSeriesAnomalyDetector 10 | -------------------------------------------------------------------------------- /aeon/anomaly_detection/series/distance_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Distance-based Time Series Anomaly Detection.""" 2 | 3 | __all__ = [ 4 | "CBLOF", 5 | "KMeansAD", 6 | "LeftSTAMPi", 7 | "LOF", 8 | "MERLIN", 9 | "STOMP", 10 | "ROCKAD", 11 | ] 12 | 13 | from aeon.anomaly_detection.series.distance_based._cblof import CBLOF 14 | from aeon.anomaly_detection.series.distance_based._kmeans import KMeansAD 15 | from aeon.anomaly_detection.series.distance_based._left_stampi import LeftSTAMPi 16 | from aeon.anomaly_detection.series.distance_based._lof import LOF 17 | from aeon.anomaly_detection.series.distance_based._merlin 
import MERLIN 18 | from aeon.anomaly_detection.series.distance_based._rockad import ROCKAD 19 | from aeon.anomaly_detection.series.distance_based._stomp import STOMP 20 | -------------------------------------------------------------------------------- /aeon/anomaly_detection/series/distance_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Distance based test code.""" 2 | -------------------------------------------------------------------------------- /aeon/anomaly_detection/series/distribution_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Distribution-based Time Series Anomaly Detection.""" 2 | 3 | __all__ = [ 4 | "COPOD", 5 | "DWT_MLEAD", 6 | ] 7 | 8 | from aeon.anomaly_detection.series.distribution_based._copod import COPOD 9 | from aeon.anomaly_detection.series.distribution_based._dwt_mlead import DWT_MLEAD 10 | -------------------------------------------------------------------------------- /aeon/anomaly_detection/series/distribution_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Distribution based test code.""" 2 | -------------------------------------------------------------------------------- /aeon/anomaly_detection/series/outlier_detection/__init__.py: -------------------------------------------------------------------------------- 1 | """Time Series Outlier Detection.""" 2 | 3 | __all__ = [ 4 | "IsolationForest", 5 | "OneClassSVM", 6 | "STRAY", 7 | ] 8 | 9 | from aeon.anomaly_detection.series.outlier_detection._iforest import IsolationForest 10 | from aeon.anomaly_detection.series.outlier_detection._one_class_svm import OneClassSVM 11 | from aeon.anomaly_detection.series.outlier_detection._stray import STRAY 12 | -------------------------------------------------------------------------------- /aeon/anomaly_detection/series/outlier_detection/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Outlier based test code.""" 2 | -------------------------------------------------------------------------------- /aeon/anomaly_detection/series/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for anomaly detection.""" 2 | -------------------------------------------------------------------------------- /aeon/base/__init__.py: -------------------------------------------------------------------------------- 1 | """Base classes for defining estimators in aeon.""" 2 | 3 | __all__ = [ 4 | "BaseAeonEstimator", 5 | "BaseCollectionEstimator", 6 | "BaseSeriesEstimator", 7 | "ComposableEstimatorMixin", 8 | ] 9 | 10 | from aeon.base._base import BaseAeonEstimator 11 | from aeon.base._base_collection import BaseCollectionEstimator 12 | from aeon.base._base_series import BaseSeriesEstimator 13 | from aeon.base._compose import ComposableEstimatorMixin 14 | -------------------------------------------------------------------------------- /aeon/base/_estimators/__init__.py: -------------------------------------------------------------------------------- 1 | """Base classes for estimators which are part of multiple aeon modules.""" 2 | -------------------------------------------------------------------------------- /aeon/base/_estimators/compose/__init__.py: -------------------------------------------------------------------------------- 1 | """Base class for composable estimators.""" 2 | 
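The series detectors listed above (for example KMeansAD, LeftSTAMPi, STOMP, COPOD, DWT_MLEAD, IsolationForest, OneClassSVM and STRAY) all implement the BaseSeriesAnomalyDetector interface. A minimal usage sketch, assuming fit_predict returns one anomaly score per time point and treating window_size=50 purely as an illustrative value:

import numpy as np

from aeon.anomaly_detection.series.distance_based import STOMP

# Univariate series with a synthetic anomaly injected around index 500.
rng = np.random.default_rng(0)
series = rng.normal(size=1000)
series[500:510] += 5.0

# Assumption: STOMP exposes the matrix-profile window length as window_size
# and scores every time point, higher meaning more anomalous.
detector = STOMP(window_size=50)
scores = detector.fit_predict(series)
print(int(np.argmax(scores)))  # expected to fall near the injected anomaly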
-------------------------------------------------------------------------------- /aeon/base/_estimators/hybrid/__init__.py: -------------------------------------------------------------------------------- 1 | """Base classes for hybrid time series estimators.""" 2 | 3 | __all__ = ["BaseRIST"] 4 | 5 | from aeon.base._estimators.hybrid.base_rist import BaseRIST 6 | -------------------------------------------------------------------------------- /aeon/base/_estimators/hybrid/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Testing for hybrid base classes.""" 2 | -------------------------------------------------------------------------------- /aeon/base/_estimators/interval_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Base classes for interval-based time series estimators.""" 2 | 3 | __all__ = ["BaseIntervalForest"] 4 | 5 | from aeon.base._estimators.interval_based.base_interval_forest import BaseIntervalForest 6 | -------------------------------------------------------------------------------- /aeon/base/_estimators/interval_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Testing for interval-based base classes.""" 2 | -------------------------------------------------------------------------------- /aeon/base/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests.""" 2 | -------------------------------------------------------------------------------- /aeon/benchmarking/__init__.py: -------------------------------------------------------------------------------- 1 | """Benchmarking.""" 2 | -------------------------------------------------------------------------------- /aeon/benchmarking/metrics/__init__.py: -------------------------------------------------------------------------------- 1 | """Performance metrics.""" 2 | -------------------------------------------------------------------------------- /aeon/benchmarking/metrics/anomaly_detection/__init__.py: -------------------------------------------------------------------------------- 1 | """Metrics for anomaly detection.""" 2 | 3 | __all__ = [ 4 | "range_precision", 5 | "range_recall", 6 | "range_f_score", 7 | "roc_auc_score", 8 | "pr_auc_score", 9 | "rp_rr_auc_score", 10 | "f_score_at_k_points", 11 | "f_score_at_k_ranges", 12 | "range_pr_roc_auc_support", 13 | "range_roc_auc_score", 14 | "range_pr_auc_score", 15 | "range_pr_vus_score", 16 | "range_roc_vus_score", 17 | "ts_precision", 18 | "ts_recall", 19 | "ts_fscore", 20 | ] 21 | 22 | from aeon.benchmarking.metrics.anomaly_detection._continuous import ( 23 | f_score_at_k_points, 24 | f_score_at_k_ranges, 25 | pr_auc_score, 26 | roc_auc_score, 27 | rp_rr_auc_score, 28 | ) 29 | from aeon.benchmarking.metrics.anomaly_detection._range_metrics import ( 30 | range_f_score, 31 | range_precision, 32 | range_recall, 33 | ) 34 | from aeon.benchmarking.metrics.anomaly_detection._range_ts_metrics import ( 35 | ts_fscore, 36 | ts_precision, 37 | ts_recall, 38 | ) 39 | from aeon.benchmarking.metrics.anomaly_detection._vus_metrics import ( 40 | range_pr_auc_score, 41 | range_pr_roc_auc_support, 42 | range_pr_vus_score, 43 | range_roc_auc_score, 44 | range_roc_vus_score, 45 | ) 46 | -------------------------------------------------------------------------------- /aeon/benchmarking/metrics/anomaly_detection/tests/__init__.py: 
-------------------------------------------------------------------------------- 1 | """Tests for metrics in anomaly_detection module.""" 2 | -------------------------------------------------------------------------------- /aeon/benchmarking/metrics/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for performance metrics.""" 2 | -------------------------------------------------------------------------------- /aeon/benchmarking/metrics/tests/test_clustering.py: -------------------------------------------------------------------------------- 1 | """Tests for performance metric functions.""" 2 | 3 | import numpy as np 4 | 5 | from aeon.benchmarking.metrics.clustering import clustering_accuracy_score 6 | 7 | 8 | def test_clustering_accuracy(): 9 | """Test clustering accuracy with random labels and clusters.""" 10 | labels = np.random.randint(0, 3, 10) 11 | clusters = np.random.randint(0, 3, 10) 12 | cl_acc = clustering_accuracy_score(labels, clusters) 13 | 14 | assert isinstance(cl_acc, float) 15 | assert 0 <= cl_acc <= 1 16 | -------------------------------------------------------------------------------- /aeon/benchmarking/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for benchmarking.""" 2 | -------------------------------------------------------------------------------- /aeon/classification/__init__.py: -------------------------------------------------------------------------------- 1 | """Classifier Base.""" 2 | 3 | __all__ = [ 4 | "BaseClassifier", 5 | "DummyClassifier", 6 | ] 7 | 8 | from aeon.classification.base import BaseClassifier 9 | from aeon.classification.dummy import DummyClassifier 10 | -------------------------------------------------------------------------------- /aeon/classification/compose/__init__.py: -------------------------------------------------------------------------------- 1 | """Compositions for classifiers.""" 2 | 3 | __all__ = [ 4 | "ClassifierChannelEnsemble", 5 | "ClassifierEnsemble", 6 | "ClassifierPipeline", 7 | ] 8 | 9 | from aeon.classification.compose._channel_ensemble import ClassifierChannelEnsemble 10 | from aeon.classification.compose._ensemble import ClassifierEnsemble 11 | from aeon.classification.compose._pipeline import ClassifierPipeline 12 | -------------------------------------------------------------------------------- /aeon/classification/compose/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for classification composable estimators.""" 2 | -------------------------------------------------------------------------------- /aeon/classification/convolution_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Convolution-based time series classifiers.""" 2 | 3 | __all__ = [ 4 | "RocketClassifier", 5 | "MiniRocketClassifier", 6 | "MultiRocketClassifier", 7 | "Arsenal", 8 | "HydraClassifier", 9 | "MultiRocketHydraClassifier", 10 | ] 11 | 12 | from aeon.classification.convolution_based._arsenal import Arsenal 13 | from aeon.classification.convolution_based._hydra import HydraClassifier 14 | from aeon.classification.convolution_based._minirocket import MiniRocketClassifier 15 | from aeon.classification.convolution_based._mr_hydra import MultiRocketHydraClassifier 16 | from aeon.classification.convolution_based._multirocket import MultiRocketClassifier 17 | from 
aeon.classification.convolution_based._rocket import RocketClassifier 18 | -------------------------------------------------------------------------------- /aeon/classification/convolution_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Kernel based test code.""" 2 | -------------------------------------------------------------------------------- /aeon/classification/deep_learning/__init__.py: -------------------------------------------------------------------------------- 1 | """Deep learning based classifiers.""" 2 | 3 | __all__ = [ 4 | "BaseDeepClassifier", 5 | "TimeCNNClassifier", 6 | "EncoderClassifier", 7 | "FCNClassifier", 8 | "InceptionTimeClassifier", 9 | "IndividualInceptionClassifier", 10 | "MLPClassifier", 11 | "ResNetClassifier", 12 | "LITETimeClassifier", 13 | "IndividualLITEClassifier", 14 | "DisjointCNNClassifier", 15 | ] 16 | from aeon.classification.deep_learning._cnn import TimeCNNClassifier 17 | from aeon.classification.deep_learning._disjoint_cnn import DisjointCNNClassifier 18 | from aeon.classification.deep_learning._encoder import EncoderClassifier 19 | from aeon.classification.deep_learning._fcn import FCNClassifier 20 | from aeon.classification.deep_learning._inception_time import ( 21 | InceptionTimeClassifier, 22 | IndividualInceptionClassifier, 23 | ) 24 | from aeon.classification.deep_learning._lite_time import ( 25 | IndividualLITEClassifier, 26 | LITETimeClassifier, 27 | ) 28 | from aeon.classification.deep_learning._mlp import MLPClassifier 29 | from aeon.classification.deep_learning._resnet import ResNetClassifier 30 | from aeon.classification.deep_learning.base import BaseDeepClassifier 31 | -------------------------------------------------------------------------------- /aeon/classification/deep_learning/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for deep learning classifiers.""" 2 | -------------------------------------------------------------------------------- /aeon/classification/dictionary_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Dictionary based time series classifiers.""" 2 | 3 | __all__ = [ 4 | "IndividualBOSS", 5 | "BOSSEnsemble", 6 | "ContractableBOSS", 7 | "TemporalDictionaryEnsemble", 8 | "IndividualTDE", 9 | "WEASEL", 10 | "WEASEL_V2", 11 | "MUSE", 12 | "REDCOMETS", 13 | "MrSQMClassifier", 14 | "MrSEQLClassifier", 15 | ] 16 | 17 | from aeon.classification.dictionary_based._boss import BOSSEnsemble, IndividualBOSS 18 | from aeon.classification.dictionary_based._cboss import ContractableBOSS 19 | from aeon.classification.dictionary_based._mrseql import MrSEQLClassifier 20 | from aeon.classification.dictionary_based._mrsqm import MrSQMClassifier 21 | from aeon.classification.dictionary_based._muse import MUSE 22 | from aeon.classification.dictionary_based._redcomets import REDCOMETS 23 | from aeon.classification.dictionary_based._tde import ( 24 | IndividualTDE, 25 | TemporalDictionaryEnsemble, 26 | ) 27 | from aeon.classification.dictionary_based._weasel import WEASEL 28 | from aeon.classification.dictionary_based._weasel_v2 import WEASEL_V2 29 | -------------------------------------------------------------------------------- /aeon/classification/dictionary_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Dictionary based test code.""" 2 | 
-------------------------------------------------------------------------------- /aeon/classification/dictionary_based/tests/test_boss.py: -------------------------------------------------------------------------------- 1 | """BOSS test code.""" 2 | 3 | import pytest 4 | 5 | from aeon.classification.dictionary_based import BOSSEnsemble, ContractableBOSS 6 | from aeon.testing.data_generation import ( 7 | make_example_2d_numpy_collection, 8 | make_example_3d_numpy, 9 | ) 10 | 11 | 12 | def test_cboss_small_train(): 13 | """Test with a small amount of train cases, subsampling can cause issues.""" 14 | X, y = make_example_2d_numpy_collection(n_cases=3, n_timepoints=20, n_labels=2) 15 | cboss = ContractableBOSS(n_parameter_samples=10, max_ensemble_size=3) 16 | cboss.fit(X, y) 17 | cboss.predict(X) 18 | 19 | 20 | def test_boss_min_window(): 21 | """Test BOSS throws error when min window too big.""" 22 | boss = BOSSEnsemble(min_window=20) 23 | X, y = make_example_3d_numpy(n_cases=10, n_channels=1, n_timepoints=10) 24 | with pytest.raises(ValueError, match="Error in BOSSEnsemble, min_window"): 25 | boss._fit(X, y) 26 | -------------------------------------------------------------------------------- /aeon/classification/dictionary_based/tests/test_muse.py: -------------------------------------------------------------------------------- 1 | """Test MUSE multivariate classifier.""" 2 | 3 | import pytest 4 | 5 | from aeon.classification.dictionary_based import MUSE 6 | from aeon.testing.data_generation import make_example_3d_numpy 7 | 8 | 9 | def test_muse(): 10 | """Test MUSE with first order differences and incorrect input.""" 11 | muse = MUSE(use_first_order_differences=True) 12 | X, y = make_example_3d_numpy(n_cases=10, n_channels=3, n_timepoints=5) 13 | X2 = muse._add_first_order_differences(X) 14 | assert X2.shape[2] == X.shape[2] and X2.shape[1] == X.shape[1] * 2 15 | with pytest.raises(ValueError, match="Error in MUSE, min_window"): 16 | muse.fit(X, y) 17 | -------------------------------------------------------------------------------- /aeon/classification/dictionary_based/tests/test_weasel.py: -------------------------------------------------------------------------------- 1 | """WEASEL test code.""" 2 | 3 | import numpy as np 4 | 5 | from aeon.classification.dictionary_based._weasel import WEASEL 6 | from aeon.classification.dictionary_based._weasel_v2 import WEASEL_V2 7 | from aeon.datasets import load_unit_test 8 | 9 | 10 | def test_weasel_score(): 11 | """Test of WEASEL train estimate on unit test data.""" 12 | # load unit test data 13 | X_train, y_train = load_unit_test(split="train") 14 | X_test, y_test = load_unit_test(split="test") 15 | 16 | # train weasel 17 | weasel = WEASEL(random_state=0) 18 | weasel.fit(X_train, y_train) 19 | score = weasel.score(X_test, y_test) 20 | 21 | assert isinstance(score, float) 22 | np.testing.assert_almost_equal(score, 0.727272, decimal=4) 23 | 24 | 25 | def test_weasel_v2_score(): 26 | """Test of WEASEL v2 train estimate on unit test data.""" 27 | # load unit test data 28 | X_train, y_train = load_unit_test(split="train") 29 | X_test, y_test = load_unit_test(split="test") 30 | 31 | # train weasel 32 | weasel = WEASEL_V2(random_state=0) 33 | weasel.fit(X_train, y_train) 34 | score = weasel.score(X_test, y_test) 35 | 36 | assert isinstance(score, float) 37 | np.testing.assert_almost_equal(score, 0.90909, decimal=4) 38 | -------------------------------------------------------------------------------- /aeon/classification/distance_based/__init__.py: 
-------------------------------------------------------------------------------- 1 | """Distance based time series classifiers.""" 2 | 3 | __all__ = [ 4 | "ElasticEnsemble", 5 | "KNeighborsTimeSeriesClassifier", 6 | "ProximityTree", 7 | "ProximityForest", 8 | ] 9 | 10 | from aeon.classification.distance_based._elastic_ensemble import ElasticEnsemble 11 | from aeon.classification.distance_based._proximity_forest import ProximityForest 12 | from aeon.classification.distance_based._proximity_tree import ProximityTree 13 | from aeon.classification.distance_based._time_series_neighbors import ( 14 | KNeighborsTimeSeriesClassifier, 15 | ) 16 | -------------------------------------------------------------------------------- /aeon/classification/distance_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Distance based test code.""" 2 | -------------------------------------------------------------------------------- /aeon/classification/early_classification/__init__.py: -------------------------------------------------------------------------------- 1 | """Early classification time series classifiers.""" 2 | 3 | __all__ = [ 4 | "BaseEarlyClassifier", 5 | "ProbabilityThresholdEarlyClassifier", 6 | "TEASER", 7 | ] 8 | 9 | from aeon.classification.early_classification._probability_threshold import ( 10 | ProbabilityThresholdEarlyClassifier, 11 | ) 12 | from aeon.classification.early_classification._teaser import TEASER 13 | from aeon.classification.early_classification.base import BaseEarlyClassifier 14 | -------------------------------------------------------------------------------- /aeon/classification/early_classification/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Early classification test code.""" 2 | -------------------------------------------------------------------------------- /aeon/classification/feature_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Feature based time series classifiers. 2 | 3 | While a bit vague, the contents mostly consist of transformers that extract features 4 | pipelined to a vector classifier. 
5 | """ 6 | 7 | __all__ = [ 8 | "Catch22Classifier", 9 | "SignatureClassifier", 10 | "SummaryClassifier", 11 | "TSFreshClassifier", 12 | "FreshPRINCEClassifier", 13 | "TDMVDCClassifier", 14 | ] 15 | 16 | from aeon.classification.feature_based._catch22 import Catch22Classifier 17 | from aeon.classification.feature_based._fresh_prince import FreshPRINCEClassifier 18 | from aeon.classification.feature_based._signature_classifier import SignatureClassifier 19 | from aeon.classification.feature_based._summary import SummaryClassifier 20 | from aeon.classification.feature_based._tdmvdc import TDMVDCClassifier 21 | from aeon.classification.feature_based._tsfresh import TSFreshClassifier 22 | -------------------------------------------------------------------------------- /aeon/classification/feature_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for feature-based classification.""" 2 | -------------------------------------------------------------------------------- /aeon/classification/hybrid/__init__.py: -------------------------------------------------------------------------------- 1 | """Hybrid time series classifiers.""" 2 | 3 | __all__ = [ 4 | "HIVECOTEV1", 5 | "HIVECOTEV2", 6 | "RISTClassifier", 7 | ] 8 | 9 | from aeon.classification.hybrid._hivecote_v1 import HIVECOTEV1 10 | from aeon.classification.hybrid._hivecote_v2 import HIVECOTEV2 11 | from aeon.classification.hybrid._rist import RISTClassifier 12 | -------------------------------------------------------------------------------- /aeon/classification/hybrid/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for time series classifiers.""" 2 | -------------------------------------------------------------------------------- /aeon/classification/interval_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Interval-based time series classifiers.""" 2 | 3 | __all__ = [ 4 | "CanonicalIntervalForestClassifier", 5 | "DrCIFClassifier", 6 | "IntervalForestClassifier", 7 | "RandomIntervalClassifier", 8 | "SupervisedIntervalClassifier", 9 | "RandomIntervalSpectralEnsembleClassifier", 10 | "RSTSF", 11 | "SupervisedTimeSeriesForest", 12 | "TimeSeriesForestClassifier", 13 | "QUANTClassifier", 14 | ] 15 | 16 | from aeon.classification.interval_based._cif import CanonicalIntervalForestClassifier 17 | from aeon.classification.interval_based._drcif import DrCIFClassifier 18 | from aeon.classification.interval_based._interval_forest import IntervalForestClassifier 19 | from aeon.classification.interval_based._interval_pipelines import ( 20 | RandomIntervalClassifier, 21 | SupervisedIntervalClassifier, 22 | ) 23 | from aeon.classification.interval_based._quant import QUANTClassifier 24 | from aeon.classification.interval_based._rise import ( 25 | RandomIntervalSpectralEnsembleClassifier, 26 | ) 27 | from aeon.classification.interval_based._rstsf import RSTSF 28 | from aeon.classification.interval_based._stsf import SupervisedTimeSeriesForest 29 | from aeon.classification.interval_based._tsf import TimeSeriesForestClassifier 30 | -------------------------------------------------------------------------------- /aeon/classification/interval_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for interval-based classifiers.""" 2 | 
-------------------------------------------------------------------------------- /aeon/classification/interval_based/tests/test_interval_pipelines.py: -------------------------------------------------------------------------------- 1 | """Test interval pipelines.""" 2 | 3 | import pytest 4 | from sklearn.svm import SVC 5 | 6 | from aeon.classification.interval_based import ( 7 | RandomIntervalClassifier, 8 | SupervisedIntervalClassifier, 9 | ) 10 | from aeon.testing.testing_data import EQUAL_LENGTH_UNIVARIATE_CLASSIFICATION 11 | from aeon.testing.utils.estimator_checks import _assert_predict_probabilities 12 | 13 | 14 | @pytest.mark.parametrize( 15 | "cls", [SupervisedIntervalClassifier, RandomIntervalClassifier] 16 | ) 17 | def test_interval_pipeline_classifiers(cls): 18 | """Test the random interval classifiers.""" 19 | X_train, y_train = EQUAL_LENGTH_UNIVARIATE_CLASSIFICATION["numpy3D"]["train"] 20 | X_test, y_test = EQUAL_LENGTH_UNIVARIATE_CLASSIFICATION["numpy3D"]["test"] 21 | 22 | params = cls._get_test_params() 23 | if isinstance(params, list): 24 | params = params[0] 25 | params.update({"estimator": SVC()}) 26 | 27 | clf = cls(**params) 28 | clf.fit(X_train, y_train) 29 | prob = clf.predict_proba(X_test) 30 | _assert_predict_probabilities(prob, X_test, n_classes=2) 31 | -------------------------------------------------------------------------------- /aeon/classification/ordinal_classification/__init__.py: -------------------------------------------------------------------------------- 1 | """Ordinal time series classifiers.""" 2 | 3 | __all__ = [ 4 | "OrdinalTDE", 5 | "IndividualOrdinalTDE", 6 | "histogram_intersection", 7 | ] 8 | 9 | from aeon.classification.ordinal_classification._ordinal_tde import ( 10 | IndividualOrdinalTDE, 11 | OrdinalTDE, 12 | histogram_intersection, 13 | ) 14 | -------------------------------------------------------------------------------- /aeon/classification/ordinal_classification/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Ordinal classifier tests.""" 2 | -------------------------------------------------------------------------------- /aeon/classification/shapelet_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Shapelet based time series classifiers.""" 2 | 3 | __all__ = [ 4 | "ShapeletTransformClassifier", 5 | "RDSTClassifier", 6 | "SASTClassifier", 7 | "RSASTClassifier", 8 | "LearningShapeletClassifier", 9 | ] 10 | 11 | from aeon.classification.shapelet_based._ls import LearningShapeletClassifier 12 | from aeon.classification.shapelet_based._rdst import RDSTClassifier 13 | from aeon.classification.shapelet_based._rsast import RSASTClassifier 14 | from aeon.classification.shapelet_based._sast import SASTClassifier 15 | from aeon.classification.shapelet_based._stc import ShapeletTransformClassifier 16 | -------------------------------------------------------------------------------- /aeon/classification/shapelet_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Shapelet based tests.""" 2 | -------------------------------------------------------------------------------- /aeon/classification/shapelet_based/tests/test_ls.py: -------------------------------------------------------------------------------- 1 | """Learned Shapelets tests.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.classification.shapelet_based import LearningShapeletClassifier 7 | from 
aeon.testing.data_generation import make_example_3d_numpy 8 | from aeon.utils.validation._dependencies import _check_soft_dependencies 9 | 10 | 11 | @pytest.mark.skipif( 12 | not _check_soft_dependencies(["tslearn", "tensorflow"], severity="none"), 13 | reason="skip test if required soft dependency not available", 14 | ) 15 | def test_get_transform(): 16 | """Learned Shapelets tests not covered by standard test suite.""" 17 | X = make_example_3d_numpy(return_y=False, n_cases=10, n_timepoints=20) 18 | y = np.array([0, 1, 0, 1, 0, 1, 0, 1, 0, 1]) 19 | 20 | # Test get transform and location with and without save_transformed_data 21 | clf = LearningShapeletClassifier( 22 | max_iter=10, total_lengths=1, save_transformed_data=True 23 | ) 24 | with pytest.raises(ValueError): 25 | clf.get_transform(X) 26 | with pytest.raises(ValueError): 27 | clf.get_locations(X) 28 | clf.fit(X, y) 29 | t = clf.get_transform(X) 30 | assert isinstance(t, np.ndarray) 31 | -------------------------------------------------------------------------------- /aeon/classification/shapelet_based/tests/test_rdst.py: -------------------------------------------------------------------------------- 1 | """RDST tests.""" 2 | 3 | import numpy as np 4 | from sklearn.ensemble import RandomForestClassifier 5 | 6 | from aeon.classification.shapelet_based import RDSTClassifier 7 | from aeon.testing.data_generation import make_example_3d_numpy 8 | 9 | 10 | def test_predict_proba(): 11 | """RDST tests for code not covered by standard tests.""" 12 | X = make_example_3d_numpy(return_y=False, n_cases=10) 13 | y = np.array([0, 1, 0, 1, 0, 1, 0, 1, 0, 1]) 14 | 15 | clf = RDSTClassifier(estimator=RandomForestClassifier(n_jobs=1)) 16 | clf.fit(X, y) 17 | p = clf._predict_proba(X) 18 | assert p.shape == (10, 2) 19 | p = clf._predict(X) 20 | assert p.shape == (10,) 21 | -------------------------------------------------------------------------------- /aeon/classification/shapelet_based/tests/test_stc.py: -------------------------------------------------------------------------------- 1 | """STC specific tests.""" 2 | 3 | import numpy as np 4 | import pytest 5 | from sklearn.ensemble import RandomForestClassifier 6 | from sklearn.svm import SVC 7 | 8 | from aeon.classification.shapelet_based import ShapeletTransformClassifier 9 | from aeon.testing.data_generation import make_example_3d_numpy 10 | 11 | 12 | def test_predict_proba(): 13 | """Test predict_proba when classifier has no predict_proba method.""" 14 | X = make_example_3d_numpy(return_y=False, n_cases=10) 15 | y = np.array([0, 1, 0, 1, 0, 1, 0, 1, 0, 1]) 16 | stc = ShapeletTransformClassifier(estimator=SVC(probability=False)) 17 | stc.fit(X, y) 18 | probas = stc._predict_proba(X) 19 | assert np.all( 20 | (probas == 0.0) | (probas == 1.0) 21 | ), "Array contains values other than 0 and 1" 22 | with pytest.raises(ValueError, match="Estimator must have a predict_proba method"): 23 | stc._fit_predict_proba(X, y) 24 | stc = ShapeletTransformClassifier(estimator=RandomForestClassifier(n_estimators=10)) 25 | y = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0, 1]) 26 | with pytest.raises(ValueError, match="All classes must have at least 2 values"): 27 | stc._fit_predict_proba(X, y) 28 | -------------------------------------------------------------------------------- /aeon/classification/sklearn/__init__.py: -------------------------------------------------------------------------------- 1 | """Vector sklearn classifiers.""" 2 | 3 | __all__ = [ 4 | "RotationForestClassifier", 5 | "ContinuousIntervalTree", 6 | 
"SklearnClassifierWrapper", 7 | ] 8 | 9 | from aeon.classification.sklearn._continuous_interval_tree import ContinuousIntervalTree 10 | from aeon.classification.sklearn._rotation_forest_classifier import ( 11 | RotationForestClassifier, 12 | ) 13 | from aeon.classification.sklearn._wrapper import SklearnClassifierWrapper 14 | -------------------------------------------------------------------------------- /aeon/classification/sklearn/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """sklearn classifier test code.""" 2 | -------------------------------------------------------------------------------- /aeon/classification/sklearn/tests/test_all_classifiers.py: -------------------------------------------------------------------------------- 1 | """Unit tests for sklearn classifiers.""" 2 | 3 | __maintainer__ = [] 4 | 5 | from sklearn.utils.estimator_checks import parametrize_with_checks 6 | 7 | from aeon.classification.sklearn import ContinuousIntervalTree, RotationForestClassifier 8 | 9 | 10 | @parametrize_with_checks( 11 | [RotationForestClassifier(n_estimators=3), ContinuousIntervalTree()] 12 | ) 13 | def test_sklearn_compatible_estimator(estimator, check): 14 | """Test that sklearn estimators adhere to sklearn conventions.""" 15 | try: 16 | check(estimator) 17 | except AssertionError as error: 18 | # ContinuousIntervalTree can handle NaN values 19 | if not isinstance( 20 | estimator, ContinuousIntervalTree 21 | ) or "check for NaN and inf" not in str(error): 22 | raise error 23 | -------------------------------------------------------------------------------- /aeon/classification/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Classifier Tests.""" 2 | -------------------------------------------------------------------------------- /aeon/classification/tests/test_dummy.py: -------------------------------------------------------------------------------- 1 | """Test function of DummyClassifier.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.classification import DummyClassifier 7 | 8 | 9 | @pytest.mark.parametrize( 10 | "strategy", ["most_frequent", "prior", "stratified", "uniform", "constant"] 11 | ) 12 | def test_dummy_classifier_strategies(strategy): 13 | """Test DummyClassifier strategies.""" 14 | X = np.ones(shape=(10, 10)) 15 | y_train = np.random.choice([0, 1], size=10) 16 | 17 | dummy = DummyClassifier(strategy=strategy, constant=1) 18 | dummy.fit(X, y_train) 19 | 20 | pred = dummy.predict(X) 21 | assert isinstance(pred, np.ndarray) 22 | assert all(i in [0, 1] for i in pred) 23 | 24 | 25 | def test_dummy_classifier_default(): 26 | """Test DummyClassifier predicts majority class and prior distribution.""" 27 | X = np.ones(shape=(10, 10)) 28 | y_train = np.array([1, 1, 1, 1, 1, 1, 1, 1, 0, 0]) 29 | y_expected = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1]) 30 | 31 | dummy = DummyClassifier() 32 | dummy.fit(X, y_train) 33 | 34 | pred = dummy.predict(X) 35 | np.testing.assert_array_equal(y_expected, pred) 36 | 37 | pred_proba = dummy.predict_proba(X) 38 | assert all(np.array_equal([0.2, 0.8], i) for i in pred_proba) 39 | -------------------------------------------------------------------------------- /aeon/clustering/__init__.py: -------------------------------------------------------------------------------- 1 | """Time series clustering module.""" 2 | 3 | __all__ = [ 4 | "BaseClusterer", 5 | "TimeSeriesKMedoids", 6 | "TimeSeriesCLARA", 7 | 
"TimeSeriesCLARANS", 8 | "TimeSeriesKMeans", 9 | "TimeSeriesKShape", 10 | "TimeSeriesKernelKMeans", 11 | "KASBA", 12 | "ElasticSOM", 13 | "KSpectralCentroid", 14 | "DummyClusterer", 15 | ] 16 | 17 | from aeon.clustering._clara import TimeSeriesCLARA 18 | from aeon.clustering._clarans import TimeSeriesCLARANS 19 | from aeon.clustering._elastic_som import ElasticSOM 20 | from aeon.clustering._k_means import TimeSeriesKMeans 21 | from aeon.clustering._k_medoids import TimeSeriesKMedoids 22 | from aeon.clustering._k_sc import KSpectralCentroid 23 | from aeon.clustering._k_shape import TimeSeriesKShape 24 | from aeon.clustering._kasba import KASBA 25 | from aeon.clustering._kernel_k_means import TimeSeriesKernelKMeans 26 | from aeon.clustering.base import BaseClusterer 27 | from aeon.clustering.dummy import DummyClusterer 28 | -------------------------------------------------------------------------------- /aeon/clustering/averaging/__init__.py: -------------------------------------------------------------------------------- 1 | """Time series averaging metrics.""" 2 | 3 | __all__ = [ 4 | "elastic_barycenter_average", 5 | "mean_average", 6 | "petitjean_barycenter_average", 7 | "subgradient_barycenter_average", 8 | "VALID_BA_METRICS", 9 | "shift_invariant_average", 10 | "kasba_average", 11 | ] 12 | 13 | from aeon.clustering.averaging._averaging import mean_average 14 | from aeon.clustering.averaging._ba_petitjean import petitjean_barycenter_average 15 | from aeon.clustering.averaging._ba_subgradient import subgradient_barycenter_average 16 | from aeon.clustering.averaging._ba_utils import VALID_BA_METRICS 17 | from aeon.clustering.averaging._barycenter_averaging import elastic_barycenter_average 18 | from aeon.clustering.averaging._kasba_average import kasba_average 19 | from aeon.clustering.averaging._shift_scale_invariant_averaging import ( 20 | shift_invariant_average, 21 | ) 22 | -------------------------------------------------------------------------------- /aeon/clustering/averaging/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for clustering metrics.""" 2 | -------------------------------------------------------------------------------- /aeon/clustering/compose/__init__.py: -------------------------------------------------------------------------------- 1 | """Compositions for clusterers.""" 2 | 3 | __all__ = [ 4 | "ClustererPipeline", 5 | ] 6 | 7 | from aeon.clustering.compose._pipeline import ClustererPipeline 8 | -------------------------------------------------------------------------------- /aeon/clustering/compose/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for regression composable estimator.""" 2 | -------------------------------------------------------------------------------- /aeon/clustering/deep_learning/__init__.py: -------------------------------------------------------------------------------- 1 | """Deep learning based clusterers.""" 2 | 3 | __all__ = [ 4 | "BaseDeepClusterer", 5 | "AEFCNClusterer", 6 | "AEResNetClusterer", 7 | "AEDCNNClusterer", 8 | "AEDRNNClusterer", 9 | "AEAttentionBiGRUClusterer", 10 | "AEBiGRUClusterer", 11 | ] 12 | from aeon.clustering.deep_learning._ae_abgru import AEAttentionBiGRUClusterer 13 | from aeon.clustering.deep_learning._ae_bgru import AEBiGRUClusterer 14 | from aeon.clustering.deep_learning._ae_dcnn import AEDCNNClusterer 15 | from aeon.clustering.deep_learning._ae_drnn import AEDRNNClusterer 16 | from 
aeon.clustering.deep_learning._ae_fcn import AEFCNClusterer 17 | from aeon.clustering.deep_learning._ae_resnet import AEResNetClusterer 18 | from aeon.clustering.deep_learning.base import BaseDeepClusterer 19 | -------------------------------------------------------------------------------- /aeon/clustering/deep_learning/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Deep learning clustering tests.""" 2 | -------------------------------------------------------------------------------- /aeon/clustering/deep_learning/tests/test_deep_clusterer_features.py: -------------------------------------------------------------------------------- 1 | """Tests whether various clusterer params work well.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.clustering.deep_learning import AEFCNClusterer, AEResNetClusterer 7 | from aeon.utils.validation._dependencies import _check_soft_dependencies 8 | 9 | 10 | @pytest.mark.skipif( 11 | not _check_soft_dependencies(["tensorflow"], severity="none"), 12 | reason="Tensorflow soft dependency not found.", 13 | ) 14 | def test_multi_rec_fcn(): 15 | """Tests whether multi-rec loss works fine or not.""" 16 | X = np.random.random((100, 5, 2)) 17 | clst = AEFCNClusterer(**AEFCNClusterer._get_test_params()[0], loss="multi_rec") 18 | clst.fit(X) 19 | assert isinstance(clst.history["loss"][-1], float) 20 | 21 | clst = AEResNetClusterer( 22 | **AEResNetClusterer._get_test_params()[0], loss="multi_rec" 23 | ) 24 | clst.fit(X) 25 | assert isinstance(clst.history["loss"][-1], float) 26 | -------------------------------------------------------------------------------- /aeon/clustering/feature_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Feature based time series clusterers. 2 | 3 | While a bit vague, the contents mostly consist of transformers that extract features 4 | pipelined to a vector clusterer. 
5 | """ 6 | 7 | __all__ = [ 8 | "Catch22Clusterer", 9 | "SummaryClusterer", 10 | "TSFreshClusterer", 11 | ] 12 | 13 | from aeon.clustering.feature_based._catch22 import Catch22Clusterer 14 | from aeon.clustering.feature_based._summary import SummaryClusterer 15 | from aeon.clustering.feature_based._tsfresh import TSFreshClusterer 16 | -------------------------------------------------------------------------------- /aeon/clustering/feature_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Feature Based learning clustering tests.""" 2 | -------------------------------------------------------------------------------- /aeon/clustering/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for clustering.""" 2 | -------------------------------------------------------------------------------- /aeon/clustering/tests/test_dummy.py: -------------------------------------------------------------------------------- 1 | """Tests for DummyClusterer.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.clustering import DummyClusterer 7 | 8 | 9 | @pytest.mark.parametrize("strategy", ["random", "uniform", "single_cluster"]) 10 | def test_dummy_clusterer(strategy): 11 | """Test dummy clusterer basic functionalities.""" 12 | model = DummyClusterer(strategy=strategy, n_clusters=3) 13 | data = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]) 14 | model.fit(data) 15 | preds = model.predict(data) 16 | 17 | assert len(preds) == 3 18 | assert np.all(np.array([(pred < 3) for pred in preds])) 19 | assert np.all(np.array([(pred >= 0) for pred in preds])) 20 | -------------------------------------------------------------------------------- /aeon/clustering/tests/test_kasba.py: -------------------------------------------------------------------------------- 1 | """Test KASBA.""" 2 | 3 | import numpy as np 4 | 5 | from aeon.benchmarking.metrics.clustering import clustering_accuracy_score 6 | from aeon.clustering import KASBA 7 | from aeon.testing.data_generation import make_example_3d_numpy 8 | 9 | 10 | def test_univariate_kasba(): 11 | """Test KASBA on univariate data.""" 12 | X, y = make_example_3d_numpy(20, 1, 10, random_state=1, return_y=True) 13 | 14 | kasba = KASBA(n_clusters=len(np.unique(y)), random_state=1) 15 | 16 | kasba.fit(X) 17 | score = clustering_accuracy_score(y, kasba.labels_) 18 | assert score == 0.95 19 | 20 | 21 | def test_multivariate_kasba(): 22 | """Test KASBA on multivariate data.""" 23 | X, y = make_example_3d_numpy(20, 3, 10, random_state=1, return_y=True) 24 | 25 | kasba = KASBA(n_clusters=len(np.unique(y)), random_state=1) 26 | 27 | kasba.fit(X) 28 | score = clustering_accuracy_score(y, kasba.labels_) 29 | assert score == 0.55 30 | -------------------------------------------------------------------------------- /aeon/datasets/data/Longley/Longley.csv: -------------------------------------------------------------------------------- 1 | "Obs","TOTEMP","GNPDEFL","GNP","UNEMP","ARMED","POP","YEAR" 2 | 1,60323,83,234289,2356,1590,107608,1947 3 | 2,61122,88.5,259426,2325,1456,108632,1948 4 | 3,60171,88.2,258054,3682,1616,109773,1949 5 | 4,61187,89.5,284599,3351,1650,110929,1950 6 | 5,63221,96.2,328975,2099,3099,112075,1951 7 | 6,63639,98.1,346999,1932,3594,113270,1952 8 | 7,64989,99,365385,1870,3547,115094,1953 9 | 8,63761,100,363112,3578,3350,116219,1954 10 | 9,66019,101.2,397469,2904,3048,117388,1955 11 | 10,67857,104.6,419180,2822,2857,118734,1956 12 | 
11,68169,108.4,442769,2936,2798,120445,1957 13 | 12,66513,110.8,444546,4681,2637,121950,1958 14 | 13,68655,112.6,482704,3813,2552,123366,1959 15 | 14,69564,114.2,502601,3931,2514,125368,1960 16 | 15,69331,115.7,518173,4806,2572,127852,1961 17 | 16,70551,116.9,554894,4007,2827,130081,1962 18 | -------------------------------------------------------------------------------- /aeon/datasets/data/ShampooSales/ShampooSales.csv: -------------------------------------------------------------------------------- 1 | Time,Sales 2 | 1991-01,266.0 3 | 1991-02,145.9 4 | 1991-03,183.1 5 | 1991-04,119.3 6 | 1991-05,180.3 7 | 1991-06,168.5 8 | 1991-07,231.8 9 | 1991-08,224.5 10 | 1991-09,192.8 11 | 1991-10,122.9 12 | 1991-11,336.5 13 | 1991-12,185.9 14 | 1992-01,194.3 15 | 1992-02,149.5 16 | 1992-03,210.1 17 | 1992-04,273.3 18 | 1992-05,191.4 19 | 1992-06,287.0 20 | 1992-07,226.0 21 | 1992-08,303.6 22 | 1992-09,289.9 23 | 1992-10,421.6 24 | 1992-11,264.5 25 | 1992-12,342.3 26 | 1993-01,339.7 27 | 1993-02,440.4 28 | 1993-03,315.9 29 | 1993-04,439.3 30 | 1993-05,401.3 31 | 1993-06,437.4 32 | 1993-07,575.5 33 | 1993-08,407.6 34 | 1993-09,682.0 35 | 1993-10,475.3 36 | 1993-11,581.3 37 | 1993-12,646.9 38 | -------------------------------------------------------------------------------- /aeon/datasets/data/UnitTest/UnitTestTimeStamps_TRAIN.ts: -------------------------------------------------------------------------------- 1 | %# This is a cut down version of the problem ChinaTown, useful for code examples and very slow unit tests 2 | % The Train set is the same as ChinaTown, but the test set is reduced from 340 cases to 22 cases 3 | % 4 | @problemName UnitTestTimeStamps 5 | @timeStamps True 6 | @missing false 7 | @univariate true 8 | @equalLength true 9 | @seriesLength 4 10 | @classLabel true 1 2 11 | @data 12 | (2007-01-01 00:00:00,241.97),(2007-01-01 00:01:00,241.75),(2007-01-01 00:02:00,241.64),(2007-01-01 00:03:00,241.71):1 13 | (2007-01-01 01:36:00,239.91),(2007-01-01 01:37:00,240.89),(2007-01-01 01:38:00,240.01),(2007-01-01 01:39:00,239.63):1 14 | (2008-09-09 20:56:00,4.0),(2008-09-09 20:57:00,0.0),(2008-09-09 20:58:00,1.0),(2008-09-09 20:59:00,11.0):2 15 | (2008-09-09 18:50:00,1.0),(2008-09-09 18:51:00,0.0),(2008-09-09 18:52:00,1.0),(2008-09-09 18:53:00,1.0):2 16 | -------------------------------------------------------------------------------- /aeon/datasets/data/UnitTest/UnitTest_Tsf_Loader.tsf: -------------------------------------------------------------------------------- 1 | # Dataset Information 2 | # Meta-data for unit testing 3 | # 4 | @relation test 5 | @attribute series_name string 6 | @attribute start_timestamp date 7 | @frequency yearly 8 | @horizon 4 9 | @missing false 10 | @equallength false 11 | @data 12 | T1:1979-01-01 00-00-00:25092.2284,24271.5134,25828.9883,27697.5047,27956.2276,29924.4321,30216.8321 13 | T2:1979-01-01 00-00-00:887896.51,887068.98,971549.04 14 | T3:1973-01-01 00-00-00:227921,230995,183635,238605,254186 15 | -------------------------------------------------------------------------------- /aeon/datasets/data/UnitTest/UnitTest_Tsf_Loader_hierarchical.tsf: -------------------------------------------------------------------------------- 1 | # Dataset Information 2 | # Meta-data for unit testing 3 | # 4 | @relation test 5 | @attribute series_group string 6 | @attribute series_name string 7 | @attribute start_timestamp date 8 | @frequency yearly 9 | @horizon 4 10 | @missing false 11 | @equallength false 12 | @data 13 | G1:T1:1979-01-01 
00-00-00:25092.2284,24271.5134,25828.9883,27697.5047,27956.2276,29924.4321,30216.8321 14 | G1:T2:1979-01-01 00-00-00:887896.51,887068.98,971549.04 15 | G2:T3:1973-01-01 00-00-00:227921,230995,183635,238605,254186 16 | -------------------------------------------------------------------------------- /aeon/datasets/data/UnitTest/UnitTest_Tsf_Loader_no_start_timestamp.tsf: -------------------------------------------------------------------------------- 1 | # Dataset Information 2 | # Meta-data for unit testing 3 | # 4 | @relation test 5 | @attribute series_name string 6 | @frequency yearly 7 | @horizon 4 8 | @missing false 9 | @equallength false 10 | @data 11 | T1:25092.2284,24271.5134,25828.9883,27697.5047,27956.2276,29924.4321,30216.8321 12 | T2:887896.51,887068.98,971549.04 13 | T3:227921,230995,183635,238605,254186 14 | -------------------------------------------------------------------------------- /aeon/datasets/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests.""" 2 | -------------------------------------------------------------------------------- /aeon/datasets/tests/test_tsad_datasets.py: -------------------------------------------------------------------------------- 1 | """Test functions in tsad_datasets.py.""" 2 | 3 | import tempfile 4 | from pathlib import Path 5 | 6 | import pytest 7 | 8 | from aeon.datasets.tsad_datasets import ( 9 | multivariate, 10 | supervised, 11 | tsad_collections, 12 | tsad_datasets, 13 | univariate, 14 | unsupervised, 15 | ) 16 | from aeon.testing.testing_config import PR_TESTING 17 | 18 | 19 | @pytest.mark.skipif( 20 | PR_TESTING, 21 | reason="Only run on overnights because of read from internet.", 22 | ) 23 | def test_helper_functions(mocker): 24 | """Test helper functions.""" 25 | with tempfile.TemporaryDirectory() as tmp: 26 | tmp = Path(tmp) 27 | mocker.patch("aeon.datasets.tsad_datasets._DATA_FOLDER", tmp) 28 | d = tsad_collections() 29 | assert isinstance(d, dict) 30 | d = tsad_datasets() 31 | assert isinstance(d, list) 32 | d = univariate() 33 | assert isinstance(d, list) 34 | d = multivariate() 35 | assert isinstance(d, list) 36 | d = unsupervised() 37 | assert isinstance(d, list) 38 | d = supervised() 39 | assert isinstance(d, list) 40 | -------------------------------------------------------------------------------- /aeon/distances/elastic/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Elastic distance tests.""" 2 | -------------------------------------------------------------------------------- /aeon/distances/mindist/__init__.py: -------------------------------------------------------------------------------- 1 | """Mindist module.""" 2 | 3 | __all__ = [ 4 | "mindist_dft_sfa_distance", 5 | "mindist_dft_sfa_pairwise_distance", 6 | "mindist_paa_sax_distance", 7 | "mindist_paa_sax_pairwise_distance", 8 | "mindist_sax_distance", 9 | "mindist_sax_pairwise_distance", 10 | "mindist_sfa_distance", 11 | "mindist_sfa_pairwise_distance", 12 | ] 13 | from aeon.distances.mindist._dft_sfa import ( 14 | mindist_dft_sfa_distance, 15 | mindist_dft_sfa_pairwise_distance, 16 | ) 17 | from aeon.distances.mindist._paa_sax import ( 18 | mindist_paa_sax_distance, 19 | mindist_paa_sax_pairwise_distance, 20 | ) 21 | from aeon.distances.mindist._sax import ( 22 | mindist_sax_distance, 23 | mindist_sax_pairwise_distance, 24 | ) 25 | from aeon.distances.mindist._sfa import ( 26 | mindist_sfa_distance, 27 | mindist_sfa_pairwise_distance, 28 | ) 29 | 
-------------------------------------------------------------------------------- /aeon/distances/pointwise/__init__.py: -------------------------------------------------------------------------------- 1 | """Pointwise distances.""" 2 | 3 | __all__ = [ 4 | "euclidean_distance", 5 | "euclidean_pairwise_distance", 6 | "manhattan_distance", 7 | "manhattan_pairwise_distance", 8 | "minkowski_distance", 9 | "minkowski_pairwise_distance", 10 | "squared_distance", 11 | "squared_pairwise_distance", 12 | ] 13 | 14 | from aeon.distances.pointwise._euclidean import ( 15 | euclidean_distance, 16 | euclidean_pairwise_distance, 17 | ) 18 | from aeon.distances.pointwise._manhattan import ( 19 | manhattan_distance, 20 | manhattan_pairwise_distance, 21 | ) 22 | from aeon.distances.pointwise._minkowski import ( 23 | minkowski_distance, 24 | minkowski_pairwise_distance, 25 | ) 26 | from aeon.distances.pointwise._squared import ( 27 | squared_distance, 28 | squared_pairwise_distance, 29 | ) 30 | -------------------------------------------------------------------------------- /aeon/distances/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for distance module.""" 2 | -------------------------------------------------------------------------------- /aeon/forecasting/__init__.py: -------------------------------------------------------------------------------- 1 | """Forecasters.""" 2 | 3 | __all__ = [ 4 | "NaiveForecaster", 5 | "BaseForecaster", 6 | "RegressionForecaster", 7 | "ETSForecaster", 8 | "TVPForecaster", 9 | ] 10 | 11 | from aeon.forecasting._ets import ETSForecaster 12 | from aeon.forecasting._naive import NaiveForecaster 13 | from aeon.forecasting._regression import RegressionForecaster 14 | from aeon.forecasting._tvp import TVPForecaster 15 | from aeon.forecasting.base import BaseForecaster 16 | -------------------------------------------------------------------------------- /aeon/forecasting/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Forecaster tests.""" 2 | -------------------------------------------------------------------------------- /aeon/networks/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Deep learning networks tests.""" 2 | -------------------------------------------------------------------------------- /aeon/networks/tests/test_disjoint_cnn.py: -------------------------------------------------------------------------------- 1 | """Tests for the DisjointCNN Network.""" 2 | 3 | import pytest 4 | 5 | from aeon.networks import DisjointCNNNetwork 6 | from aeon.utils.validation._dependencies import _check_soft_dependencies 7 | 8 | 9 | @pytest.mark.skipif( 10 | not _check_soft_dependencies(["tensorflow"], severity="none"), 11 | reason="Tensorflow soft dependency unavailable.", 12 | ) 13 | def test_disjoint_cnn_network_kernel_initializer(): 14 | """Test DisjointCNN for different kernel_initializer per layer.""" 15 | input_layer, output_layer = DisjointCNNNetwork( 16 | n_layers=2, 17 | kernel_initializer=["he_uniform", "glorot_uniform"], 18 | kernel_size=[2, 2], 19 | ).build_network(input_shape=((10, 2))) 20 | 21 | assert len(output_layer.shape) == 2 22 | assert len(input_layer.shape) == 3 23 | -------------------------------------------------------------------------------- /aeon/pipeline/__init__.py: -------------------------------------------------------------------------------- 1 | """Pipeline maker utility."""
2 | 3 | __all__ = [ 4 | "make_pipeline", 5 | "sklearn_to_aeon", 6 | ] 7 | 8 | from aeon.pipeline._make_pipeline import make_pipeline 9 | from aeon.pipeline._sklearn_to_aeon import sklearn_to_aeon 10 | -------------------------------------------------------------------------------- /aeon/pipeline/_sklearn_to_aeon.py: -------------------------------------------------------------------------------- 1 | """Sklearn to aeon coercion utility.""" 2 | 3 | __maintainer__ = ["MatthewMiddlehurst"] 4 | __all__ = ["sklearn_to_aeon"] 5 | 6 | from aeon.pipeline._make_pipeline import make_pipeline 7 | from aeon.transformations.collection import Tabularizer 8 | 9 | 10 | def sklearn_to_aeon(estimator): 11 | """Coerces an sklearn estimator to the aeon pipeline interface. 12 | 13 | Creates a pipeline of two elements, the Tabularizer transformer and the estimator. 14 | The Tabularizer transformer acts as adapter and holds aeon base class logic, as 15 | well as converting aeon datatypes to a feature vector format. Multivariate series 16 | will be concatenated into a single feature vector. Data must be of equal length. 17 | 18 | Parameters 19 | ---------- 20 | estimator : sklearn compatible estimator 21 | Can be a classifier, regressor, clusterer, or transformer. 22 | 23 | Returns 24 | ------- 25 | pipe : aeon pipeline estimator 26 | A pipeline of the Tabularizer transformer and input estimator. 27 | """ 28 | return make_pipeline(Tabularizer(), estimator) 29 | -------------------------------------------------------------------------------- /aeon/pipeline/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for pipeline utilities.""" 2 | -------------------------------------------------------------------------------- /aeon/pipeline/tests/test_sklearn_to_aeon.py: -------------------------------------------------------------------------------- 1 | """Tests for the sklearn_to_aeon function.""" 2 | 3 | import pytest 4 | from sklearn.cluster import KMeans 5 | from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor 6 | from sklearn.preprocessing import StandardScaler 7 | 8 | from aeon.base import BaseAeonEstimator 9 | from aeon.pipeline import sklearn_to_aeon 10 | from aeon.testing.data_generation import make_example_3d_numpy 11 | 12 | 13 | @pytest.mark.parametrize( 14 | "estimator", 15 | [ 16 | RandomForestClassifier(n_estimators=5), 17 | RandomForestRegressor(n_estimators=5), 18 | KMeans(n_clusters=2, max_iter=10), 19 | StandardScaler(), 20 | ], 21 | ) 22 | def test_sklearn_to_aeon(estimator): 23 | """Test that sklearn_to_aeon works for different types of sklearn estimators.""" 24 | X, y = make_example_3d_numpy() 25 | 26 | est = sklearn_to_aeon(estimator) 27 | 28 | assert isinstance(est, BaseAeonEstimator) 29 | 30 | est.fit(X, y) 31 | 32 | if hasattr(est, "predict"): 33 | est.predict(X) 34 | else: 35 | est.transform(X) 36 | -------------------------------------------------------------------------------- /aeon/regression/__init__.py: -------------------------------------------------------------------------------- 1 | """Regression Base.""" 2 | 3 | __all__ = [ 4 | "BaseRegressor", 5 | "DummyRegressor", 6 | ] 7 | 8 | from aeon.regression._dummy import DummyRegressor 9 | from aeon.regression.base import BaseRegressor 10 | -------------------------------------------------------------------------------- /aeon/regression/compose/__init__.py: -------------------------------------------------------------------------------- 1 | """Implement composite 
time series regression estimators.""" 2 | 3 | __all__ = [ 4 | "RegressorEnsemble", 5 | "RegressorPipeline", 6 | ] 7 | 8 | from aeon.regression.compose._ensemble import RegressorEnsemble 9 | from aeon.regression.compose._pipeline import RegressorPipeline 10 | -------------------------------------------------------------------------------- /aeon/regression/compose/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for regression composable estimator.""" 2 | -------------------------------------------------------------------------------- /aeon/regression/convolution_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Convolution-based time series extrinsic regressors.""" 2 | 3 | __all__ = [ 4 | "RocketRegressor", 5 | "MiniRocketRegressor", 6 | "MultiRocketRegressor", 7 | "HydraRegressor", 8 | "MultiRocketHydraRegressor", 9 | ] 10 | 11 | from aeon.regression.convolution_based._hydra import HydraRegressor 12 | from aeon.regression.convolution_based._minirocket import MiniRocketRegressor 13 | from aeon.regression.convolution_based._mr_hydra import MultiRocketHydraRegressor 14 | from aeon.regression.convolution_based._multirocket import MultiRocketRegressor 15 | from aeon.regression.convolution_based._rocket import RocketRegressor 16 | -------------------------------------------------------------------------------- /aeon/regression/deep_learning/__init__.py: -------------------------------------------------------------------------------- 1 | """Deep learning based regressors.""" 2 | 3 | __all__ = [ 4 | "BaseDeepRegressor", 5 | "TimeCNNRegressor", 6 | "FCNRegressor", 7 | "InceptionTimeRegressor", 8 | "IndividualInceptionRegressor", 9 | "ResNetRegressor", 10 | "IndividualLITERegressor", 11 | "LITETimeRegressor", 12 | "EncoderRegressor", 13 | "MLPRegressor", 14 | "DisjointCNNRegressor", 15 | "RecurrentRegressor", 16 | ] 17 | 18 | from aeon.regression.deep_learning._cnn import TimeCNNRegressor 19 | from aeon.regression.deep_learning._disjoint_cnn import DisjointCNNRegressor 20 | from aeon.regression.deep_learning._encoder import EncoderRegressor 21 | from aeon.regression.deep_learning._fcn import FCNRegressor 22 | from aeon.regression.deep_learning._inception_time import ( 23 | InceptionTimeRegressor, 24 | IndividualInceptionRegressor, 25 | ) 26 | from aeon.regression.deep_learning._lite_time import ( 27 | IndividualLITERegressor, 28 | LITETimeRegressor, 29 | ) 30 | from aeon.regression.deep_learning._mlp import MLPRegressor 31 | from aeon.regression.deep_learning._resnet import ResNetRegressor 32 | from aeon.regression.deep_learning._rnn import RecurrentRegressor 33 | from aeon.regression.deep_learning.base import BaseDeepRegressor 34 | -------------------------------------------------------------------------------- /aeon/regression/deep_learning/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for series deep learning regressors.""" 2 | -------------------------------------------------------------------------------- /aeon/regression/distance_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Distance based time series regressors.""" 2 | 3 | __all__ = ["KNeighborsTimeSeriesRegressor"] 4 | 5 | from aeon.regression.distance_based._time_series_neighbors import ( 6 | KNeighborsTimeSeriesRegressor, 7 | ) 8 | 
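The regression subpackage `__init__` modules above only re-export estimator classes. The short sketch below is illustrative only (it is not a file in the repository) and assumes nothing beyond the public fit/predict API exercised by the tests in this package, reusing the make_example_3d_numpy generator already imported by the pipeline tests above.

from aeon.regression.distance_based import KNeighborsTimeSeriesRegressor
from aeon.testing.data_generation import make_example_3d_numpy

# Small synthetic collection: X is (n_cases, n_channels, n_timepoints), y is labels.
X, y = make_example_3d_numpy()
y = y.astype(float)  # treat the generated labels as numeric regression targets

reg = KNeighborsTimeSeriesRegressor()  # default constructor arguments
reg.fit(X, y)
preds = reg.predict(X)
assert preds.shape == (X.shape[0],)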
-------------------------------------------------------------------------------- /aeon/regression/distance_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Distance based test code.""" 2 | -------------------------------------------------------------------------------- /aeon/regression/feature_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Feature based time series regressors. 2 | 3 | While a bit vague, the contents mostly consist of transformers that extract features 4 | pipelined to a vector regressor. 5 | """ 6 | 7 | __all__ = [ 8 | "Catch22Regressor", 9 | "FreshPRINCERegressor", 10 | "SummaryRegressor", 11 | "TSFreshRegressor", 12 | ] 13 | 14 | from aeon.regression.feature_based._catch22 import Catch22Regressor 15 | from aeon.regression.feature_based._fresh_prince import FreshPRINCERegressor 16 | from aeon.regression.feature_based._summary import SummaryRegressor 17 | from aeon.regression.feature_based._tsfresh import TSFreshRegressor 18 | -------------------------------------------------------------------------------- /aeon/regression/hybrid/__init__.py: -------------------------------------------------------------------------------- 1 | """Hybrid time series regressors.""" 2 | 3 | __all__ = [ 4 | "RISTRegressor", 5 | ] 6 | 7 | from aeon.regression.hybrid._rist import RISTRegressor 8 | -------------------------------------------------------------------------------- /aeon/regression/interval_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Implement interval-based time series regression estimators.""" 2 | 3 | __all__ = [ 4 | "CanonicalIntervalForestRegressor", 5 | "DrCIFRegressor", 6 | "IntervalForestRegressor", 7 | "RandomIntervalRegressor", 8 | "RandomIntervalSpectralEnsembleRegressor", 9 | "TimeSeriesForestRegressor", 10 | "QUANTRegressor", 11 | ] 12 | 13 | from aeon.regression.interval_based._cif import CanonicalIntervalForestRegressor 14 | from aeon.regression.interval_based._drcif import DrCIFRegressor 15 | from aeon.regression.interval_based._interval_forest import IntervalForestRegressor 16 | from aeon.regression.interval_based._interval_pipelines import RandomIntervalRegressor 17 | from aeon.regression.interval_based._quant import QUANTRegressor 18 | from aeon.regression.interval_based._rise import RandomIntervalSpectralEnsembleRegressor 19 | from aeon.regression.interval_based._tsf import TimeSeriesForestRegressor 20 | -------------------------------------------------------------------------------- /aeon/regression/interval_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for interval-based regressors.""" 2 | -------------------------------------------------------------------------------- /aeon/regression/interval_based/tests/test_interval_pipelines.py: -------------------------------------------------------------------------------- 1 | """Test interval pipelines.""" 2 | 3 | import pytest 4 | from sklearn.svm import SVR 5 | 6 | from aeon.regression.interval_based import RandomIntervalRegressor 7 | from aeon.testing.testing_data import EQUAL_LENGTH_UNIVARIATE_REGRESSION 8 | from aeon.testing.utils.estimator_checks import _assert_predict_labels 9 | 10 | 11 | @pytest.mark.parametrize("cls", [RandomIntervalRegressor]) 12 | def test_interval_pipeline_classifiers(cls): 13 | """Test the random interval regressors.""" 14 | X_train, 
y_train = EQUAL_LENGTH_UNIVARIATE_REGRESSION["numpy3D"]["train"] 15 | X_test, y_test = EQUAL_LENGTH_UNIVARIATE_REGRESSION["numpy3D"]["test"] 16 | 17 | params = cls._get_test_params() 18 | if isinstance(params, list): 19 | params = params[0] 20 | params.update({"estimator": SVR()}) 21 | 22 | reg = cls(**params) 23 | reg.fit(X_train, y_train) 24 | preds = reg.predict(X_test) 25 | _assert_predict_labels(preds, X_test) 26 | -------------------------------------------------------------------------------- /aeon/regression/shapelet_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Shapelet based time series regressors.""" 2 | 3 | __all__ = [ 4 | "RDSTRegressor", 5 | ] 6 | 7 | from aeon.regression.shapelet_based._rdst import RDSTRegressor 8 | -------------------------------------------------------------------------------- /aeon/regression/sklearn/__init__.py: -------------------------------------------------------------------------------- 1 | """Vector sklearn regressors.""" 2 | 3 | __all__ = [ 4 | "RotationForestRegressor", 5 | "SklearnRegressorWrapper", 6 | ] 7 | 8 | from aeon.regression.sklearn._rotation_forest_regressor import RotationForestRegressor 9 | from aeon.regression.sklearn._wrapper import SklearnRegressorWrapper 10 | -------------------------------------------------------------------------------- /aeon/regression/sklearn/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """sklearn regressor test code.""" 2 | -------------------------------------------------------------------------------- /aeon/regression/sklearn/tests/test_all_regressors.py: -------------------------------------------------------------------------------- 1 | """Unit tests for sklearn regressors.""" 2 | 3 | __maintainer__ = ["MatthewMiddlehurst"] 4 | 5 | from sklearn.utils.estimator_checks import parametrize_with_checks 6 | 7 | from aeon.regression.sklearn import RotationForestRegressor 8 | 9 | 10 | @parametrize_with_checks([RotationForestRegressor(n_estimators=3)]) 11 | def test_sklearn_compatible_estimator(estimator, check): 12 | """Test that sklearn estimators adhere to sklearn conventions.""" 13 | check(estimator) 14 | -------------------------------------------------------------------------------- /aeon/regression/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for all time series regressors.""" 2 | -------------------------------------------------------------------------------- /aeon/regression/tests/test_dummy.py: -------------------------------------------------------------------------------- 1 | """Test function of DummyRegressor.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.regression import DummyRegressor 7 | 8 | 9 | @pytest.mark.parametrize("strategy", ["mean", "median", "quantile", "constant"]) 10 | def test_dummy_regressor_strategies(strategy): 11 | """Test DummyRegressor strategies.""" 12 | X = np.ones(shape=(10, 10)) 13 | y_train = np.random.rand(10) 14 | 15 | dummy = DummyRegressor(strategy=strategy, constant=0.5, quantile=0.5) 16 | dummy.fit(X, y_train) 17 | 18 | pred = dummy.predict(X) 19 | assert isinstance(pred, np.ndarray) 20 | assert all(0 <= i <= 1 for i in pred) 21 | 22 | 23 | def test_dummy_regressor_default(): 24 | """Test function for DummyRegressor.""" 25 | X = np.ones(shape=(10, 10)) 26 | y_train = np.array([1.5, 2, 1, 4, 5, 1, 1, 1.5, 0, 0.5]) 27 | 28 | dummy = DummyRegressor() 29 | dummy.fit(X, 
y_train) 30 | 31 | pred = dummy.predict(X) 32 | assert np.all(np.isclose(pred, 1.75)) 33 | -------------------------------------------------------------------------------- /aeon/segmentation/__init__.py: -------------------------------------------------------------------------------- 1 | """Time Series Segmentation.""" 2 | 3 | __all__ = [ 4 | "BaseSegmenter", 5 | "BinSegmenter", 6 | "FLUSSSegmenter", 7 | "ClaSPSegmenter", 8 | "find_dominant_window_sizes", 9 | "GreedyGaussianSegmenter", 10 | "InformationGainSegmenter", 11 | "entropy", 12 | "RandomSegmenter", 13 | "EAggloSegmenter", 14 | "HMMSegmenter", 15 | "HidalgoSegmenter", 16 | ] 17 | 18 | from aeon.segmentation._binseg import BinSegmenter 19 | from aeon.segmentation._clasp import ClaSPSegmenter, find_dominant_window_sizes 20 | from aeon.segmentation._eagglo import EAggloSegmenter 21 | from aeon.segmentation._fluss import FLUSSSegmenter 22 | from aeon.segmentation._ggs import GreedyGaussianSegmenter 23 | from aeon.segmentation._hidalgo import HidalgoSegmenter 24 | from aeon.segmentation._hmm import HMMSegmenter 25 | from aeon.segmentation._igts import InformationGainSegmenter, entropy 26 | from aeon.segmentation._random import RandomSegmenter 27 | from aeon.segmentation.base import BaseSegmenter 28 | -------------------------------------------------------------------------------- /aeon/segmentation/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for segmentation.""" 2 | -------------------------------------------------------------------------------- /aeon/segmentation/tests/test_binseg.py: -------------------------------------------------------------------------------- 1 | """Simple BinSeg test.""" 2 | 3 | __maintainer__ = [] 4 | __all__ = [] 5 | 6 | import pytest 7 | 8 | from aeon.datasets import load_gun_point_segmentation 9 | from aeon.segmentation import BinSegmenter 10 | from aeon.utils.validation._dependencies import _check_soft_dependencies 11 | 12 | 13 | @pytest.mark.skipif( 14 | not _check_soft_dependencies(["ruptures"], severity="none"), 15 | reason="skip test if required soft dependency not available", 16 | ) 17 | def test_binseg_sparse(): 18 | """Test BinSeg sparse segmentation. 19 | 20 | Check if the predicted change points match. 21 | """ 22 | # load the test dataset 23 | ts, _, cps = load_gun_point_segmentation() 24 | 25 | # compute a BinSeg segmentation 26 | binseg = BinSegmenter(n_cps=1) 27 | found_cps = binseg.fit_predict(ts) 28 | 29 | assert len(found_cps) == 1 and found_cps[0] == 1870 30 | -------------------------------------------------------------------------------- /aeon/segmentation/tests/test_clasp.py: -------------------------------------------------------------------------------- 1 | """Simple ClaSP test.""" 2 | 3 | __maintainer__ = [] 4 | __all__ = [] 5 | 6 | from aeon.datasets import load_gun_point_segmentation 7 | from aeon.segmentation import ClaSPSegmenter 8 | 9 | 10 | def test_clasp_sparse(): 11 | """Test ClaSP sparse segmentation. 12 | 13 | Check if the predicted change points match. 
14 | """ 15 | # load the test dataset 16 | ts, period_size, cps = load_gun_point_segmentation() 17 | 18 | # compute a ClaSP segmentation 19 | clasp = ClaSPSegmenter(period_size, n_cps=1) 20 | clasp.fit(ts) 21 | found_cps = clasp.predict(ts) 22 | scores = clasp.predict_scores(ts) 23 | 24 | assert len(found_cps) == 1 and found_cps[0] == 893 25 | assert len(scores) == 1 and scores[0] > 0.74 26 | -------------------------------------------------------------------------------- /aeon/segmentation/tests/test_fluss.py: -------------------------------------------------------------------------------- 1 | """Simple FLUSS test.""" 2 | 3 | __maintainer__ = [] 4 | __all__ = [] 5 | 6 | import pytest 7 | 8 | from aeon.datasets import load_gun_point_segmentation 9 | from aeon.segmentation import FLUSSSegmenter 10 | from aeon.utils.validation._dependencies import _check_soft_dependencies 11 | 12 | 13 | @pytest.mark.skipif( 14 | not _check_soft_dependencies(["stumpy"], severity="none"), 15 | reason="skip test if required soft dependency not available", 16 | ) 17 | def test_fluss_sparse(): 18 | """Test FLUSS segmentation. 19 | 20 | Check if the predicted change points match. 21 | """ 22 | # load the test dataset 23 | ts, period_size, cps = load_gun_point_segmentation() 24 | 25 | # compute a FLUSS segmentation 26 | fluss = FLUSSSegmenter(period_size, n_regimes=2) 27 | found_cps = fluss.fit_predict(ts) 28 | scores = fluss.predict_scores(ts) 29 | 30 | assert len(found_cps) == 1 and found_cps[0] == 889 31 | assert len(scores) == 1 and 0.53 > scores[0] > 0.52 32 | -------------------------------------------------------------------------------- /aeon/segmentation/tests/test_ggs.py: -------------------------------------------------------------------------------- 1 | """Tests for _ggs module.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.segmentation import GreedyGaussianSegmenter 7 | from aeon.segmentation._ggs import _GGS 8 | 9 | 10 | @pytest.fixture 11 | def univariate_mean_shift(): 12 | """Generate simple mean shift time series.""" 13 | x = np.concatenate(tuple(np.ones(5) * i**2 for i in range(4))) 14 | return x[:, np.newaxis] 15 | 16 | 17 | def test_GGS_find_change_points(univariate_mean_shift): 18 | """Test the _GGS core estimator.""" 19 | ggs = _GGS(k_max=10, lamb=1.0) 20 | pred = ggs.find_change_points(univariate_mean_shift) 21 | assert isinstance(pred, list) 22 | assert len(pred) == 5 23 | 24 | 25 | def test_GreedyGaussianSegmentation(univariate_mean_shift): 26 | """Test the GreedyGaussianSegmentation.""" 27 | ggs = GreedyGaussianSegmenter(k_max=5, lamb=0.5) 28 | assert ggs.get_params() == { 29 | "k_max": 5, 30 | "lamb": 0.5, 31 | "verbose": False, 32 | "max_shuffles": 250, 33 | "random_state": None, 34 | } 35 | -------------------------------------------------------------------------------- /aeon/segmentation/tests/test_hidalgo.py: -------------------------------------------------------------------------------- 1 | """Test Hidalgo segmenter.""" 2 | 3 | from aeon.segmentation._hidalgo import _binom, _partition_function 4 | 5 | 6 | def test_partition_function(): 7 | """Test Hidalgo segmenter partition function.""" 8 | p = _partition_function(10, 2, 0, 1) 9 | assert p == 8.0 10 | b = _binom(10, 2) 11 | assert b == 45.0 12 | -------------------------------------------------------------------------------- /aeon/segmentation/tests/test_random.py: -------------------------------------------------------------------------------- 1 | """Tests for random segmenter.""" 2 | 3 | import numpy as np 4 | 
import pandas as pd 5 | 6 | from aeon.segmentation import RandomSegmenter 7 | 8 | 9 | def test_random(): 10 | """Test random segmenter.""" 11 | data = np.random.random((5, 100)) # 5 series of length 100 12 | segmenter = RandomSegmenter() 13 | segmenter.fit(data) 14 | segs = segmenter.predict(data) 15 | assert len(segs) == 1 16 | segmenter = RandomSegmenter(random_state=49, n_segments=10) 17 | segmenter.fit(data) 18 | assert segmenter.n_segments == 10 19 | segs2 = segmenter.predict(data) 20 | np.array_equal(segs, segs2) 21 | assert len(segs) == 1 22 | segs = segmenter.fit_predict(data) 23 | assert len(segs) == 9 24 | 25 | df = pd.DataFrame(data) 26 | segmenter = RandomSegmenter(random_state=49, n_segments=10) 27 | segmenter.fit(df) 28 | segs2 = segmenter.predict(df) 29 | np.array_equal(segs, segs2) 30 | -------------------------------------------------------------------------------- /aeon/similarity_search/__init__.py: -------------------------------------------------------------------------------- 1 | """Similarity search module.""" 2 | 3 | __all__ = ["BaseSimilaritySearch"] 4 | 5 | from aeon.similarity_search._base import BaseSimilaritySearch 6 | -------------------------------------------------------------------------------- /aeon/similarity_search/collection/__init__.py: -------------------------------------------------------------------------------- 1 | """Similarity search for time series collection.""" 2 | 3 | __all__ = [ 4 | "BaseCollectionSimilaritySearch", 5 | "RandomProjectionIndexANN", 6 | ] 7 | 8 | from aeon.similarity_search.collection._base import BaseCollectionSimilaritySearch 9 | from aeon.similarity_search.collection.neighbors._rp_cosine_lsh import ( 10 | RandomProjectionIndexANN, 11 | ) 12 | -------------------------------------------------------------------------------- /aeon/similarity_search/collection/motifs/__init__.py: -------------------------------------------------------------------------------- 1 | """Motif discovery for time series collection.""" 2 | -------------------------------------------------------------------------------- /aeon/similarity_search/collection/neighbors/__init__.py: -------------------------------------------------------------------------------- 1 | """Neighbors search for time series collection.""" 2 | 3 | __all__ = ["RandomProjectionIndexANN"] 4 | 5 | from aeon.similarity_search.collection.neighbors._rp_cosine_lsh import ( 6 | RandomProjectionIndexANN, 7 | ) 8 | -------------------------------------------------------------------------------- /aeon/similarity_search/collection/neighbors/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for similarity search for time series collection neighbors module.""" 2 | -------------------------------------------------------------------------------- /aeon/similarity_search/collection/neighbors/tests/test_rp_cosine_lsh.py: -------------------------------------------------------------------------------- 1 | """Tests for RandomProjectionIndexANN.""" 2 | -------------------------------------------------------------------------------- /aeon/similarity_search/collection/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for similarity search for time series collection base class and commons.""" 2 | -------------------------------------------------------------------------------- /aeon/similarity_search/collection/tests/test_base.py: 
-------------------------------------------------------------------------------- 1 | """Test for collection similarity search base class.""" 2 | 3 | __maintainer__ = ["baraline"] 4 | 5 | from aeon.testing.mock_estimators._mock_similarity_searchers import ( 6 | MockCollectionSimilaritySearch, 7 | ) 8 | from aeon.testing.testing_data import FULL_TEST_DATA_DICT, _get_datatypes_for_estimator 9 | 10 | 11 | def test_input_shape_fit_predict_collection(): 12 | """Test input shapes.""" 13 | estimator = MockCollectionSimilaritySearch() 14 | datatypes = _get_datatypes_for_estimator(estimator) 15 | # dummy data to pass to fit when testing predict/predict_proba 16 | for datatype in datatypes: 17 | X_train, y_train = FULL_TEST_DATA_DICT[datatype]["train"] 18 | X_test, y_test = FULL_TEST_DATA_DICT[datatype]["test"] 19 | estimator.fit(X_train, y_train).predict(X_test) 20 | -------------------------------------------------------------------------------- /aeon/similarity_search/series/__init__.py: -------------------------------------------------------------------------------- 1 | """Similarity search for series.""" 2 | 3 | __all__ = [ 4 | "BaseSeriesSimilaritySearch", 5 | "MassSNN", 6 | "StompMotif", 7 | "DummySNN", 8 | ] 9 | 10 | from aeon.similarity_search.series._base import ( 11 | BaseSeriesSimilaritySearch, 12 | ) 13 | from aeon.similarity_search.series.motifs._stomp import StompMotif 14 | from aeon.similarity_search.series.neighbors._dummy import DummySNN 15 | from aeon.similarity_search.series.neighbors._mass import MassSNN 16 | -------------------------------------------------------------------------------- /aeon/similarity_search/series/motifs/__init__.py: -------------------------------------------------------------------------------- 1 | """Motif discovery for single series.""" 2 | 3 | __all__ = [ 4 | "StompMotif", 5 | ] 6 | 7 | from aeon.similarity_search.series.motifs._stomp import StompMotif 8 | -------------------------------------------------------------------------------- /aeon/similarity_search/series/motifs/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for series motif search methods.""" 2 | -------------------------------------------------------------------------------- /aeon/similarity_search/series/neighbors/__init__.py: -------------------------------------------------------------------------------- 1 | """Subsequence Neighbor search for series.""" 2 | 3 | __all__ = [ 4 | "DummySNN", 5 | "MassSNN", 6 | ] 7 | 8 | from aeon.similarity_search.series.neighbors._dummy import DummySNN 9 | from aeon.similarity_search.series.neighbors._mass import MassSNN 10 | -------------------------------------------------------------------------------- /aeon/similarity_search/series/neighbors/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for series neighbors search methods.""" 2 | -------------------------------------------------------------------------------- /aeon/similarity_search/series/neighbors/tests/test_dummy.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for stomp algorithm. 3 | 4 | We do not test equality for returned indexes due to the unstable nature of argsort 5 | and the fact that the "kind=stable" parameter is not yet supported in numba. We instead 6 | test that the returned index match the expected distance value. 
7 | """ 8 | 9 | __maintainer__ = ["baraline"] 10 | 11 | import numpy as np 12 | from numpy.testing import assert_almost_equal 13 | 14 | from aeon.similarity_search.series.neighbors._dummy import ( 15 | _naive_squared_distance_profile, 16 | ) 17 | from aeon.testing.data_generation import make_example_2d_numpy_series 18 | from aeon.utils.numba.general import get_all_subsequences 19 | 20 | 21 | def test__naive_squared_distance_profile(): 22 | """Test Euclidean distance with brute force.""" 23 | L = 3 24 | X = make_example_2d_numpy_series(n_channels=1, n_timepoints=10) 25 | Q = make_example_2d_numpy_series(n_channels=1, n_timepoints=L) 26 | 27 | dist_profile = _naive_squared_distance_profile(get_all_subsequences(X, L, 1), Q) 28 | 29 | for i_t in range(X.shape[1] - L + 1): 30 | S = X[:, i_t : i_t + L] 31 | assert_almost_equal(dist_profile[i_t], np.sum((S - Q) ** 2)) 32 | -------------------------------------------------------------------------------- /aeon/similarity_search/series/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for base class and commons functions.""" 2 | -------------------------------------------------------------------------------- /aeon/similarity_search/series/tests/test_base.py: -------------------------------------------------------------------------------- 1 | """Test for series similarity search base class.""" 2 | 3 | __maintainer__ = ["baraline"] 4 | 5 | from aeon.testing.mock_estimators._mock_similarity_searchers import ( 6 | MockSeriesSimilaritySearch, 7 | ) 8 | from aeon.testing.testing_data import FULL_TEST_DATA_DICT, _get_datatypes_for_estimator 9 | 10 | 11 | def test_input_shape_fit_predict_collection_motifs(): 12 | """Test input shapes.""" 13 | estimator = MockSeriesSimilaritySearch() 14 | datatypes = _get_datatypes_for_estimator(estimator) 15 | # dummy data to pass to fit when testing predict/predict_proba 16 | for datatype in datatypes: 17 | X_train, y_train = FULL_TEST_DATA_DICT[datatype]["train"] 18 | X_test, y_test = FULL_TEST_DATA_DICT[datatype]["test"] 19 | estimator.fit(X_train, y_train).predict(X_test) 20 | -------------------------------------------------------------------------------- /aeon/similarity_search/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Similarity search Tests.""" 2 | -------------------------------------------------------------------------------- /aeon/testing/__init__.py: -------------------------------------------------------------------------------- 1 | """Package wide tests and testing utilities for aeon.""" 2 | -------------------------------------------------------------------------------- /aeon/testing/data_generation/__init__.py: -------------------------------------------------------------------------------- 1 | """Data generators.""" 2 | 3 | __all__ = [ 4 | # collections 5 | "make_example_3d_numpy", 6 | "make_example_2d_numpy_collection", 7 | "make_example_3d_numpy_list", 8 | "make_example_2d_numpy_list", 9 | "make_example_dataframe_list", 10 | "make_example_2d_dataframe_collection", 11 | "make_example_multi_index_dataframe", 12 | # series 13 | "make_example_1d_numpy", 14 | "make_example_2d_numpy_series", 15 | "make_example_pandas_series", 16 | "make_example_dataframe_series", 17 | ] 18 | 19 | 20 | from aeon.testing.data_generation._collection import ( 21 | make_example_2d_dataframe_collection, 22 | make_example_2d_numpy_collection, 23 | make_example_2d_numpy_list, 24 | make_example_3d_numpy, 25 | 
make_example_3d_numpy_list, 26 | make_example_dataframe_list, 27 | make_example_multi_index_dataframe, 28 | ) 29 | from aeon.testing.data_generation._series import ( 30 | make_example_1d_numpy, 31 | make_example_2d_numpy_series, 32 | make_example_dataframe_series, 33 | make_example_pandas_series, 34 | ) 35 | -------------------------------------------------------------------------------- /aeon/testing/data_generation/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests.""" 2 | -------------------------------------------------------------------------------- /aeon/testing/estimator_checking/__init__.py: -------------------------------------------------------------------------------- 1 | """Estimator checks.""" 2 | 3 | __all__ = [ 4 | "check_estimator", 5 | "parametrize_with_checks", 6 | ] 7 | 8 | from aeon.testing.estimator_checking._estimator_checking import ( 9 | check_estimator, 10 | parametrize_with_checks, 11 | ) 12 | -------------------------------------------------------------------------------- /aeon/testing/estimator_checking/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for estimator checking.""" 2 | -------------------------------------------------------------------------------- /aeon/testing/expected_results/__init__.py: -------------------------------------------------------------------------------- 1 | """Expected results for estimators and utilities to generate them.""" 2 | -------------------------------------------------------------------------------- /aeon/testing/expected_results/results_reproduction/__init__.py: -------------------------------------------------------------------------------- 1 | """Utilities for reproducing expected results.""" 2 | -------------------------------------------------------------------------------- /aeon/testing/expected_results/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for expected estimator results.""" 2 | -------------------------------------------------------------------------------- /aeon/testing/mock_estimators/_mock_collection_transformers.py: -------------------------------------------------------------------------------- 1 | """Mock collection transformers useful for testing and debugging.""" 2 | 3 | __maintainer__ = [] 4 | __all__ = [ 5 | "MockCollectionTransformer", 6 | ] 7 | 8 | from aeon.transformations.collection import BaseCollectionTransformer 9 | 10 | 11 | class MockCollectionTransformer(BaseCollectionTransformer): 12 | """BasecollectionTransformer for testing tags.""" 13 | 14 | _tags = { 15 | "capability:multivariate": True, 16 | } 17 | 18 | def __init__(self) -> None: 19 | super().__init__() 20 | 21 | def _fit(self, X, y=None): 22 | """Mock fit.""" 23 | return self 24 | 25 | def _transform(self, X, y=None): 26 | """Mock transform.""" 27 | return X 28 | -------------------------------------------------------------------------------- /aeon/testing/mock_estimators/_mock_forecasters.py: -------------------------------------------------------------------------------- 1 | """Mock forecasters useful for testing and debugging.""" 2 | 3 | __maintainer__ = ["TonyBagnall"] 4 | __all__ = [ 5 | "MockForecaster", 6 | ] 7 | 8 | 9 | from aeon.forecasting.base import BaseForecaster 10 | 11 | 12 | class MockForecaster(BaseForecaster): 13 | """Mock forecaster for testing.""" 14 | 15 | def __init__(self): 16 | super().__init__() 17 | 18 | def 
_fit(self, y, X=None): 19 | return self 20 | 21 | def _predict(self, y): 22 | return 1.0 23 | 24 | def _forecast(self, y, X=None): 25 | return 1.0 26 | -------------------------------------------------------------------------------- /aeon/testing/mock_estimators/_mock_similarity_searchers.py: -------------------------------------------------------------------------------- 1 | """Mock similarity searchers useful for testing and debugging.""" 2 | 3 | __maintainer__ = ["baraline"] 4 | __all__ = [ 5 | "MockSeriesSimilaritySearch", 6 | "MockCollectionSimilaritySearch", 7 | ] 8 | 9 | from aeon.similarity_search.collection._base import BaseCollectionSimilaritySearch 10 | from aeon.similarity_search.series._base import BaseSeriesSimilaritySearch 11 | 12 | 13 | class MockSeriesSimilaritySearch(BaseSeriesSimilaritySearch): 14 | """Mock estimator for BaseSeriesSimilaritySearch.""" 15 | 16 | def __init__(self): 17 | super().__init__() 18 | 19 | def _fit(self, X, y=None): 20 | return self 21 | 22 | def _predict(self, X): 23 | """Return top-1 motif start timestamp index in X and distance to the match in X_.""" 24 | return [0], [0.1] 25 | 26 | 27 | class MockCollectionSimilaritySearch(BaseCollectionSimilaritySearch): 28 | """Mock estimator for BaseCollectionSimilaritySearch.""" 29 | 30 | def __init__(self): 31 | super().__init__() 32 | 33 | def _fit(self, X, y=None): 34 | return self 35 | 36 | def _predict(self, X): 37 | """Return top-1 motif start timestamp index in X and distance to the match in X_.""" 38 | return [0, 0], [0.1] 39 | -------------------------------------------------------------------------------- /aeon/testing/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for the aeon package and testing module utilities.""" 2 | 3 | import pkgutil 4 | 5 | import aeon 6 | 7 | # collect all modules 8 | ALL_AEON_MODULES = pkgutil.walk_packages(aeon.__path__, aeon.__name__ + ".") 9 | ALL_AEON_MODULES = [x[1] for x in ALL_AEON_MODULES] 10 | 11 | ALL_AEON_MODULES_NO_TESTS = [ 12 | x for x in ALL_AEON_MODULES if not any(part == "tests" for part in x.split(".")) 13 | ] 14 | -------------------------------------------------------------------------------- /aeon/testing/tests/test_core_imports.py: -------------------------------------------------------------------------------- 1 | """Tests that non-core dependencies are handled correctly in modules.""" 2 | 3 | import re 4 | from importlib import import_module 5 | 6 | from aeon.testing.tests import ALL_AEON_MODULES_NO_TESTS 7 | 8 | if __name__ == "__main__": 9 | """Test imports in aeon modules with core dependencies only. 10 | 11 | Imports all modules and catches exceptions due to missing dependencies. 12 | """ 13 | for module in ALL_AEON_MODULES_NO_TESTS: 14 | try: 15 | import_module(module) 16 | except ModuleNotFoundError as e: # pragma: no cover 17 | dependency = "unknown" 18 | match = re.search(r"\'(.+?)\'", str(e)) 19 | if match: 20 | dependency = match.group(1) 21 | 22 | raise ModuleNotFoundError( 23 | f"The module: {module} should not require any non-core dependencies, " 24 | f"but tried importing: '{dependency}'. Make sure non-core dependencies " 25 | f"are properly isolated outside of tests/ directories." 
26 | ) from e 27 | -------------------------------------------------------------------------------- /aeon/testing/utils/__init__.py: -------------------------------------------------------------------------------- 1 | """Testing utils.""" 2 | -------------------------------------------------------------------------------- /aeon/testing/utils/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for testing utils.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/__init__.py: -------------------------------------------------------------------------------- 1 | """Time series transformations.""" 2 | 3 | __all__ = [ 4 | "BaseTransformer", 5 | ] 6 | 7 | from aeon.transformations.base import BaseTransformer 8 | -------------------------------------------------------------------------------- /aeon/transformations/collection/channel_selection/__init__.py: -------------------------------------------------------------------------------- 1 | """Channel selection transformations. 2 | 3 | Channel selection transformers select a subset of channels for a collection by a 4 | method described in fit (if supervised), then return only those channels for a 5 | collection using transform. 6 | """ 7 | 8 | __all__ = [ 9 | "ChannelScorer", 10 | "ElbowClassPairwise", 11 | "ElbowClassSum", 12 | "RandomChannelSelector", 13 | ] 14 | 15 | 16 | from aeon.transformations.collection.channel_selection._channel_scorer import ( 17 | ChannelScorer, 18 | ) 19 | from aeon.transformations.collection.channel_selection._elbow_class import ( 20 | ElbowClassPairwise, 21 | ElbowClassSum, 22 | ) 23 | from aeon.transformations.collection.channel_selection._random import ( 24 | RandomChannelSelector, 25 | ) 26 | -------------------------------------------------------------------------------- /aeon/transformations/collection/channel_selection/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Channel selection tests.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/collection/channel_selection/tests/test_random.py: -------------------------------------------------------------------------------- 1 | """Test RandomChannelSelector.""" 2 | 3 | import pytest 4 | 5 | from aeon.testing.data_generation import make_example_3d_numpy 6 | from aeon.transformations.collection.channel_selection._random import ( 7 | RandomChannelSelector, 8 | ) 9 | 10 | 11 | def test_random_channel_selector(): 12 | """Test random channel selection.""" 13 | X = make_example_3d_numpy( 14 | n_cases=10, n_channels=10, n_timepoints=20, return_y=False 15 | ) 16 | # Standard case, select 4 17 | r = RandomChannelSelector() 18 | X2 = r.fit_transform(X) 19 | assert X2.shape == (10, 4, 20) 20 | # Should round up to number of channels 21 | r = RandomChannelSelector(p=0.55) 22 | X2 = r.fit_transform(X) 23 | assert X2.shape == (10, 6, 20) 24 | r = RandomChannelSelector(p=0.91) 25 | X2 = r.fit_transform(X) 26 | assert X2.shape == X.shape 27 | r = RandomChannelSelector(p=1.0) 28 | X2 = r.fit_transform(X) 29 | assert X2.shape == X.shape 30 | with pytest.raises( 31 | ValueError, match="Proportion of channels to select should be in the range." 
32 | ): 33 | RandomChannelSelector(p=0) 34 | -------------------------------------------------------------------------------- /aeon/transformations/collection/compose/__init__.py: -------------------------------------------------------------------------------- 1 | """Compositions for collection transforms.""" 2 | 3 | __all__ = [ 4 | "CollectionTransformerPipeline", 5 | "CollectionId", 6 | ] 7 | 8 | from aeon.transformations.collection.compose._identity import CollectionId 9 | from aeon.transformations.collection.compose._pipeline import ( 10 | CollectionTransformerPipeline, 11 | ) 12 | -------------------------------------------------------------------------------- /aeon/transformations/collection/compose/_identity.py: -------------------------------------------------------------------------------- 1 | """Identity transformer.""" 2 | 3 | from aeon.transformations.collection import BaseCollectionTransformer 4 | from aeon.utils.data_types import COLLECTIONS_DATA_TYPES 5 | 6 | 7 | class CollectionId(BaseCollectionTransformer): 8 | """Identity transformer, returns data unchanged in transform/inverse_transform.""" 9 | 10 | _tags = { 11 | "X_inner_type": COLLECTIONS_DATA_TYPES, 12 | "fit_is_empty": True, 13 | "capability:inverse_transform": True, 14 | "capability:multivariate": True, 15 | "capability:unequal_length": True, 16 | "capability:missing_values": True, 17 | } 18 | 19 | def __init__(self): 20 | super().__init__() 21 | 22 | def _transform(self, X, y=None): 23 | return X 24 | 25 | def _inverse_transform(self, X, y=None): 26 | return X 27 | -------------------------------------------------------------------------------- /aeon/transformations/collection/compose/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for composable collection transformers.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/collection/convolution_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Rocket transformers.""" 2 | 3 | __all__ = [ 4 | "Rocket", 5 | "MiniRocket", 6 | "MultiRocket", 7 | "HydraTransformer", 8 | ] 9 | 10 | from ._hydra import HydraTransformer 11 | from ._minirocket import MiniRocket 12 | from ._multirocket import MultiRocket 13 | from ._rocket import Rocket 14 | -------------------------------------------------------------------------------- /aeon/transformations/collection/convolution_based/rocketGPU/__init__.py: -------------------------------------------------------------------------------- 1 | """Rocket transformers for GPU.""" 2 | 3 | __maintainer__ = ["hadifawaz1999"] 4 | __all__ = ["ROCKETGPU"] 5 | 6 | from aeon.transformations.collection.convolution_based.rocketGPU._rocket_gpu import ( 7 | ROCKETGPU, 8 | ) 9 | -------------------------------------------------------------------------------- /aeon/transformations/collection/convolution_based/rocketGPU/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Rocket GPU unit tests.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/collection/convolution_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Rocket unit tests.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/collection/dictionary_based/__init__.py: 
-------------------------------------------------------------------------------- 1 | """Transformers.""" 2 | 3 | __all__ = ["PAA", "SFA", "SFAFast", "SFAWhole", "SAX", "BORF"] 4 | 5 | from aeon.transformations.collection.dictionary_based._borf import BORF 6 | from aeon.transformations.collection.dictionary_based._paa import PAA 7 | from aeon.transformations.collection.dictionary_based._sax import SAX 8 | from aeon.transformations.collection.dictionary_based._sfa import SFA 9 | from aeon.transformations.collection.dictionary_based._sfa_fast import SFAFast 10 | from aeon.transformations.collection.dictionary_based._sfa_whole import SFAWhole 11 | -------------------------------------------------------------------------------- /aeon/transformations/collection/dictionary_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for dictionary based collection transformers.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/collection/feature_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Feature based collection transformations.""" 2 | 3 | __all__ = [ 4 | "Catch22", 5 | "TSFresh", 6 | "TSFreshRelevant", 7 | "SevenNumberSummary", 8 | ] 9 | 10 | from aeon.transformations.collection.feature_based._catch22 import Catch22 11 | from aeon.transformations.collection.feature_based._summary import SevenNumberSummary 12 | from aeon.transformations.collection.feature_based._tsfresh import ( 13 | TSFresh, 14 | TSFreshRelevant, 15 | ) 16 | -------------------------------------------------------------------------------- /aeon/transformations/collection/feature_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for feature based collection transformers.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/collection/feature_based/tests/test_summary.py: -------------------------------------------------------------------------------- 1 | """Test summary features transformer.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.transformations.collection.feature_based import SevenNumberSummary 7 | 8 | 9 | @pytest.mark.parametrize("summary_stats", ["default", "quantiles", "bowley", "tukey"]) 10 | def test_summary_features(summary_stats): 11 | """Test different summary_stats options.""" 12 | sns = SevenNumberSummary() 13 | t = sns.fit_transform(np.ones((10, 2, 5))) 14 | assert t.shape == (10, 14) 15 | 16 | 17 | def test_summary_features_invalid(): 18 | """Test invalid summary_stats option.""" 19 | with pytest.raises(ValueError, match="Summary function input invalid"): 20 | sns = SevenNumberSummary(summary_stats="invalid") 21 | sns.fit_transform(np.ones((10, 2, 5))) 22 | -------------------------------------------------------------------------------- /aeon/transformations/collection/interval_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Interval based collection transformations.""" 2 | 3 | __all__ = [ 4 | "RandomIntervals", 5 | "SupervisedIntervals", 6 | "QUANTTransformer", 7 | ] 8 | 9 | from aeon.transformations.collection.interval_based._quant import QUANTTransformer 10 | from aeon.transformations.collection.interval_based._random_intervals import ( 11 | RandomIntervals, 12 | ) 13 | from 
aeon.transformations.collection.interval_based._supervised_intervals import ( 14 | SupervisedIntervals, 15 | ) 16 | -------------------------------------------------------------------------------- /aeon/transformations/collection/interval_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for interval based collection transformers.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/collection/self_supervised/__init__.py: -------------------------------------------------------------------------------- 1 | """Self Supervised deep learning transformers.""" 2 | 3 | __all__ = ["TRILITE"] 4 | 5 | from aeon.transformations.collection.self_supervised._trilite import TRILITE 6 | -------------------------------------------------------------------------------- /aeon/transformations/collection/self_supervised/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Self-Supervised tests.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/collection/shapelet_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Shapelet based transformers.""" 2 | 3 | __all__ = ["RandomShapeletTransform", "RandomDilatedShapeletTransform", "SAST", "RSAST"] 4 | 5 | from aeon.transformations.collection.shapelet_based._dilated_shapelet_transform import ( 6 | RandomDilatedShapeletTransform, 7 | ) 8 | from aeon.transformations.collection.shapelet_based._rsast import RSAST 9 | from aeon.transformations.collection.shapelet_based._sast import SAST 10 | from aeon.transformations.collection.shapelet_based._shapelet_transform import ( 11 | RandomShapeletTransform, 12 | ) 13 | -------------------------------------------------------------------------------- /aeon/transformations/collection/shapelet_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for shapelet based collection transformers.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/collection/shapelet_based/tests/test_shapelet_transform.py: -------------------------------------------------------------------------------- 1 | """Test shapelet transform.""" 2 | 3 | import pytest 4 | 5 | from aeon.testing.data_generation import make_example_3d_numpy 6 | from aeon.transformations.collection.shapelet_based import RandomShapeletTransform 7 | 8 | 9 | def test_shapelet_transform(): 10 | """Test edge cases for RandomShapeletTransform.""" 11 | X, y = make_example_3d_numpy(n_cases=10, n_timepoints=20, n_labels=4) 12 | rst = RandomShapeletTransform(max_shapelets=3, remove_self_similar=True) 13 | rst._fit(X, y) 14 | # Assert at least one shapelet per class when max_shapelets < n_labels 15 | assert len(rst.shapelets) == 4 16 | X, y = make_example_3d_numpy( 17 | n_cases=3, n_timepoints=rst.max_shapelet_length_ - 1, n_labels=4 18 | ) 19 | with pytest.raises( 20 | ValueError, 21 | match="The shortest series in transform is " 22 | "smaller than the min shapelet length", 23 | ): 24 | rst._transform(X) 25 | -------------------------------------------------------------------------------- /aeon/transformations/collection/signature_based/__init__.py: -------------------------------------------------------------------------------- 1 | """Signature based Collection transformations.""" 2 | 3 
| __all__ = [ 4 | "SignatureTransformer", 5 | ] 6 | 7 | from aeon.transformations.collection.signature_based._signature_method import ( 8 | SignatureTransformer, 9 | ) 10 | -------------------------------------------------------------------------------- /aeon/transformations/collection/signature_based/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for signature based collection transformers.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/collection/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for transformations collections.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/collection/tests/test_acf.py: -------------------------------------------------------------------------------- 1 | """Test for incorrect input for ARCoefficientTransformer transformer.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.transformations.collection import AutocorrelationFunctionTransformer 7 | 8 | 9 | def test_acf(): 10 | """Test ACF Transformer exceptions.""" 11 | X = np.random.random((2, 1, 10)) 12 | acf = AutocorrelationFunctionTransformer(n_lags=100) 13 | with pytest.raises(ValueError, match=r"must be smaller than n_timepoints - 1"): 14 | acf.fit_transform(X) 15 | -------------------------------------------------------------------------------- /aeon/transformations/collection/tests/test_ar_coefficient.py: -------------------------------------------------------------------------------- 1 | """Test for incorrect input for ARCoefficientTransformer transformer.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.transformations.collection import ARCoefficientTransformer 7 | from aeon.utils.validation._dependencies import _check_soft_dependencies 8 | 9 | 10 | @pytest.mark.skipif( 11 | not _check_soft_dependencies("statsmodels", severity="none"), 12 | reason="skip test if required soft dependency statsmodels not available", 13 | ) 14 | def test_ar_coefficient(): 15 | """Test AR Coefficient Transformer exceptions.""" 16 | X = np.random.random((2, 1, 10)) 17 | ar = ARCoefficientTransformer(order=0) 18 | Xt = ar.fit_transform(X) 19 | assert Xt.shape[2] == 1 20 | ar = ARCoefficientTransformer(order=-10) 21 | Xt = ar.fit_transform(X) 22 | assert Xt.shape[2] == 1 23 | ar = ARCoefficientTransformer(order=100) 24 | with pytest.raises(ValueError, match=r"must be smaller than n_timepoints - 1"): 25 | ar.fit_transform(X) 26 | ar = ARCoefficientTransformer(order=6, min_values=5) 27 | Xt = ar.fit_transform(X) 28 | assert Xt.shape[2] == 5 29 | -------------------------------------------------------------------------------- /aeon/transformations/collection/tests/test_tabularizer.py: -------------------------------------------------------------------------------- 1 | """Tests for tabularizer.""" 2 | 3 | import numpy as np 4 | 5 | from aeon.transformations.collection import Tabularizer 6 | 7 | 8 | def test_tabularizer(): 9 | """Test Tabularizer.""" 10 | tab = Tabularizer() 11 | arr = np.random.random(size=(10, 3, 100)) 12 | res = tab.fit_transform(arr) 13 | assert res.shape == (10, 300) 14 | res = tab.fit_transform(arr) 15 | assert res.shape == (10, 300) 16 | -------------------------------------------------------------------------------- /aeon/transformations/collection/unequal_length/__init__.py: 
-------------------------------------------------------------------------------- 1 | """Transformations for unequal length collections.""" 2 | 3 | __all__ = [ 4 | "Padder", 5 | "Resizer", 6 | "Truncator", 7 | ] 8 | 9 | from aeon.transformations.collection.unequal_length._pad import Padder 10 | from aeon.transformations.collection.unequal_length._resize import Resizer 11 | from aeon.transformations.collection.unequal_length._truncate import Truncator 12 | -------------------------------------------------------------------------------- /aeon/transformations/collection/unequal_length/_commons.py: -------------------------------------------------------------------------------- 1 | """Common functions for unequal length transformations. 2 | 3 | These should ideally be incorporated into the collection data utilities in utils/ in 4 | the future. 5 | """ 6 | 7 | import numpy as np 8 | 9 | 10 | def _get_min_length(X): 11 | if isinstance(X, np.ndarray): 12 | return X.shape[2] 13 | else: 14 | return min([x.shape[1] for x in X]) 15 | 16 | 17 | def _get_max_length(X): 18 | if isinstance(X, np.ndarray): 19 | return X.shape[2] 20 | else: 21 | return max([x.shape[1] for x in X]) 22 | -------------------------------------------------------------------------------- /aeon/transformations/collection/unequal_length/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for unequal length transformations.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/series/compose/__init__.py: -------------------------------------------------------------------------------- 1 | """Compositions for series transforms.""" 2 | 3 | __all__ = [ 4 | "SeriesTransformerPipeline", 5 | "SeriesId", 6 | ] 7 | 8 | from aeon.transformations.series.compose._identity import SeriesId 9 | from aeon.transformations.series.compose._pipeline import SeriesTransformerPipeline 10 | -------------------------------------------------------------------------------- /aeon/transformations/series/compose/_identity.py: -------------------------------------------------------------------------------- 1 | """Identity transformer.""" 2 | 3 | from aeon.transformations.series import BaseSeriesTransformer 4 | from aeon.utils.data_types import VALID_SERIES_INNER_TYPES 5 | 6 | 7 | class SeriesId(BaseSeriesTransformer): 8 | """Identity transformer, returns data unchanged in transform/inverse_transform.""" 9 | 10 | _tags = { 11 | "X_inner_type": VALID_SERIES_INNER_TYPES, 12 | "fit_is_empty": True, 13 | "capability:inverse_transform": True, 14 | "capability:multivariate": True, 15 | "capability:missing_values": True, 16 | } 17 | 18 | def __init__(self): 19 | super().__init__(axis=1) 20 | 21 | def _transform(self, X, y=None): 22 | return X 23 | 24 | def _inverse_transform(self, X, y=None): 25 | return X 26 | -------------------------------------------------------------------------------- /aeon/transformations/series/compose/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for composable series transformers.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/series/compose/tests/test_pipeline.py: -------------------------------------------------------------------------------- 1 | """Unit tests for series transform pipeline.""" 2 | 3 | import pytest 4 | from numpy.testing import assert_array_almost_equal 5 | 6 | from 
aeon.testing.data_generation import make_example_2d_numpy_series 7 | from aeon.transformations.series import AutoCorrelationSeriesTransformer, LogTransformer 8 | from aeon.transformations.series.compose import SeriesTransformerPipeline 9 | 10 | 11 | @pytest.mark.parametrize( 12 | "transformers", 13 | [ 14 | LogTransformer(), 15 | [LogTransformer(), AutoCorrelationSeriesTransformer()], 16 | ], 17 | ) 18 | def test_series_transform_pipeline(transformers): 19 | """Test the collection transform pipeline.""" 20 | X = make_example_2d_numpy_series(n_timepoints=12) 21 | 22 | pipeline = SeriesTransformerPipeline(transformers=transformers) 23 | pipeline.fit(X) 24 | Xt = pipeline.transform(X) 25 | 26 | pipeline2 = SeriesTransformerPipeline(transformers=transformers) 27 | Xt2 = pipeline2.fit_transform(X) 28 | 29 | if not isinstance(transformers, list): 30 | transformers = [transformers] 31 | for t in transformers: 32 | X = t.fit_transform(X) 33 | 34 | assert_array_almost_equal(Xt, X) 35 | assert_array_almost_equal(Xt2, X) 36 | -------------------------------------------------------------------------------- /aeon/transformations/series/smoothing/__init__.py: -------------------------------------------------------------------------------- 1 | """Series smoothing transformers.""" 2 | 3 | __all__ = [ 4 | "DiscreteFourierApproximation", 5 | "ExponentialSmoothing", 6 | "GaussianFilter", 7 | "MovingAverage", 8 | "SavitzkyGolayFilter", 9 | "RecursiveMedianSieve", 10 | ] 11 | 12 | from aeon.transformations.series.smoothing._dfa import DiscreteFourierApproximation 13 | from aeon.transformations.series.smoothing._exp_smoothing import ExponentialSmoothing 14 | from aeon.transformations.series.smoothing._gauss import GaussianFilter 15 | from aeon.transformations.series.smoothing._moving_average import MovingAverage 16 | from aeon.transformations.series.smoothing._rms import RecursiveMedianSieve 17 | from aeon.transformations.series.smoothing._sg import SavitzkyGolayFilter 18 | -------------------------------------------------------------------------------- /aeon/transformations/series/smoothing/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for series smoothing transformations.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/series/smoothing/tests/test_dft.py: -------------------------------------------------------------------------------- 1 | """Tests for DFT transformation.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.transformations.series.smoothing._dfa import DiscreteFourierApproximation 7 | 8 | 9 | @pytest.mark.parametrize("r", [0.00, 0.50, 1.00]) 10 | @pytest.mark.parametrize("sort", [True, False]) 11 | def test_dft(r, sort): 12 | """Test the functionality of DFT transformation.""" 13 | n_samples = 100 14 | t = np.linspace(0, 10, n_samples) 15 | x1 = ( 16 | 0.5 * np.sin(2 * np.pi * 1 * t) 17 | + 0.2 * np.sin(2 * np.pi * 5 * t) 18 | + 0.1 * np.sin(2 * np.pi * 10 * t) 19 | ) 20 | x2 = ( 21 | 0.4 * np.sin(2 * np.pi * 1.5 * t) 22 | + 0.3 * np.sin(2 * np.pi * 4 * t) 23 | + 0.1 * np.sin(2 * np.pi * 8 * t) 24 | ) 25 | x12 = np.array([x1, x2]) 26 | 27 | dft = DiscreteFourierApproximation(r=r, sort=sort) 28 | x_1 = dft.fit_transform(x1) 29 | x_2 = dft.fit_transform(x2) 30 | x_12 = dft.fit_transform(x12) 31 | 32 | np.testing.assert_almost_equal(x_1[0], x_12[0], decimal=4) 33 | np.testing.assert_almost_equal(x_2[0], x_12[1], decimal=4) 34 | 
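The smoothing tests in this directory (test_dft above, test_gauss, test_rms and test_sg below) all check one invariant: transforming a single series gives the same values as transforming that series stacked into a (n_channels, n_timepoints) array. A condensed sketch of that pattern follows; it is illustrative only (not a repository file) and borrows its parameter values from test_gauss.py.

import numpy as np

from aeon.transformations.series.smoothing import GaussianFilter

t = np.linspace(0, 10, 100)
x1 = np.sin(2 * np.pi * t)
x2 = np.cos(2 * np.pi * t)

gauss = GaussianFilter(sigma=1, order=0)
single = gauss.fit_transform(x1)                    # one univariate series
stacked = gauss.fit_transform(np.array([x1, x2]))   # two channels at once

# Channel 0 of the stacked result should match the single-series result.
np.testing.assert_almost_equal(single[0], stacked[0], decimal=4)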
-------------------------------------------------------------------------------- /aeon/transformations/series/smoothing/tests/test_gauss.py: -------------------------------------------------------------------------------- 1 | """Tests for Gauss transformation.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.transformations.series.smoothing import GaussianFilter 7 | 8 | 9 | @pytest.mark.parametrize("sigma", [0.1, 1, 10]) 10 | @pytest.mark.parametrize("order", [0, 1, 2]) 11 | def test_gauss(sigma, order): 12 | """Test the functionality of Gauss transformation.""" 13 | n_samples = 100 14 | t = np.linspace(0, 10, n_samples) 15 | x1 = ( 16 | 0.5 * np.sin(2 * np.pi * 1 * t) 17 | + 0.2 * np.sin(2 * np.pi * 5 * t) 18 | + 0.1 * np.sin(2 * np.pi * 10 * t) 19 | ) 20 | x2 = ( 21 | 0.4 * np.sin(2 * np.pi * 1.5 * t) 22 | + 0.3 * np.sin(2 * np.pi * 4 * t) 23 | + 0.1 * np.sin(2 * np.pi * 8 * t) 24 | ) 25 | x12 = np.array([x1, x2]) 26 | 27 | sg = GaussianFilter(sigma=sigma, order=order) 28 | x_1 = sg.fit_transform(x1) 29 | x_2 = sg.fit_transform(x2) 30 | x_12 = sg.fit_transform(x12) 31 | 32 | np.testing.assert_almost_equal(x_1[0], x_12[0], decimal=4) 33 | np.testing.assert_almost_equal(x_2[0], x_12[1], decimal=4) 34 | -------------------------------------------------------------------------------- /aeon/transformations/series/smoothing/tests/test_rms.py: -------------------------------------------------------------------------------- 1 | """Tests for SIV transformation.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.transformations.series.smoothing import RecursiveMedianSieve 7 | 8 | 9 | @pytest.mark.parametrize( 10 | "window_length", [1, 2, 3, 5, 7, 10, 11, [2, 3], [3, 5], [3, 5, 7], [3, 5, 7, 11]] 11 | ) 12 | def test_siv(window_length): 13 | """Test the functionality of SIV transformation.""" 14 | n_samples = 100 15 | t = np.linspace(0, 10, n_samples) 16 | x1 = ( 17 | 0.5 * np.sin(2 * np.pi * 1 * t) 18 | + 0.2 * np.sin(2 * np.pi * 5 * t) 19 | + 0.1 * np.sin(2 * np.pi * 10 * t) 20 | ) 21 | x2 = ( 22 | 0.4 * np.sin(2 * np.pi * 1.5 * t) 23 | + 0.3 * np.sin(2 * np.pi * 4 * t) 24 | + 0.1 * np.sin(2 * np.pi * 8 * t) 25 | ) 26 | x12 = np.array([x1, x2]) 27 | 28 | siv = RecursiveMedianSieve(window_length=window_length) 29 | x_1 = siv.fit_transform(x1) 30 | x_2 = siv.fit_transform(x2) 31 | x_12 = siv.fit_transform(x12) 32 | 33 | np.testing.assert_almost_equal(x_1[0], x_12[0], decimal=4) 34 | np.testing.assert_almost_equal(x_2[0], x_12[1], decimal=4) 35 | -------------------------------------------------------------------------------- /aeon/transformations/series/smoothing/tests/test_sg.py: -------------------------------------------------------------------------------- 1 | """Tests for SG transformation.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.transformations.series.smoothing import SavitzkyGolayFilter 7 | 8 | 9 | @pytest.mark.parametrize("window_length", [5, 9, 17]) 10 | @pytest.mark.parametrize("polyorder", [2, 3, 4]) 11 | def test_sg(window_length, polyorder): 12 | """Test the functionality of SG transformation.""" 13 | n_samples = 100 14 | t = np.linspace(0, 10, n_samples) 15 | x1 = ( 16 | 0.5 * np.sin(2 * np.pi * 1 * t) 17 | + 0.2 * np.sin(2 * np.pi * 5 * t) 18 | + 0.1 * np.sin(2 * np.pi * 10 * t) 19 | ) 20 | x2 = ( 21 | 0.4 * np.sin(2 * np.pi * 1.5 * t) 22 | + 0.3 * np.sin(2 * np.pi * 4 * t) 23 | + 0.1 * np.sin(2 * np.pi * 8 * t) 24 | ) 25 | x12 = np.array([x1, x2]) 26 | 27 | sg = SavitzkyGolayFilter(window_length=window_length, 
polyorder=polyorder) 28 | x_1 = sg.fit_transform(x1) 29 | x_2 = sg.fit_transform(x2) 30 | x_12 = sg.fit_transform(x12) 31 | 32 | np.testing.assert_almost_equal(x_1[0], x_12[0], decimal=4) 33 | np.testing.assert_almost_equal(x_2[0], x_12[1], decimal=4) 34 | -------------------------------------------------------------------------------- /aeon/transformations/series/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for series transformations.""" 2 | -------------------------------------------------------------------------------- /aeon/transformations/series/tests/test_clasp.py: -------------------------------------------------------------------------------- 1 | """Test ClaSP series transformer.""" 2 | 3 | import numpy as np 4 | 5 | from aeon.transformations.series import ClaSPTransformer 6 | 7 | 8 | def test_clasp(): 9 | """Test ClaSP series transformer returned size.""" 10 | for dtype in [np.float64, np.float32, np.float16]: 11 | series = np.arange(100, dtype=dtype) 12 | clasp = ClaSPTransformer() 13 | profile = clasp.fit_transform(series) 14 | 15 | m = len(series) - clasp.window_length + 1 16 | assert np.float64 == profile.dtype 17 | assert m == len(profile) 18 | -------------------------------------------------------------------------------- /aeon/transformations/series/tests/test_diff.py: -------------------------------------------------------------------------------- 1 | """Tests for Difference transformation.""" 2 | 3 | import numpy as np 4 | 5 | from aeon.transformations.series._diff import DifferenceTransformer 6 | 7 | 8 | def test_diff(): 9 | """Tests basic first and second order differencing.""" 10 | X = np.array([[1.0, 4.0, 9.0, 16.0, 25.0, 36.0]]) 11 | 12 | dt1 = DifferenceTransformer(order=1) 13 | Xt1 = dt1.fit_transform(X) 14 | expected1 = np.array([[3.0, 5.0, 7.0, 9.0, 11.0]]) 15 | 16 | np.testing.assert_allclose( 17 | Xt1, expected1, equal_nan=True, err_msg="Value mismatch for order 1" 18 | ) 19 | 20 | dt2 = DifferenceTransformer(order=2) 21 | Xt2 = dt2.fit_transform(X) 22 | expected2 = np.array([[2.0, 2.0, 2.0, 2.0]]) 23 | 24 | np.testing.assert_allclose( 25 | Xt2, expected2, equal_nan=True, err_msg="Value mismatch for order 2" 26 | ) 27 | 28 | Y = np.array([[1, 2, 3, 4], [5, 3, 1, 8]]) 29 | 30 | Yt1 = dt1.fit_transform(Y) 31 | expected3 = np.array([[1, 1, 1], [-2, -2, 7]]) 32 | np.testing.assert_allclose( 33 | Yt1, 34 | expected3, 35 | equal_nan=True, 36 | err_msg="Value mismatch for order 1,multivariate", 37 | ) 38 | -------------------------------------------------------------------------------- /aeon/transformations/series/tests/test_matrix_profile.py: -------------------------------------------------------------------------------- 1 | """Tests for MatrixProfileSeriesTransformer.""" 2 | 3 | import numpy as np 4 | import pandas as pd 5 | import pytest 6 | 7 | from aeon.transformations.series import MatrixProfileSeriesTransformer 8 | from aeon.utils.validation._dependencies import _check_soft_dependencies 9 | 10 | 11 | @pytest.mark.skipif( 12 | not _check_soft_dependencies("stumpy", severity="none"), 13 | reason="skip test if required soft dependency stumpy is not available", 14 | ) 15 | def test_matrix_profile(): 16 | """Test on example in stumpy documentation.""" 17 | series = np.array([584.0, -11.0, 23.0, 79.0, 1001.0, 0.0, -19.0]) 18 | series2 = pd.Series([584.0, -11.0, 23.0, 79.0, 1001.0, 0.0, -19.0]) 19 | mp = MatrixProfileSeriesTransformer(window_length=3) 20 | res1 = mp.fit_transform(series) 21 | res2 = 
mp.fit_transform(series2) 22 | expected = np.array( 23 | [ 24 | 0.11633857113691416, 25 | 2.694073918063438, 26 | 3.0000926340485923, 27 | 2.694073918063438, 28 | 0.11633857113691416, 29 | ] 30 | ) 31 | np.testing.assert_allclose(res1, expected, rtol=1e-04, atol=1e-04) 32 | np.testing.assert_allclose(res1, res2, rtol=1e-04, atol=1e-04) 33 | -------------------------------------------------------------------------------- /aeon/transformations/series/tests/test_pca.py: -------------------------------------------------------------------------------- 1 | """Tests for PCATransformer.""" 2 | 3 | __maintainer__ = ["TonyBagnall"] 4 | 5 | from aeon.testing.data_generation import make_example_dataframe_series 6 | from aeon.transformations.series._pca import PCASeriesTransformer 7 | 8 | 9 | def test_pca(): 10 | """Test PCA transformer.""" 11 | X = make_example_dataframe_series(n_channels=3) 12 | transformer = PCASeriesTransformer(n_components=2) 13 | Xt = transformer.fit_transform(X, axis=0) 14 | # test that the shape is correct 15 | assert Xt.shape == (X.shape[0], 2) 16 | # test that the column names are correct 17 | assert "PC_0" in Xt.columns 18 | assert "PC_1" in Xt.columns 19 | -------------------------------------------------------------------------------- /aeon/transformations/series/tests/test_warping.py: -------------------------------------------------------------------------------- 1 | """Test Warping series transformer.""" 2 | 3 | __maintainer__ = ["hadifawaz1999"] 4 | 5 | import pytest 6 | 7 | from aeon.clustering.averaging import VALID_BA_METRICS 8 | from aeon.distances import get_alignment_path_function 9 | from aeon.testing.data_generation import make_example_2d_numpy_series 10 | from aeon.transformations.series import WarpingSeriesTransformer 11 | 12 | 13 | @pytest.mark.parametrize("distance", VALID_BA_METRICS) 14 | def test_warping_path_transformer(distance): 15 | """Test the functionality of Warping transformation.""" 16 | x = make_example_2d_numpy_series(n_timepoints=20, n_channels=2) 17 | y = make_example_2d_numpy_series(n_timepoints=20, n_channels=2) 18 | 19 | alignment_path_function = get_alignment_path_function(method=distance) 20 | 21 | warping_path = alignment_path_function(x, y)[0] 22 | 23 | new_x = WarpingSeriesTransformer( 24 | series_index=0, warping_path=warping_path 25 | ).fit_transform(x) 26 | new_y = WarpingSeriesTransformer( 27 | series_index=1, warping_path=warping_path 28 | ).fit_transform(y) 29 | 30 | assert int(new_x.shape[1]) == len(warping_path) 31 | assert int(new_y.shape[1]) == len(warping_path) 32 | -------------------------------------------------------------------------------- /aeon/transformations/series/tests/test_yeojohnson.py: -------------------------------------------------------------------------------- 1 | """Tests for YeoJohnsonTransformer.""" 2 | 3 | __maintainer__ = [] 4 | __all__ = [] 5 | 6 | import numpy as np 7 | from scipy.stats import yeojohnson 8 | 9 | from aeon.datasets import load_airline 10 | from aeon.transformations.series._yeojohnson import YeoJohnsonTransformer 11 | 12 | 13 | def test_yeojohnson_against_scipy(): 14 | """Test YeoJohnsonTransformer against scipy implementation.""" 15 | y = load_airline() 16 | 17 | t = YeoJohnsonTransformer() 18 | actual = t.fit_transform(y) 19 | 20 | expected, expected_lambda = yeojohnson(y) 21 | np.testing.assert_almost_equal(actual, expected, decimal=12) 22 | assert t._lambda == expected_lambda 23 | -------------------------------------------------------------------------------- 
/aeon/utils/__init__.py: -------------------------------------------------------------------------------- 1 | """Utility functionality.""" 2 | 3 | __all__ = [ 4 | "show_versions", # github debug util 5 | ] 6 | 7 | from aeon.utils.show_versions import show_versions 8 | -------------------------------------------------------------------------------- /aeon/utils/base/__init__.py: -------------------------------------------------------------------------------- 1 | """Base class collections and utilities.""" 2 | 3 | __all__ = [ 4 | "BASE_CLASS_REGISTER", 5 | "VALID_ESTIMATOR_BASES", 6 | "get_identifier", 7 | ] 8 | 9 | from aeon.utils.base._identifier import get_identifier 10 | from aeon.utils.base._register import BASE_CLASS_REGISTER, VALID_ESTIMATOR_BASES 11 | -------------------------------------------------------------------------------- /aeon/utils/base/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for base class utilities.""" 2 | -------------------------------------------------------------------------------- /aeon/utils/base/tests/test_register.py: -------------------------------------------------------------------------------- 1 | """Test the base class registers follow the correct format.""" 2 | 3 | from aeon.base import BaseAeonEstimator 4 | from aeon.utils.base._register import BASE_CLASS_REGISTER, VALID_ESTIMATOR_BASES 5 | 6 | 7 | def test_base_class_register(): 8 | """Test the base class registers follow the correct format.""" 9 | assert isinstance(BASE_CLASS_REGISTER, dict) 10 | assert len(BASE_CLASS_REGISTER) > 0 11 | assert all(isinstance(k, str) for k in BASE_CLASS_REGISTER.keys()) 12 | assert all( 13 | issubclass(v, BaseAeonEstimator) or isinstance(v, BaseAeonEstimator) 14 | for v in BASE_CLASS_REGISTER.values() 15 | ) 16 | 17 | assert len(VALID_ESTIMATOR_BASES) < len(BASE_CLASS_REGISTER) 18 | assert BaseAeonEstimator not in VALID_ESTIMATOR_BASES.values() 19 | -------------------------------------------------------------------------------- /aeon/utils/conversion/__init__.py: -------------------------------------------------------------------------------- 1 | """Conversion utilities.""" 2 | 3 | __all__ = [ 4 | "resolve_equal_length_inner_type", 5 | "resolve_unequal_length_inner_type", 6 | "convert_collection", 7 | "convert_series", 8 | ] 9 | 10 | from aeon.utils.conversion._convert_collection import ( 11 | convert_collection, 12 | resolve_equal_length_inner_type, 13 | resolve_unequal_length_inner_type, 14 | ) 15 | from aeon.utils.conversion._convert_series import convert_series 16 | -------------------------------------------------------------------------------- /aeon/utils/conversion/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Conversion tests.""" 2 | -------------------------------------------------------------------------------- /aeon/utils/networks/__init__.py: -------------------------------------------------------------------------------- 1 | """Utils for tensorflow_addons.""" 2 | -------------------------------------------------------------------------------- /aeon/utils/numba/__init__.py: -------------------------------------------------------------------------------- 1 | """Numba utility functionality.""" 2 | -------------------------------------------------------------------------------- /aeon/utils/numba/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for numba utils.""" 2 | 
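The conversion helpers exported from aeon/utils/conversion/__init__.py above (convert_collection, convert_series and the resolve_* helpers) move data between aeon's inner container types. A minimal sketch of how convert_collection might be called, not taken from the aeon source: the "np-list" type string and the list-of-2D-arrays result are assumptions based on aeon's documented collection types.

# Hedged usage sketch (illustrative only, not an aeon source file).
from aeon.testing.data_generation import make_example_3d_numpy
from aeon.utils.conversion import convert_collection

X, _ = make_example_3d_numpy()  # (n_cases, n_channels, n_timepoints) array; labels ignored

# Assumed call: convert the 3D numpy collection into a list of 2D arrays,
# one (n_channels, n_timepoints) array per case.
X_list = convert_collection(X, "np-list")
assert isinstance(X_list, list) and X_list[0].ndim == 2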
-------------------------------------------------------------------------------- /aeon/utils/self_supervised/__init__.py: -------------------------------------------------------------------------------- 1 | """Utils for self_supervised.""" 2 | -------------------------------------------------------------------------------- /aeon/utils/self_supervised/general.py: -------------------------------------------------------------------------------- 1 | """General utils for self_supervised.""" 2 | 3 | __all__ = ["z_normalization"] 4 | 5 | import numpy as np 6 | 7 | 8 | def z_normalization(X, axis=1): 9 | """Z-Normalize collection of time series. 10 | 11 | Parameters 12 | ---------- 13 | X : np.ndarray 14 | The input collection of time series of shape 15 | (n_cases, n_channels, n_timepoints). 16 | axis : int, default = 1 17 | The time axis, along which z-normalization 18 | is performed. 19 | 20 | Returns 21 | ------- 22 | The normalized version of X, with the same shape as the input. 23 | """ 24 | stds = np.std(X, axis=axis, keepdims=True) 25 | if len(stds[stds == 0.0]) > 0: 26 | stds[stds == 0.0] = 1.0 27 | return (X - X.mean(axis=axis, keepdims=True)) / stds 28 | return (X - X.mean(axis=axis, keepdims=True)) / (X.std(axis=axis, keepdims=True)) 29 | -------------------------------------------------------------------------------- /aeon/utils/split.py: -------------------------------------------------------------------------------- 1 | """Split function.""" 2 | 3 | 4 | def split_series(X, n_intervals): 5 | """Split a time series into approximately equal intervals. 6 | 7 | Adapted from https://stackoverflow.com/questions/2130016/ 8 | splitting-a-list-into-n-parts-of-approximately 9 | -equal-length 10 | 11 | Parameters 12 | ---------- 13 | X : np.ndarray of shape (n_timepoints,), the series to split 14 | n_intervals : int, the number of intervals to split X into 15 | Returns 16 | ------- 17 | output : list of np.ndarray of length n_intervals, with intervals of approximately equal size 18 | """ 19 | avg = len(X) / float(n_intervals) 20 | output = [] 21 | beginning = 0.0 22 | 23 | while beginning < len(X): 24 | output.append(X[int(beginning) : int(beginning + avg)]) 25 | beginning += avg 26 | 27 | return output 28 | -------------------------------------------------------------------------------- /aeon/utils/tags/__init__.py: -------------------------------------------------------------------------------- 1 | """Estimator tags and tag utilities.""" 2 | 3 | __all__ = [ 4 | "ESTIMATOR_TAGS", 5 | "check_valid_tags", 6 | "all_tags_for_estimator", 7 | ] 8 | 9 | from aeon.utils.tags._discovery import all_tags_for_estimator 10 | from aeon.utils.tags._tags import ESTIMATOR_TAGS 11 | from aeon.utils.tags._validate import check_valid_tags 12 | -------------------------------------------------------------------------------- /aeon/utils/tags/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for estimator tags and tag utilities.""" 2 | -------------------------------------------------------------------------------- /aeon/utils/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for utils.""" 2 | -------------------------------------------------------------------------------- /aeon/utils/tests/test_show_versions.py: -------------------------------------------------------------------------------- 1 | """Test the show versions function.""" 2 | 3 | from aeon.testing.utils.output_suppression import suppress_output 4 | from aeon.utils.show_versions import show_versions 5 | 6 | 7 | @suppress_output() 8 | def test_show_versions(): 
"""Test show versions function.""" 10 | show_versions() 11 | 12 | s = show_versions(as_str=True) 13 | assert isinstance(s, str) 14 | assert "System" in s 15 | assert "Python dependencies" in s 16 | -------------------------------------------------------------------------------- /aeon/utils/tests/test_split.py: -------------------------------------------------------------------------------- 1 | """Tests for SplitsTimeSeries.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.utils.split import split_series 7 | 8 | X = np.arange(10) 9 | testdata = [ 10 | (X, 2, [np.array([0, 1, 2, 3, 4]), np.array([5, 6, 7, 8, 9])]), 11 | (X, 3, [np.array([0, 1, 2]), np.array([3, 4, 5]), np.array([6, 7, 8, 9])]), 12 | ] 13 | 14 | 15 | @pytest.mark.parametrize("X,n_intervals,expected", testdata) 16 | def test_split_series(X, n_intervals, expected): 17 | """Test the splitting of a time series into multiple intervals.""" 18 | res = split_series(X, n_intervals) 19 | 20 | assert len(res) == n_intervals 21 | for x, y in zip(res, expected): 22 | np.testing.assert_array_equal(x, y) 23 | -------------------------------------------------------------------------------- /aeon/utils/validation/labels.py: -------------------------------------------------------------------------------- 1 | """Validation functions for target labels.""" 2 | 3 | import numpy as np 4 | import pandas as pd 5 | from sklearn.utils.multiclass import type_of_target 6 | 7 | 8 | def check_classification_y(y): 9 | """Check y label input is valid for classification tasks. 10 | 11 | Parameters 12 | ---------- 13 | y : pd.Series or np.ndarray 14 | Target variable array. 15 | 16 | Raises 17 | ------ 18 | TypeError 19 | If y is not a 1D pd.Series or np.ndarray. 20 | ValueError 21 | If y is not a binary or multiclass target. 22 | """ 23 | if not isinstance(y, (pd.Series, np.ndarray)): 24 | raise TypeError( 25 | f"y must be a np.array or a pd.Series, but found type: {type(y)}" 26 | ) 27 | if isinstance(y, np.ndarray) and y.ndim > 1: 28 | raise TypeError(f"y must be 1-dimensional, found {y.ndim} dimensions") 29 | 30 | y_type = type_of_target(y, input_name="y") 31 | if y_type != "binary" and y_type != "multiclass": 32 | raise ValueError( 33 | f"y type is {y_type} which is not valid for classification. 
" 34 | f"Should be binary or multiclass according to " 35 | f"sklearn.utils.multiclass.type_of_target" 36 | ) 37 | -------------------------------------------------------------------------------- /aeon/utils/validation/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for validation.""" 2 | -------------------------------------------------------------------------------- /aeon/utils/validation/tests/test_check_imports.py: -------------------------------------------------------------------------------- 1 | """Unit test to check for imports.""" 2 | 3 | import pytest 4 | 5 | from aeon.utils.validation._dependencies import _check_soft_dependencies 6 | 7 | 8 | def test_check_soft_dependencies_raises_error(): 9 | """Test the _check_soft_dependencies() function.""" 10 | with pytest.raises(ModuleNotFoundError, match=r".* soft dependency .*"): 11 | _check_soft_dependencies("unavailable_module") 12 | 13 | with pytest.raises(ModuleNotFoundError, match=r".* soft dependency .*"): 14 | _check_soft_dependencies("unavailable_module_1", "unavailable_module_2") 15 | -------------------------------------------------------------------------------- /aeon/utils/validation/tests/test_init.py: -------------------------------------------------------------------------------- 1 | """Test for check_window_length.""" 2 | 3 | import pytest 4 | 5 | from aeon.utils.validation import check_window_length 6 | 7 | 8 | @pytest.mark.parametrize( 9 | "window_length, n_timepoints, expected", 10 | [ 11 | (0.2, 33, 7), 12 | (43, 23, 43), 13 | (33, 1, 33), 14 | (33, None, 33), 15 | (None, 19, None), 16 | (None, None, None), 17 | (67, 0.3, 67), # bad arg 18 | ], 19 | ) 20 | def test_check_window_length(window_length, n_timepoints, expected): 21 | """Test check_window_length function with various inputs.""" 22 | assert check_window_length(window_length, n_timepoints) == expected 23 | 24 | 25 | @pytest.mark.parametrize( 26 | "window_length, n_timepoints", 27 | [ 28 | ("string", 34), 29 | ("string", "string"), 30 | (6.2, 33), 31 | (-5, 34), 32 | (-0.5, 15), 33 | (6.1, 0.3), 34 | (0.3, 0.1), 35 | (-2.4, 10), 36 | (0.2, None), 37 | ], 38 | ) 39 | def test_window_length_bad_arg(window_length, n_timepoints): 40 | """Test check_window_length function with invalid inputs.""" 41 | with pytest.raises(ValueError): 42 | check_window_length(window_length, n_timepoints) 43 | -------------------------------------------------------------------------------- /aeon/visualisation/distances/__init__.py: -------------------------------------------------------------------------------- 1 | """Plotting tools for distances.""" 2 | -------------------------------------------------------------------------------- /aeon/visualisation/distances/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Testing for distances specific plotting.""" 2 | -------------------------------------------------------------------------------- /aeon/visualisation/distances/tests/test_pairwise_distance_matrix.py: -------------------------------------------------------------------------------- 1 | """Test pairwise distance matrix plotting.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.utils.validation._dependencies import _check_soft_dependencies 7 | from aeon.visualisation import plot_pairwise_distance_matrix 8 | 9 | 10 | @pytest.mark.skipif( 11 | not _check_soft_dependencies(["matplotlib", "seaborn"], severity="none"), 12 | reason="skip test if 
required soft dependency not available", 13 | ) 14 | def test_plot_pairwise_distance_matrix(): 15 | """Test whether plot_pairwise_distance_matrix runs without error.""" 16 | import matplotlib 17 | import matplotlib.pyplot as plt 18 | 19 | matplotlib.use("Agg") 20 | 21 | distance_matrix = np.array([[0.0, 1.0], [1.0, 0.0]]) 22 | a = np.array([1.0, 2.0]) 23 | b = np.array([1.5, 2.5]) 24 | path = [(0, 0), (1, 1)] 25 | 26 | ax = plot_pairwise_distance_matrix(distance_matrix, a, b, path) 27 | fig = plt.gcf() 28 | plt.gcf().canvas.draw_idle() 29 | 30 | assert isinstance(fig, plt.Figure) 31 | assert isinstance(ax, plt.Axes) 32 | assert len(fig.axes) > 0 33 | 34 | plt.close() 35 | -------------------------------------------------------------------------------- /aeon/visualisation/estimator/__init__.py: -------------------------------------------------------------------------------- 1 | """Plotting tools for estimators.""" 2 | -------------------------------------------------------------------------------- /aeon/visualisation/estimator/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Testing for estimator specific plotting.""" 2 | -------------------------------------------------------------------------------- /aeon/visualisation/estimator/tests/test_clasp_plotting.py: -------------------------------------------------------------------------------- 1 | """Test ClaSP plotting.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.segmentation import ClaSPSegmenter 7 | from aeon.testing.data_generation import make_example_pandas_series 8 | from aeon.utils.validation._dependencies import _check_soft_dependencies 9 | from aeon.visualisation import plot_series_with_profiles 10 | 11 | 12 | @pytest.mark.skipif( 13 | not _check_soft_dependencies(["matplotlib", "seaborn"], severity="none"), 14 | reason="skip test if required soft dependency not available", 15 | ) 16 | def test_plot_series_with_profiles(): 17 | """Test whether plot_series_with_profiles runs without error.""" 18 | import matplotlib 19 | import matplotlib.pyplot as plt 20 | 21 | matplotlib.use("Agg") 22 | 23 | series = make_example_pandas_series(n_timepoints=50) 24 | clasp = ClaSPSegmenter() 25 | clasp.fit_predict(series) 26 | 27 | fig, ax = plot_series_with_profiles( 28 | series, clasp.profiles, true_cps=[25], found_cps=clasp.found_cps 29 | ) 30 | plt.gcf().canvas.draw_idle() 31 | 32 | assert ( 33 | isinstance(fig, plt.Figure) 34 | and isinstance(ax, np.ndarray) 35 | and all([isinstance(ax_, plt.Axes) for ax_ in ax]) 36 | ) 37 | 38 | plt.close() 39 | -------------------------------------------------------------------------------- /aeon/visualisation/estimator/tests/test_cluster_plotting.py: -------------------------------------------------------------------------------- 1 | """Test cluster plotting.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.clustering import TimeSeriesKMeans 7 | from aeon.testing.data_generation import make_example_3d_numpy 8 | from aeon.utils.validation._dependencies import _check_soft_dependencies 9 | from aeon.visualisation import plot_cluster_algorithm 10 | 11 | 12 | @pytest.mark.skipif( 13 | not _check_soft_dependencies(["matplotlib", "seaborn"], severity="none"), 14 | reason="skip test if required soft dependency not available", 15 | ) 16 | def test_plot_cluster_algorithm(): 17 | """Test whether plot_cluster_algorithm runs without error.""" 18 | import matplotlib 19 | import matplotlib.pyplot as plt 20 | 21 | 
matplotlib.use("Agg") 22 | 23 | data = make_example_3d_numpy() 24 | kmeans = TimeSeriesKMeans(n_clusters=2, distance="euclidean", max_iter=5) 25 | kmeans.fit(data[0]) 26 | 27 | fig, ax = plot_cluster_algorithm(kmeans, data[0], 2) 28 | plt.gcf().canvas.draw_idle() 29 | 30 | assert ( 31 | isinstance(fig, plt.Figure) 32 | and isinstance(ax, np.ndarray) 33 | and all([isinstance(ax_, plt.Axes) for ax_ in ax]) 34 | ) 35 | 36 | plt.close() 37 | -------------------------------------------------------------------------------- /aeon/visualisation/estimator/tests/test_tic_plotting.py: -------------------------------------------------------------------------------- 1 | """Test temporal importance curve plotting.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.utils.validation._dependencies import _check_soft_dependencies 7 | from aeon.visualisation import plot_temporal_importance_curves 8 | 9 | 10 | @pytest.mark.skipif( 11 | not _check_soft_dependencies(["matplotlib", "seaborn"], severity="none"), 12 | reason="skip test if required soft dependency not available", 13 | ) 14 | def test_plot_temporal_importance_curves(): 15 | """Test whether plot_temporal_importance_curves runs without error.""" 16 | import matplotlib 17 | import matplotlib.pyplot as plt 18 | 19 | matplotlib.use("Agg") 20 | 21 | names = ["Mean", "Median"] 22 | curves = [np.random.rand(50), np.random.rand(50)] 23 | 24 | fig, ax = plot_temporal_importance_curves(curves, names) 25 | plt.gcf().canvas.draw_idle() 26 | 27 | assert isinstance(fig, plt.Figure) and isinstance(ax, plt.Axes) 28 | 29 | plt.close() 30 | -------------------------------------------------------------------------------- /aeon/visualisation/learning_task/__init__.py: -------------------------------------------------------------------------------- 1 | """Learning task specific plotting functions.""" 2 | -------------------------------------------------------------------------------- /aeon/visualisation/learning_task/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for learning task plotting.""" 2 | -------------------------------------------------------------------------------- /aeon/visualisation/learning_task/tests/test_forecasting_plotting.py: -------------------------------------------------------------------------------- 1 | """Test the plotting functions for forecasting.""" 2 | 3 | import pytest 4 | from sklearn.model_selection import TimeSeriesSplit 5 | 6 | from aeon.testing.data_generation import make_example_pandas_series 7 | from aeon.utils.validation._dependencies import _check_soft_dependencies 8 | from aeon.visualisation import plot_series_windows 9 | 10 | 11 | @pytest.mark.skipif( 12 | not _check_soft_dependencies(["matplotlib", "seaborn"], severity="none"), 13 | reason="skip test if required soft dependency not available", 14 | ) 15 | def test_plot_series_windows(): 16 | """Test whether plot_series_windows runs without error.""" 17 | import matplotlib 18 | import matplotlib.pyplot as plt 19 | 20 | matplotlib.use("Agg") 21 | 22 | series = make_example_pandas_series(n_timepoints=50) 23 | cv = TimeSeriesSplit(n_splits=4) 24 | 25 | fig, ax = plot_series_windows(series, cv) 26 | plt.gcf().canvas.draw_idle() 27 | 28 | assert isinstance(fig, plt.Figure) and isinstance(ax, plt.Axes) 29 | 30 | plt.close() 31 | -------------------------------------------------------------------------------- /aeon/visualisation/learning_task/tests/test_segmentation_plotting.py: 
-------------------------------------------------------------------------------- 1 | """Test the plotting functions for segmentation.""" 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from aeon.testing.data_generation import make_example_pandas_series 7 | from aeon.utils.validation._dependencies import _check_soft_dependencies 8 | from aeon.visualisation import plot_series_with_change_points 9 | 10 | 11 | @pytest.mark.skipif( 12 | not _check_soft_dependencies(["matplotlib", "seaborn"], severity="none"), 13 | reason="skip test if required soft dependency not available", 14 | ) 15 | def test_plot_series_with_change_points(): 16 | """Test whether plot_series_with_change_points runs without error.""" 17 | import matplotlib 18 | import matplotlib.pyplot as plt 19 | 20 | matplotlib.use("Agg") 21 | 22 | series = make_example_pandas_series(n_timepoints=50) 23 | chp = np.random.randint(0, len(series), 3) 24 | 25 | fig, ax = plot_series_with_change_points(series, chp) 26 | plt.gcf().canvas.draw_idle() 27 | 28 | assert isinstance(fig, plt.Figure) and isinstance(ax, plt.Axes) 29 | 30 | plt.close() 31 | -------------------------------------------------------------------------------- /aeon/visualisation/results/__init__.py: -------------------------------------------------------------------------------- 1 | """Plotting tools for estimator results.""" 2 | -------------------------------------------------------------------------------- /aeon/visualisation/results/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Results plotting tests.""" 2 | -------------------------------------------------------------------------------- /aeon/visualisation/series/__init__.py: -------------------------------------------------------------------------------- 1 | """Plotting tools for time series data.""" 2 | -------------------------------------------------------------------------------- /aeon/visualisation/series/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Series plotting tests.""" 2 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -T 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -T 21 | -------------------------------------------------------------------------------- /docs/_templates/class.rst: -------------------------------------------------------------------------------- 1 | {{ objname }} 2 | {{ underline }} 3 | 4 | .. currentmodule:: {{ module }} 5 | 6 | .. 
autoclass:: {{ objname }} 7 | -------------------------------------------------------------------------------- /docs/_templates/function.rst: -------------------------------------------------------------------------------- 1 | {{ objname }} 2 | {{ underline }} 3 | 4 | .. currentmodule:: {{ module }} 5 | 6 | .. autofunction:: {{ objname }} 7 | -------------------------------------------------------------------------------- /docs/_templates/numpydoc_docstring.rst: -------------------------------------------------------------------------------- 1 | {{index}} 2 | {{summary}} 3 | {{extended_summary}} 4 | {{parameters}} 5 | {{returns}} 6 | {{yields}} 7 | {{other_parameters}} 8 | {{attributes}} 9 | {{raises}} 10 | {{warns}} 11 | {{warnings}} 12 | {{see_also}} 13 | {{notes}} 14 | {{references}} 15 | {{examples}} 16 | {{methods}} 17 | -------------------------------------------------------------------------------- /docs/about/code_of_conduct_moderators.md: -------------------------------------------------------------------------------- 1 | 2 |