├── .all-contributorsrc ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── stale.yml └── workflows │ └── pythonpublish.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── Makefile ├── README.md ├── bin └── simple-viewer ├── docs ├── .gitignore ├── .jekyll-cache │ └── Jekyll │ │ └── Cache │ │ └── Jekyll--Cache │ │ └── b7 │ │ └── 9606fb3afea5bd1609ed40b622142f1c98125abcfe89a76a661b0e8e343910 ├── 404.html ├── CNAME ├── Gemfile ├── Gemfile.lock ├── _config.yml ├── _data │ ├── authors.yml │ ├── comments │ │ ├── chocolate-chip-cookies │ │ │ ├── comment-1473870213530.yml │ │ │ ├── comment-1478213467992.yml │ │ │ ├── comment-1500181304581.yml │ │ │ └── comment-1500214855350.yml │ │ ├── gemified-theme-beta │ │ │ ├── comment-1479508047505.yml │ │ │ ├── comment-1480591890264.yml │ │ │ ├── comment-1482532165381.yml │ │ │ ├── comment-1483456786593.yml │ │ │ ├── comment-1483457152038.yml │ │ │ └── comment-1519412839827.yml │ │ ├── layout-comments │ │ │ ├── comment-1470944006665.yml │ │ │ ├── comment-1470944162041.yml │ │ │ ├── comment-1472308473018.yml │ │ │ ├── comment-1514406795156.yml │ │ │ ├── comment-1514407115153.yml │ │ │ └── comment-1538482988032.yml │ │ ├── layout-header-image-horizontal │ │ │ ├── comment-1483124729757.yml │ │ │ └── comment-1483128389943.yml │ │ ├── layout-header-image-text-readability │ │ │ ├── comment-1474306861206.yml │ │ │ ├── comment-1479253931238.yml │ │ │ └── comment-1479265677846.yml │ │ ├── layout-header-overlay-image │ │ │ ├── comment-1512840683260.yml │ │ │ ├── comment-1513110608614.yml │ │ │ ├── comment-1513111329875.yml │ │ │ └── comment-1513111563922.yml │ │ ├── layout-related-posts │ │ │ ├── comment-1500183131535.yml │ │ │ └── comment-1500214974083.yml │ │ ├── layout-sidebar-custom │ │ │ ├── comment-1519247076880.yml │ │ │ ├── comment-1519247290410.yml │ │ │ └── comment-1520748170396.yml │ │ ├── layout-sidebar-nav-list │ │ │ ├── comment-1492811460488.yml │ │ │ └── comment-1492812977693.yml │ │ ├── layout-table-of-contents-post │ │ │ ├── comment-1512118683486.yml │ │ │ ├── comment-1520683848241.yml │ │ │ ├── comment-1527082094887.yml │ │ │ ├── comment-1527500055863.yml │ │ │ ├── comment-1527690060032.yml │ │ │ ├── comment-1527690281769.yml │ │ │ └── comment-1540422628114.yml │ │ ├── markup-image-alignment │ │ │ └── comment-1534823211504.yml │ │ ├── markup-more-images │ │ │ ├── comment-1472040323579.yml │ │ │ └── comment-1472146638519.yml │ │ ├── markup-syntax-highlighting │ │ │ ├── comment-1470969665387.yml │ │ │ ├── comment-1478928407894.yml │ │ │ ├── comment-1487758246637.yml │ │ │ ├── comment-1505403032256.yml │ │ │ ├── comment-1505403241808.yml │ │ │ └── comment-1514836962551.yml │ │ ├── post-future-date │ │ │ ├── comment-1472064560364.yml │ │ │ └── comment-1472786137736.yml │ │ ├── post-gallery │ │ │ ├── comment-1500055247314.yml │ │ │ └── comment-1500056210776.yml │ │ ├── post-modified │ │ │ ├── comment-1497284119888.yml │ │ │ ├── comment-1497284892766.yml │ │ │ └── comment-1520673777110.yml │ │ ├── post-video-youtube │ │ │ ├── comment-1506623182288.yml │ │ │ ├── comment-1506623710918.yml │ │ │ └── comment-1506632190623.yml │ │ └── welcome-to-jekyll │ │ │ ├── comment-1470942205700.yml │ │ │ ├── comment-1470942247755.yml │ │ │ ├── comment-1470942265819.yml │ │ │ ├── comment-1470942493518.yml │ │ │ ├── comment-1471823346931.yml │ │ │ ├── comment-1471834988411.yml │ │ │ ├── comment-1472786599470.yml │ │ │ ├── comment-1474328950155.yml │ │ │ ├── comment-1500505983331.yml │ │ │ ├── comment-1507141538771.yml │ │ │ ├── 
comment-1529792272424.yml │ │ │ └── comment-1529794012288.yml │ └── navigation.yml ├── _docs │ ├── 01-installation.md │ ├── 02-upgrading.md │ ├── 03-usage.md │ ├── 03.5-tips-and-tricks.md │ ├── 04-classification-specifics.md │ ├── 05-classification-models.md │ ├── 06-classification-data-formats.md │ ├── 07-binary-classification.md │ ├── 08-multi-class-classification.md │ ├── 09-regression.md │ ├── 10-sentence-pair-classification.md │ ├── 11-multi-label-classification.md │ ├── 12-ner-specifics.md │ ├── 13-ner-model.md │ ├── 14-ner-data-formats.md │ ├── 15-ner-minimal-start.md │ ├── 16-qa-specifics.md │ ├── 17-qa-model.md │ ├── 18-qa-data-formats.md │ ├── 19-qa-minimal-start.md │ ├── 20-lm-specifics.md │ ├── 21-lm-model.md │ ├── 22-lm-data-formats.md │ ├── 23-lm-minimal-start.md │ ├── 24-t5-specifics.md │ ├── 25-t5-model.md │ ├── 26-t5-data-formats.md │ ├── 27-t5-minimal-start.md │ ├── 28-seq2seq-specifics.md │ ├── 29-seq2seq-model.md │ ├── 30-seq2seq-data-formats.md │ ├── 31-seq2seq-minimal-start.md │ ├── 32-convAI-specifics.md │ ├── 33-convAI-model.md │ ├── 34-convAI-data-formats.md │ ├── 35-convAI-minimal-start.md │ ├── 36-text-rep-examples.md │ ├── 37-text-rep-model.md │ ├── 38-language-generation-specifics.md │ ├── 39-language-generation-model.md │ ├── 40-language-generation-minimal-start.md │ ├── 41-multi-modal-classification-specifics.md │ ├── 42-multi-modal-classification-model.md │ ├── 43-multi-modal-classification-data-formats.md │ ├── 49-history.md │ ├── 50-contributing.md │ ├── 51-docs-old.md │ ├── 52-license.md │ ├── 53-terms.md │ ├── 54-retrieval-specifics.md │ ├── 55-retrieval-model.md │ ├── 56-retrieval-data-formats.md │ └── 57-retrieval-minimal-start.md ├── _drafts │ └── post-draft.md ├── _pages │ ├── 404.md │ ├── about.md │ ├── archive-layout-with-content.md │ ├── category-archive.md │ ├── collection-archive.html │ ├── edge-case.md │ ├── home.md │ ├── lorem-ipsum.md │ ├── markup.md │ ├── page-a.md │ ├── page-archive.html │ ├── page-b.md │ ├── pets.md │ ├── portfolio-archive.md │ ├── post-archive-feature-rows.html │ ├── recipes-archive.md │ ├── sample-page.md │ ├── sitemap.md │ ├── splash-page.md │ ├── tag-archive.md │ ├── terms.md │ ├── tutorials.md │ └── year-archive.md ├── _sass │ ├── minimal-mistakes.scss │ └── minimal-mistakes │ │ ├── _animations.scss │ │ ├── _archive.scss │ │ ├── _base.scss │ │ ├── _buttons.scss │ │ ├── _footer.scss │ │ ├── _forms.scss │ │ ├── _masthead.scss │ │ ├── _mixins.scss │ │ ├── _navigation.scss │ │ ├── _notices.scss │ │ ├── _page.scss │ │ ├── _print.scss │ │ ├── _reset.scss │ │ ├── _search.scss │ │ ├── _sidebar.scss │ │ ├── _syntax.scss │ │ ├── _tables.scss │ │ ├── _utilities.scss │ │ ├── _variables.scss │ │ └── skins │ │ ├── _air.scss │ │ ├── _aqua.scss │ │ ├── _contrast.scss │ │ ├── _dark.scss │ │ ├── _default.scss │ │ ├── _dirt.scss │ │ ├── _mint.scss │ │ ├── _neon.scss │ │ ├── _plum.scss │ │ └── _sunrise.scss ├── assets │ └── images │ │ ├── 3953273590_704e3899d5_m.jpg │ │ ├── 500x300.png │ │ ├── air-skin-archive-large.png │ │ ├── air-skin-archive.png │ │ ├── air-skin-post-large.png │ │ ├── air-skin-post.png │ │ ├── android-chrome-144x144.png │ │ ├── android-chrome-192x192.png │ │ ├── android-chrome-36x36.png │ │ ├── android-chrome-48x48.png │ │ ├── android-chrome-72x72.png │ │ ├── android-chrome-96x96.png │ │ ├── apple-touch-icon-114x114.png │ │ ├── apple-touch-icon-120x120.png │ │ ├── apple-touch-icon-144x144.png │ │ ├── apple-touch-icon-152x152.png │ │ ├── apple-touch-icon-180x180.png │ │ ├── apple-touch-icon-57x57.png │ │ ├── 
apple-touch-icon-60x60.png │ │ ├── apple-touch-icon-72x72.png │ │ ├── apple-touch-icon-76x76.png │ │ ├── apple-touch-icon-precomposed.png │ │ ├── apple-touch-icon.png │ │ ├── aqua-skin-archive-large.png │ │ ├── aqua-skin-archive.png │ │ ├── aqua-skin-post-large.png │ │ ├── aqua-skin-post.png │ │ ├── bio-photo-2.jpg │ │ ├── bio-photo.jpg │ │ ├── browserconfig.xml │ │ ├── contrast-code-block.jpg │ │ ├── contrast-skin-archive-large.png │ │ ├── contrast-skin-archive.png │ │ ├── contrast-skin-post-large.png │ │ ├── contrast-skin-post.png │ │ ├── dark-code-block.jpg │ │ ├── dark-skin-archive-large.png │ │ ├── dark-skin-archive.png │ │ ├── dark-skin-post-large.png │ │ ├── dark-skin-post.png │ │ ├── default-code-block.jpg │ │ ├── dirt-skin-archive-large.png │ │ ├── dirt-skin-archive.png │ │ ├── dirt-skin-post-large.png │ │ ├── dirt-skin-post.png │ │ ├── dracula-code-block.jpg │ │ ├── facebook-share-example.jpg │ │ ├── favicon-16x16.png │ │ ├── favicon-32x32.png │ │ ├── favicon-96x96.png │ │ ├── favicon.ico │ │ ├── foo-bar-identity-th.jpg │ │ ├── foo-bar-identity.jpg │ │ ├── google-custom-search-engine-layout.png │ │ ├── home-1.jpg │ │ ├── home.jpeg │ │ ├── image-alignment-1200x4002.jpg │ │ ├── image-alignment-150x150.jpg │ │ ├── image-alignment-300x200.jpg │ │ ├── image-alignment-580x300.jpg │ │ ├── manifest.json │ │ ├── markup-syntax-highlighting-teaser.jpg │ │ ├── masthead-search.gif │ │ ├── michael-rose.jpg │ │ ├── mint-skin-archive-large.png │ │ ├── mint-skin-archive.png │ │ ├── mint-skin-post-large.png │ │ ├── mint-skin-post.png │ │ ├── mm-archive-grid-view-example.jpg │ │ ├── mm-author-profile-reddit-color.png │ │ ├── mm-author-profile-reddit-gs.png │ │ ├── mm-author-sidebar-example.jpg │ │ ├── mm-breadcrumbs-example.jpg │ │ ├── mm-browser-mockups.png │ │ ├── mm-bundle-install.gif │ │ ├── mm-custom-sidebar-example.jpg │ │ ├── mm-custom-sidebar-nav.jpg │ │ ├── mm-customizable-feature.png │ │ ├── mm-free-feature.png │ │ ├── mm-gh-pages.gif │ │ ├── mm-github-copy-repo-url.jpg │ │ ├── mm-github-edit-config.gif │ │ ├── mm-header-overlay-black-filter.jpg │ │ ├── mm-header-overlay-red-filter.jpg │ │ ├── mm-home-page-feature.jpg │ │ ├── mm-home-post-pagination-example.jpg │ │ ├── mm-layout-archive-taxonomy.png │ │ ├── mm-layout-archive.png │ │ ├── mm-layout-examples.png │ │ ├── mm-layout-single-header.png │ │ ├── mm-layout-single-meta.png │ │ ├── mm-layout-single.png │ │ ├── mm-layout-splash.png │ │ ├── mm-masthead-logo.png │ │ ├── mm-paragraph-indent-example.jpg │ │ ├── mm-portfolio-collection-example.jpg │ │ ├── mm-priority-plus-masthead.gif │ │ ├── mm-read-time-example.jpg │ │ ├── mm-responsive-feature.png │ │ ├── mm-single-header-example.jpg │ │ ├── mm-single-header-overlay-example.jpg │ │ ├── mm-single-header-overlay-fill-example.jpg │ │ ├── mm-social-share-links-default.png │ │ ├── mm-social-share-links-reddit-color.png │ │ ├── mm-social-share-links-reddit-gs.png │ │ ├── mm-staticman-pr-webhook.jpg │ │ ├── mm-susy-grid-overlay.jpg │ │ ├── mm-teaser-images-example.jpg │ │ ├── mm-theme-fork-repo.png │ │ ├── mm-theme-post-600.jpg │ │ ├── mm-theme-post-750.jpg │ │ ├── mm-toc-helper-example.jpg │ │ ├── mm-twitter-card-summary-image.jpg │ │ ├── mm-twitter-card-summary-large.jpg │ │ ├── mm-ui-text-labels.jpg │ │ ├── mstile-144x144.png │ │ ├── mstile-150x150.png │ │ ├── mstile-310x150.png │ │ ├── mstile-310x310.png │ │ ├── mstile-70x70.png │ │ ├── neon-code-block.jpg │ │ ├── neon-skin-archive-large.png │ │ ├── neon-skin-archive.png │ │ ├── neon-skin-post-large.png │ │ ├── neon-skin-post.png │ │ ├── 
noun_Lightning.svg │ │ ├── page-header-image.png │ │ ├── page-header-og-image.png │ │ ├── page-header-overlay-image.png │ │ ├── page-header-teaser.png │ │ ├── paragraph-indent.png │ │ ├── paragraph-no-indent.png │ │ ├── plum-code-block.jpg │ │ ├── plum-skin-archive-large.png │ │ ├── plum-skin-archive.png │ │ ├── plum-skin-post-large.png │ │ ├── plum-skin-post.png │ │ ├── robot-face-emoji-by-google.png │ │ ├── safari-pinned-tab.svg │ │ ├── search-layout-example.png │ │ ├── site-logo.png │ │ ├── social-media-preview.png │ │ ├── solarized-light-code-block.jpg │ │ ├── sunrise-code-block.jpg │ │ ├── sunrise-skin-archive-large.png │ │ ├── sunrise-skin-archive.png │ │ ├── sunrise-skin-post-large.png │ │ ├── sunrise-skin-post.png │ │ ├── thilina-rajapakse.jpeg │ │ ├── unsplash-gallery-image-1-th.jpg │ │ ├── unsplash-gallery-image-1.jpg │ │ ├── unsplash-gallery-image-2-th.jpg │ │ ├── unsplash-gallery-image-2.jpg │ │ ├── unsplash-gallery-image-3-th.jpg │ │ ├── unsplash-gallery-image-3.jpg │ │ ├── unsplash-gallery-image-4-th.jpg │ │ ├── unsplash-gallery-image-4.jpg │ │ ├── unsplash-image-1.jpg │ │ ├── unsplash-image-10.jpg │ │ ├── unsplash-image-11.jpg │ │ ├── unsplash-image-2.jpg │ │ ├── unsplash-image-3.jpg │ │ ├── unsplash-image-4.jpg │ │ ├── unsplash-image-5.jpg │ │ ├── unsplash-image-6.jpg │ │ ├── unsplash-image-7.jpg │ │ ├── unsplash-image-8.jpg │ │ └── unsplash-image-9.jpg ├── favicon.ico ├── index.html ├── screenshot-layouts.png └── screenshot.png ├── examples ├── hyperparameter tuning │ ├── extended-tuning │ │ ├── data_prep.py │ │ ├── readme.md │ │ ├── sweep_layerwise.py │ │ ├── sweep_vanilla.py │ │ ├── train_default.py │ │ ├── train_layerwise.py │ │ ├── train_vanilla.py │ │ └── utils.py │ └── sweeps.py ├── language_generation │ ├── data_prep.py │ ├── fine_tune.py │ ├── generate.py │ └── train_new_lm.py ├── language_representation │ ├── binary_classification_dummy.py │ └── classification_yelp_polarity │ │ ├── classification_yelp.py │ │ └── data_download.sh ├── llms │ ├── download_squad.ipynb │ └── train.py ├── named_entity_recognition │ └── named_entity_recognition.py ├── question_answering │ ├── lazy_qa.py │ └── question_answering.py ├── retrieval │ ├── download_msmarco.py │ └── train_dpr_base.py ├── seq2seq │ ├── minimal_seq2seq.py │ └── paraphrasing │ │ ├── data_download.sh │ │ ├── predict.py │ │ ├── readme.md │ │ ├── train.py │ │ └── utils.py ├── t5 │ ├── mixed_tasks │ │ ├── data_prep.ipynb │ │ ├── test.py │ │ └── train.py │ ├── mt5 │ │ ├── data_prep.ipynb │ │ ├── test.py │ │ ├── test_multi_lang.py │ │ ├── train.py │ │ ├── translate_dataset.py │ │ └── translation_models.py │ ├── mt5_translation │ │ ├── data_prep.ipynb │ │ ├── readme.md │ │ ├── test.py │ │ └── train.py │ └── training_on_a_new_task │ │ ├── data_prep.py │ │ ├── predict.py │ │ ├── test.py │ │ └── train.py └── text_classification │ ├── binary_classification.py │ ├── lazy_loading_regression.py │ ├── multiclass_classification.py │ ├── multilabel_classification.py │ └── yelp_reviews_polarity │ ├── data_download.sh │ ├── run_trainers.sh │ └── train.py ├── requirements-dev.txt ├── setup.cfg ├── setup.py ├── simpletransformers ├── __init__.py ├── classification │ ├── __init__.py │ ├── classification_model.py │ ├── classification_utils.py │ ├── multi_label_classification_model.py │ ├── multi_modal_classification_model.py │ └── transformer_models │ │ ├── __init__.py │ │ ├── albert_model.py │ │ ├── bert_model.py │ │ ├── camembert_model.py │ │ ├── distilbert_model.py │ │ ├── electra_model.py │ │ ├── flaubert_model.py │ │ ├── 
layoutlm_model.py │ │ ├── longformer_model.py │ │ ├── mmbt_model.py │ │ ├── mobilebert_model.py │ │ ├── roberta_model.py │ │ ├── xlm_model.py │ │ ├── xlm_roberta_model.py │ │ └── xlnet_model.py ├── config │ ├── __init__.py │ ├── global_args.py │ ├── model_args.py │ └── utils.py ├── conv_ai │ ├── __init__.py │ ├── conv_ai_model.py │ └── conv_ai_utils.py ├── custom_models │ ├── __init__.py │ ├── large_representation_retrieval_model.py │ ├── models.py │ ├── pretrain_retrieval_model.py │ ├── reranking_model.py │ └── retrieval_autoencoder.py ├── experimental │ ├── __init__.py │ └── classification │ │ ├── __init__.py │ │ ├── classification_model.py │ │ ├── classification_utils.py │ │ ├── multi_label_classification_model.py │ │ └── transformer_models │ │ ├── __init__.py │ │ ├── albert_model.py │ │ ├── bert_model.py │ │ ├── camembert_model.py │ │ ├── distilbert_model.py │ │ ├── roberta_model.py │ │ ├── xlm_model.py │ │ └── xlnet_model.py ├── language_generation │ ├── __init__.py │ ├── language_generation_model.py │ └── language_generation_utils.py ├── language_modeling │ ├── __init__.py │ ├── language_modeling_model.py │ └── language_modeling_utils.py ├── language_representation │ ├── __init__.py │ ├── representation_model.py │ └── transformer_models │ │ ├── __init__.py │ │ ├── bert_model.py │ │ └── gpt2_model.py ├── losses │ ├── __init__.py │ ├── dice_loss.py │ ├── focal_loss.py │ ├── loss_utils.py │ └── tversky_loss.py ├── model.py ├── ner │ ├── __init__.py │ ├── ner_dataset_loading_script │ │ └── ner_dataset_loading_script.py │ ├── ner_model.py │ └── ner_utils.py ├── pretrain_retrieval │ ├── __init__.py │ ├── pretrain_retrieval_dataset_loading_script │ │ └── pretrain_retrieval_dataset_loading_script.py │ ├── pretrain_retrieval_model.py │ └── pretrain_retrieval_utils.py ├── question_answering │ ├── __init__.py │ ├── qa_dataset_loading_script │ │ ├── qa_dataset_loading_script.py │ │ └── qa_dataset_loading_script.py.lock │ ├── question_answering_model.py │ └── question_answering_utils.py ├── retrieval │ ├── __init__.py │ ├── beir_evaluation.py │ ├── pytrec_eval_utils.py │ ├── retrieval_dataset_loading_script │ │ ├── retrieval_dataset_loading_script.py │ │ └── retrieval_dataset_loading_script.py.lock │ ├── retrieval_model.py │ ├── retrieval_tools.py │ └── retrieval_utils.py ├── seq2seq │ ├── __init__.py │ ├── seq2seq_model.py │ └── seq2seq_utils.py ├── streamlit │ ├── __init__.py │ ├── classification_view.py │ ├── ner_view.py │ ├── qa_view.py │ ├── simple_view.py │ ├── streamlit_utils.py │ └── t5_view.py └── t5 │ ├── __init__.py │ ├── t5_model.py │ └── t5_utils.py ├── tests ├── language_modeling │ └── test_language_modeling_only.py ├── test_classification.py ├── test_language_modeling.py ├── test_language_representation.py ├── test_named_entity_recognition.py ├── test_question_answering.py ├── test_seq2seq.py └── test_t5.py └── train.txt /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] 4 | patreon: thilinarajapakse 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: # Replace with a single Ko-fi username 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: # Replace with a 
single IssueHunt username 11 | otechie: # Replace with a single Otechie username 12 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] 13 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. Please specify the class causing the issue. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 16 | **Expected behavior** 17 | A clear and concise description of what you expected to happen. 18 | 19 | **Screenshots** 20 | If applicable, add screenshots to help explain your problem. 21 | 22 | **Desktop (please complete the following information):** 23 | - OS 24 | 25 | **Additional context** 26 | Add any other context about the problem here. 27 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/stale.yml: -------------------------------------------------------------------------------- 1 | # Number of days of inactivity before an issue becomes stale 2 | daysUntilStale: 60 3 | # Number of days of inactivity before a stale issue is closed 4 | daysUntilClose: 7 5 | # Issues with these labels will never be considered stale 6 | exemptLabels: 7 | - pinned 8 | - security 9 | # Label to use when marking an issue as stale 10 | staleLabel: stale 11 | # Comment to post when marking an issue as stale. Set to `false` to disable 12 | markComment: > 13 | This issue has been automatically marked as stale because it has not had 14 | recent activity. It will be closed if no further activity occurs. Thank you 15 | for your contributions. 16 | # Comment to post when closing a stale issue. 
Set to `false` to disable 17 | closeComment: false 18 | -------------------------------------------------------------------------------- /.github/workflows/pythonpublish.yml: -------------------------------------------------------------------------------- 1 | name: Upload Python Package 2 | 3 | on: push 4 | 5 | jobs: 6 | deploy: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v1 10 | - name: Set up Python 11 | uses: actions/setup-python@v1 12 | with: 13 | python-version: '3.x' 14 | - name: Install dependencies 15 | run: | 16 | python -m pip install --upgrade pip 17 | pip install setuptools wheel twine 18 | python setup.py sdist bdist_wheel 19 | - name: Publish package 20 | if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') 21 | uses: pypa/gh-action-pypi-publish@release/v1 22 | with: 23 | user: __token__ 24 | password: ${{ secrets.PYPI_PASSWORD }} 25 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | install: 2 | pip install -e . 3 | pip install -r requirements-dev.txt 4 | pip list 5 | 6 | clean: 7 | find . -name '*.pyc' -exec rm -f {} + 8 | find . -name '*.pyo' -exec rm -f {} + 9 | find . -name '*~' -exec rm -f {} + 10 | 11 | clean-test: 12 | -rm -r .coverage* 13 | -rm -r data 14 | -rm -r runs 15 | -rm -r outputs 16 | -rm -r cache_dir 17 | -rm -r wandb 18 | -rm train.txt 19 | 20 | formatter: 21 | black --line-length 119 simpletransformers tests --exclude simpletransformers/experimental\ 22 | 23 | lint: clean 24 | flake8 simpletransformers tests --exclude=simpletransformers/experimental 25 | black --check --line-length 119 . simpletransformers tests --exclude simpletransformers/experimental 26 | 27 | types: 28 | pytype --keep-going simpletransformers --exclude simpletransformers/experimental 29 | 30 | test: clean 31 | pytest tests --cov simpletransformers/classification simpletransformers/ner simpletransformers/question_answering simpletransformers/language_modeling simpletransformers/t5 simpletransformers/seq2seq 32 | 33 | test-lm: clean 34 | pytest tests/language_modeling --cov simpletransformers/language_modeling 35 | 36 | # if this runs through we can be sure the readme is properly shown on pypi 37 | check-readme: 38 | python setup.py check --restructuredtext 39 | -------------------------------------------------------------------------------- /bin/simple-viewer: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cat >run_simple_transformers_streamlit_app.py <<'END_SCRIPT' 3 | #!/usr/bin/env python 4 | from simpletransformers.streamlit.simple_view import streamlit_runner 5 | 6 | 7 | streamlit_runner() 8 | 9 | END_SCRIPT 10 | 11 | # Run 12 | streamlit run run_simple_transformers_streamlit_app.py 13 | 14 | rm run_simple_transformers_streamlit_app.py 15 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | _site 2 | .sass-cache 3 | .jekyll-cache 4 | .jekyll-metadata 5 | vendor 6 | -------------------------------------------------------------------------------- /docs/.jekyll-cache/Jekyll/Cache/Jekyll--Cache/b7/9606fb3afea5bd1609ed40b622142f1c98125abcfe89a76a661b0e8e343910: -------------------------------------------------------------------------------- 1 | I"%{"source"=>"/home/thilina/Projects/simple-transformers/docs", 
"destination"=>"/home/thilina/Projects/simple-transformers/docs/_site", "collections_dir"=>"", "cache_dir"=>".jekyll-cache", "plugins_dir"=>"_plugins", "layouts_dir"=>"_layouts", "data_dir"=>"_data", "includes_dir"=>"_includes", "collections"=>{"posts"=>{"output"=>true, "permalink"=>"/:categories/:year/:month/:day/:title:output_ext"}}, "safe"=>false, "include"=>[".htaccess"], "exclude"=>[".sass-cache", ".jekyll-cache", "gemfiles", "Gemfile", "Gemfile.lock", "node_modules", "vendor/bundle/", "vendor/cache/", "vendor/gems/", "vendor/ruby/"], "keep_files"=>[".git", ".svn"], "encoding"=>"utf-8", "markdown_ext"=>"markdown,mkdown,mkdn,mkd,md", "strict_front_matter"=>false, "show_drafts"=>nil, "limit_posts"=>0, "future"=>false, "unpublished"=>false, "whitelist"=>[], "plugins"=>[], "markdown"=>"kramdown", "highlighter"=>"rouge", "lsi"=>false, "excerpt_separator"=>"\n\n", "incremental"=>false, "detach"=>false, "port"=>"4000", "host"=>"127.0.0.1", "baseurl"=>nil, "show_dir_listing"=>false, "permalink"=>"date", "paginate_path"=>"/page:num", "timezone"=>nil, "quiet"=>false, "verbose"=>false, "defaults"=>[], "liquid"=>{"error_mode"=>"warn", "strict_filters"=>false, "strict_variables"=>false}, "kramdown"=>{"auto_ids"=>true, "toc_levels"=>"1..6", "entity_output"=>"as_char", "smart_quotes"=>"lsquo,rsquo,ldquo,rdquo", "input"=>"GFM", "hard_wrap"=>false, "guess_lang"=>true, "footnote_nr"=>1, "show_warnings"=>false}, "livereload_port"=>35729, "serving"=>true, "watch"=>true, "url"=>"http://localhost:4000"}:ET -------------------------------------------------------------------------------- /docs/404.html: -------------------------------------------------------------------------------- 1 | --- 2 | permalink: /404.html 3 | layout: default 4 | --- 5 | 6 | 19 | 20 |
21 | 404 22 | 23 | Page not found :( 24 | The requested page could not be found. 25 |
26 | -------------------------------------------------------------------------------- /docs/CNAME: -------------------------------------------------------------------------------- 1 | simpletransformers.ai -------------------------------------------------------------------------------- /docs/Gemfile: -------------------------------------------------------------------------------- 1 | source "https://rubygems.org" 2 | 3 | gem "github-pages", group: :jekyll_plugins 4 | gem 'jekyll-include-cache' -------------------------------------------------------------------------------- /docs/_data/authors.yml: -------------------------------------------------------------------------------- 1 | # Authors 2 | 3 | Billy Rick: 4 | name : "Billy Rick" 5 | bio : "What do you want, jewels? I am a very extravagant man." 6 | avatar : "/assets/images/bio-photo-2.jpg" 7 | links: 8 | - label: "Email" 9 | icon: "fas fa-fw fa-envelope-square" 10 | url: "mailto:billyrick@rick.com" 11 | - label: "Website" 12 | icon: "fas fa-fw fa-link" 13 | url: "https://thewhip.com" 14 | - label: "Twitter" 15 | icon: "fab fa-fw fa-twitter-square" 16 | url: "https://twitter.com/extravagantman" 17 | 18 | Cornelius Fiddlebone: 19 | name : "Cornelius Fiddlebone" 20 | bio : "I ordered what?" 21 | avatar : "/assets/images/bio-photo.jpg" 22 | links: 23 | - label: "Email" 24 | icon: "fas fa-fw fa-envelope-square" 25 | url: "mailto:cornelius@thewhip.com" 26 | - label: "Twitter" 27 | icon: "fab fa-fw fa-twitter-square" 28 | url: "https://twitter.com/rhymeswithsackit" -------------------------------------------------------------------------------- /docs/_data/comments/chocolate-chip-cookies/comment-1473870213530.yml: -------------------------------------------------------------------------------- 1 | message: Cooooookies! Yum! (Thanks for this awesome them.. and... recipe..) 2 | name: Markus 3 | email: f6f9be6ae6e174661ea7c87a520ffef8 4 | url: 'http://www.markusgiesen.com' 5 | hidden: '' 6 | date: '2016-09-14T16:23:32.840Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/chocolate-chip-cookies/comment-1478213467992.yml: -------------------------------------------------------------------------------- 1 | message: "DELETE ME\r\n\r\njust trying out the comment system" 2 | name: Donald Duck 3 | email: cfedc848417c6ed0ce1eed35e741b26e 4 | url: 'http://disney.com' 5 | hidden: '' 6 | date: '2016-11-03T22:51:07.174Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/chocolate-chip-cookies/comment-1500181304581.yml: -------------------------------------------------------------------------------- 1 | _id: dcd6d950-69e3-11e7-8901-815fa61174ff 2 | message: >- 3 | I'm exploring this theme. Apparently replies are not supported on this theme 4 | (yet). Am I correct? 5 | name: Harry 6 | email: 58dfe5ef6aaf9bf9af5a0b17b52e6168 7 | url: 'http://www.deepfriedbrainproject.com' 8 | hidden: '' 9 | date: '2017-07-16T05:01:44.574Z' 10 | -------------------------------------------------------------------------------- /docs/_data/comments/chocolate-chip-cookies/comment-1500214855350.yml: -------------------------------------------------------------------------------- 1 | _id: faa63db0-6a31-11e7-8901-815fa61174ff 2 | message: >- 3 | @Harry - That's correct. See [this issue on 4 | GitHub](https://github.com/mmistakes/minimal-mistakes/issues/803) for more 5 | context as to why replies aren't added yet. 
6 | name: Michael Rose 7 | email: 1ce71bc10b86565464b612093d89707e 8 | url: 'https://mademistakes.com' 9 | hidden: '' 10 | date: '2017-07-16T14:20:55.348Z' 11 | -------------------------------------------------------------------------------- /docs/_data/comments/gemified-theme-beta/comment-1479508047505.yml: -------------------------------------------------------------------------------- 1 | message: wonderful theme! 2 | name: burner 3 | email: 7327e2b38683d37634ada1dfa1d9f6e7 4 | url: '' 5 | hidden: '' 6 | date: '2016-11-18T22:27:26.851Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/gemified-theme-beta/comment-1480591890264.yml: -------------------------------------------------------------------------------- 1 | _id: b3f80ef0-b7b9-11e6-9b81-0bc3350a75b6 2 | message: Awesome.... 3 | name: Joko 4 | email: e173d909aa244a3fbf81908da947ff94 5 | url: '' 6 | hidden: '' 7 | date: '2016-12-01T11:31:30.231Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/gemified-theme-beta/comment-1482532165381.yml: -------------------------------------------------------------------------------- 1 | _id: 4218daa0-c95f-11e6-b347-77ada084d3be 2 | message: Agreed 3 | name: Bob 4 | email: d73914c0f3c7350b9fbc0790e1fc0412 5 | url: '' 6 | hidden: '' 7 | date: '2016-12-23T22:29:25.380Z' 8 | data: '2016-12-23T22:29:25.380Z' 9 | -------------------------------------------------------------------------------- /docs/_data/comments/gemified-theme-beta/comment-1483456786593.yml: -------------------------------------------------------------------------------- 1 | _id: 0ef265d0-d1c8-11e6-b0a9-efc179b06d97 2 | message: "Got a little problem here,\r\n\r\nIf I'm using Github pages, should I delete those Theme files?\r\n\r\nbecause after I `bundle update`, they didn't generated as expected, or I've misunderstanding.\r\n\r\nOr, I should follow the instruction in Installation:\r\n> To maintain a local Jekyll environment in sync with GitHub Pages replace the gem \"jekyll\" line with gem \"github-pages\", group: :jekyll_plugins and run the following:\r\n\r\nThanks : )" 3 | name: Specialvict 4 | email: e568a82db250c5130842f4fa42d5cacf 5 | url: 'https://oaleeapp.info' 6 | hidden: '' 7 | date: '2017-01-03T15:19:46.590Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/gemified-theme-beta/comment-1483457152038.yml: -------------------------------------------------------------------------------- 1 | _id: e91c28e0-d1c8-11e6-b0a9-efc179b06d97 2 | message: "@Specialvict - follow [these instructions](https://mmistakes.github.io/minimal-mistakes/docs/quick-start-guide/#github-pages-compatible-method) in the docs. The theme gem install instructions don't apply to you since GitHub Pages doesn't support 3rd party gem themes yet.\r\n\r\nYou need to install the old way of forking the theme files." 3 | name: Michael Rose 4 | email: 1ce71bc10b86565464b612093d89707e 5 | url: 'https://mademistakes.com' 6 | hidden: '' 7 | date: '2017-01-03T15:25:52.037Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/gemified-theme-beta/comment-1519412839827.yml: -------------------------------------------------------------------------------- 1 | _id: c4c722a0-18cc-11e8-81e7-ad1d9008e648 2 | message: great theme! 
3 | name: vern 4 | email: a6f5c444240028e515acd8e8808cd130 5 | url: '' 6 | hidden: '' 7 | date: '2018-02-23T19:07:19.827Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-comments/comment-1470944006665.yml: -------------------------------------------------------------------------------- 1 | message: "![Bill Murray](http://www.fillmurray.com/400/300)\r\n\r\n“It's hard to be an artist. It's hard to be anything. It's hard to be.”" 2 | name: Bill Murray 3 | email: b0caa2a71f5066b3d90711c224578c21 4 | url: '' 5 | hidden: '' 6 | date: '2016-08-11T19:33:25.928Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-comments/comment-1470944162041.yml: -------------------------------------------------------------------------------- 1 | message: "> “I never had seen Seinfeld, and they said, ‘Oh, it’s the last episode.’ And I said, ‘Oh, I’ll watch Seinfeld.’ And it was terrible.”\r\n>\r\n> *— From a 2014 interview with Howard Stern*" 2 | name: Anonymous 3 | email: 8c7e898f1b570760f834ecc03edf6b35 4 | url: '' 5 | hidden: '' 6 | date: '2016-08-11T19:36:01.033Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-comments/comment-1472308473018.yml: -------------------------------------------------------------------------------- 1 | message: test 2 | name: test 3 | email: c028c75814332d38e088e43a252b7092 4 | url: '' 5 | hidden: '' 6 | date: '2016-08-27T14:34:32.281Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-comments/comment-1514406795156.yml: -------------------------------------------------------------------------------- 1 | _id: 29e7b010-eb45-11e7-a44f-935f90061bb7 2 | message: >- 3 | I am not able to figure out the staticman v2 comments. Please help. Even after 4 | updating the end points also its throws me a message that there is an error 5 | with submission 6 | name: Krishna 7 | email: 72e637576fd8729a323d648719daf631 8 | url: '' 9 | hidden: '' 10 | date: '2017-12-27T20:33:15.155Z' 11 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-comments/comment-1514407115153.yml: -------------------------------------------------------------------------------- 1 | _id: e8aa5fc0-eb45-11e7-a44f-935f90061bb7 2 | message: "Please open issue on GitHub and provide a link to a public repo.\r\n\r\n" 3 | name: Michael Rose 4 | email: 1ce71bc10b86565464b612093d89707e 5 | url: 'https://mademistakes.com' 6 | hidden: '' 7 | date: '2017-12-27T20:38:35.152Z' 8 | timestamp: 1514407115 9 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-comments/comment-1538482988032.yml: -------------------------------------------------------------------------------- 1 | _id: eb2fc040-c63d-11e8-a3e5-1590186c2f0d 2 | message: >- 3 | Took a while to get commenting working for me, but got it in the end. I used 4 | the layout for the comments here on my own site. Very clean. 
5 | name: Bobby 6 | email: 2f9024d5ac2e697176aab289c8c7ab76 7 | url: '' 8 | hidden: '' 9 | date: '2018-10-02T12:23:08.031Z' 10 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-header-image-horizontal/comment-1483124729757.yml: -------------------------------------------------------------------------------- 1 | _id: edfee0e0-cec2-11e6-9fec-f15f86beaa6c 2 | message: "How do I add a header image to the minimal mistakes theme? It's given in documentation to add a YAML front matter. But it's not given where to add it? \r\nHeader documentation" 3 | name: shivam mitra 4 | email: 38e378dc28daa978e5e7296723b3a65c 5 | url: 'https://codophobia.github.io/' 6 | hidden: '' 7 | date: '2016-12-30T19:05:29.755Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-header-image-horizontal/comment-1483128389943.yml: -------------------------------------------------------------------------------- 1 | _id: 73977ed0-cecb-11e6-9fec-f15f86beaa6c 2 | message: >- 3 | You add it to the YAML Front Matter of the post or page you want the header 4 | on. It's not globally assigned. If you want the same header on every page you 5 | could set it with [front matter 6 | defaults](https://jekyllrb.com/docs/configuration/#front-matter-defaults). 7 | name: Michael Rose 8 | email: 1ce71bc10b86565464b612093d89707e 9 | url: 'https://mademistakes.com' 10 | hidden: '' 11 | date: '2016-12-30T20:06:29.940Z' 12 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-header-image-text-readability/comment-1474306861206.yml: -------------------------------------------------------------------------------- 1 | message: Test message 2 | name: Artur 3 | email: 1cbebf1e64617de54d7858ffc6d96935 4 | url: '' 5 | hidden: '' 6 | date: '2016-09-19T17:41:00.416Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-header-image-text-readability/comment-1479253931238.yml: -------------------------------------------------------------------------------- 1 | message: 'Cool, now how do I make a splash page my default home page?' 2 | name: Brett 3 | email: 374ca0d969bee21fb740b11da4182306 4 | url: '' 5 | hidden: '' 6 | date: '2016-11-15T23:52:10.556Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-header-image-text-readability/comment-1479265677846.yml: -------------------------------------------------------------------------------- 1 | message: "Simple. Save it in your site root folder as `index.md` or you can also save it in `_pages` folder with whatever name you want and set `permalink: /` in its YAML Front Matter.\r\n\r\nIf you want to see an example check [`_pages/home.md`](https://github.com/mmistakes/minimal-mistakes/blob/master/docs/_pages/home.md) used for this demo site." 
2 | name: Michael Rose 3 | email: 1ce71bc10b86565464b612093d89707e 4 | url: 'https://mademistakes.com' 5 | hidden: '' 6 | date: '2016-11-16T03:07:57.173Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-header-overlay-image/comment-1512840683260.yml: -------------------------------------------------------------------------------- 1 | _id: c6489fc0-dd06-11e7-be15-a3469e549c00 2 | message: >- 3 | Have you think about adding an "height"-limit feature to the tos, allowing 4 | users to use very heigh images, and not struggling with custom CSS and 5 | classes?:-) 6 | name: Tobias Nordahl Kristensen 7 | email: 86a84379052b26c55c912dc27ddd8647 8 | url: '' 9 | hidden: '' 10 | date: '2017-12-09T17:31:23.259Z' 11 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-header-overlay-image/comment-1513110608614.yml: -------------------------------------------------------------------------------- 1 | _id: 3db51ae0-df7b-11e7-8cc2-7d7640aa7c35 2 | message: >- 3 | Sorry for the noob question but what is the proper way to increase the image 4 | containers height? 5 | name: Marc 6 | email: 23569aaeded782b63941771dc067ca28 7 | url: '' 8 | hidden: '' 9 | date: '2017-12-12T20:30:08.613Z' 10 | timestamp: 1513110608 11 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-header-overlay-image/comment-1513111329875.yml: -------------------------------------------------------------------------------- 1 | _id: ec74b620-df7c-11e7-8cc2-7d7640aa7c35 2 | message: "@Marc - you can set a height via CSS on `.page__hero--overlay`. It currently stretches to fit the text content inside of it, but if you want to make it taller just add something like `height: 500px;`.\r\n\r\nThis styling is in [`_sass/_page.scss`](https://github.com/mmistakes/minimal-mistakes/blob/4.8.0/_sass/minimal-mistakes/_page.scss#L136-L147).\r\n\r\nFor further context check [issue #542](https://github.com/mmistakes/minimal-mistakes/issues/542) on GitHub... probably a better place to ask/leave questions than the comments here." 3 | name: Michael Rose 4 | email: 1ce71bc10b86565464b612093d89707e 5 | url: 'https://mademistakes.com' 6 | hidden: '' 7 | date: '2017-12-12T20:42:09.874Z' 8 | timestamp: 1513111329 9 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-header-overlay-image/comment-1513111563922.yml: -------------------------------------------------------------------------------- 1 | _id: 77e56d80-df7d-11e7-8cc2-7d7640aa7c35 2 | message: "@Tobias - No, I have not thought about that. In general I'm not a fan of setting max-heights or widths as they fall apart in a responsive world. What might be 500px high on desktop could be smaller or larger on mobile (and everywhere in between).\r\n\r\nJavaScript could probably be used to determine some of that, but in this case I don't really see the benefit by adding more complexity." 
3 | name: Michael Rose 4 | email: 1ce71bc10b86565464b612093d89707e 5 | url: 'https://mademistakes.com' 6 | hidden: '' 7 | date: '2017-12-12T20:46:03.921Z' 8 | timestamp: 1513111563 9 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-related-posts/comment-1500183131535.yml: -------------------------------------------------------------------------------- 1 | _id: 1dda62b0-69e8-11e7-8901-815fa61174ff 2 | message: I don't see any related posts here. 3 | name: Harry 4 | email: 58dfe5ef6aaf9bf9af5a0b17b52e6168 5 | url: 'http://www.deepfriedbrainproject.com' 6 | hidden: '' 7 | date: '2017-07-16T05:32:11.534Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-related-posts/comment-1500214974083.yml: -------------------------------------------------------------------------------- 1 | _id: 417a69a0-6a32-11e7-8901-815fa61174ff 2 | message: >- 3 | @Harry It's a bug in how Jekyll handles related posts. See this [pull request 4 | on GitHub](https://github.com/mmistakes/minimal-mistakes/pull/978) for the 5 | fix. I haven't merged it yet for a variety of reasons. 6 | name: Michael Rose 7 | email: 1ce71bc10b86565464b612093d89707e 8 | url: 'https://mademistakes.com' 9 | hidden: '' 10 | date: '2017-07-16T14:22:54.082Z' 11 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-sidebar-custom/comment-1519247076880.yml: -------------------------------------------------------------------------------- 1 | _id: d262cff0-174a-11e8-b61b-7b344215416b 2 | message: "Hey. I'm a newbie to github pages and jekyll, but I am liking the setup and loving your theme!\r\n\r\nI have an image that I want to add to the sidebar, but it is a little bit too big. Is there some way I can resize the image in the sidebar YAML Front Matter without needing another copy of it in github? I know I could resize it myself if I was adding it to the body of the text, but I can't find a way to do it in the YAML Front Matter.\r\nThx" 3 | name: Brynjar Smári 4 | email: 07e1d805e3e0472d0d7762ac8f7b2496 5 | url: 'https://binnisb.github.io' 6 | hidden: '' 7 | date: '2018-02-21T21:04:36.879Z' 8 | approved: false 9 | title: Comment 10 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-sidebar-custom/comment-1519247290410.yml: -------------------------------------------------------------------------------- 1 | _id: 51aa9540-174b-11e8-b61b-7b344215416b 2 | message: "No there isn't a native way of doing this. The theme and Jekyll core have no ability to transform your assets.\r\n\r\nYou'll need to manually do it, use a task runner like Gulp or Grunt, or a Jekyll plugin to resize the image for you." 3 | name: Michael Rose 4 | email: 1ce71bc10b86565464b612093d89707e 5 | url: 'https://mademistakes.com' 6 | hidden: '' 7 | date: '2018-02-21T21:08:10.409Z' 8 | approved: false 9 | title: Comment 10 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-sidebar-custom/comment-1520748170396.yml: -------------------------------------------------------------------------------- 1 | _id: d3bbd220-24f1-11e8-af65-d13c8029700e 2 | message: "Hello Rose,\r\nCan you please add a feature for adding a custom sidebar on right hand side or let me know if it already exits or how can I implement it ? 
\r\nThanks" 3 | name: Vipin Kumar 4 | email: eeafc03a07852a239754fb68cca903b3 5 | url: '' 6 | hidden: '' 7 | date: '2018-03-11T06:02:50.395Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-sidebar-nav-list/comment-1492811460488.yml: -------------------------------------------------------------------------------- 1 | _id: 9b722860-26dc-11e7-ba90-7b0064600583 2 | message: >- 3 | Is there a way to have a sidebar link jump to a particular part of the current 4 | page? I have a long post that I want to let a viewer jump to different parts 5 | without creating separate pages. 6 | name: Josh 7 | email: b1d267b408432e054759f6b16d4ff24b 8 | url: 'https://stregerdev.github.io' 9 | hidden: '' 10 | date: '2017-04-21T21:51:00.487Z' 11 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-sidebar-nav-list/comment-1492812977693.yml: -------------------------------------------------------------------------------- 1 | _id: 23c51da0-26e0-11e7-ba90-7b0064600583 2 | message: "@Josh Kramdown auto creates id's on all of your page headlines in a post which you could use for this purpose. If you look at the source on [this page](https://mmistakes.github.io/minimal-mistakes/markup/markup-html-tags-and-formatting/) you'll see what I mean. For example the first heading named **Header one**:\r\n\r\n```html\r\n

<h2 id=\"header-one\">Header one</h2>
\r\n```\r\n\r\nIf you added `url: \"#header-one\"` to your sidebar nav YAML it would jump to that anchor because of what is set on the `id` attribute.\r\n\r\nYou can also insert your own anchors with `` and target the same way... `#whatever-you-want`.\r\n\r\nThere's also several JavaScript solutions out there to things like this too." 3 | name: Michael Rose 4 | email: 1ce71bc10b86565464b612093d89707e 5 | url: 'https://mademistakes.com' 6 | hidden: '' 7 | date: '2017-04-21T22:16:17.691Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-table-of-contents-post/comment-1512118683486.yml: -------------------------------------------------------------------------------- 1 | _id: bc6cbcd0-d675-11e7-920e-d53b2bdd726d 2 | message: >- 3 | it would be nice if the table of content was sticky so it scrolls down with 4 | the user to ease navigation 5 | name: jean 6 | email: c569662e2d1490cd1d090450d8b5babc 7 | url: '' 8 | hidden: '' 9 | date: '2017-12-01T08:58:03.485Z' 10 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-table-of-contents-post/comment-1520683848241.yml: -------------------------------------------------------------------------------- 1 | _id: 10db9fa0-245c-11e8-8f62-99e42cdc233b 2 | message: >- 3 | +1 to jean. If only we can have toc in sidebar navigation so it sticks to the 4 | page end. 5 | name: Albus 6 | email: b2162ec2c865decb4c337acee7694a4c 7 | url: '' 8 | hidden: '' 9 | date: '2018-03-10T12:10:48.240Z' 10 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-table-of-contents-post/comment-1527082094887.yml: -------------------------------------------------------------------------------- 1 | _id: 254049d0-5e8d-11e8-bff3-cf20643057a6 2 | message: "+1 to jean. \r\nFurthermore, for posts with no table to contents, it would be great to have a \"sticky\" post-specific sidebar with CTAs (like different 'shares' for the post - Twitter, Linkedin, FB, etc., #comments, post categories, post tags and subscribe - RSS, newsletter). \r\nHere is an example of such sidebar, despite non-sticky: https://www.feld.com/archives/2018/05/misty-ii-teardown.html\r\nAnd here is another example (with less CTAs, and also non-sticky): https://www.gatesnotes.com/Books/Leonardo-da-Vinci" 3 | name: Mora 4 | email: aead23df2f7cf50789a29b3d9b5a6d5f 5 | url: '' 6 | hidden: '' 7 | date: '2018-05-23T13:28:14.886Z' 8 | timestamp: 1527082094 9 | tags: 10 | - comment-subscription 11 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-table-of-contents-post/comment-1527500055863.yml: -------------------------------------------------------------------------------- 1 | _id: 497911d0-625a-11e8-afe5-3feaa7bb1d43 2 | message: >- 3 | Making a sticky TOC sidebar is quite easy. I have made a separate [blog 4 | post](https://shaharkadmiel.github.io/Sticky-TOC-Sidebar/) about it. 
5 | name: Shahar Shani-Kadmiel 6 | email: 2dd06215bf688e5bacc62f90b15105fc 7 | url: 'https://shaharkadmiel.github.io' 8 | hidden: '' 9 | date: '2018-05-28T09:34:15.862Z' 10 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-table-of-contents-post/comment-1527690060032.yml: -------------------------------------------------------------------------------- 1 | _id: acc930b0-6414-11e8-85f3-dd0128460d26 2 | message: >- 3 | Shahars's sticky sidebar is great on firefox, but doesn't work on Safari. I 4 | haven't tested other browsers. 5 | name: Matthew Dorey 6 | email: 69b0700825ff5bf5df1d9d2d6582cc5e 7 | url: 'https://mattischrome.com' 8 | hidden: '' 9 | date: '2018-05-30T14:21:00.032Z' 10 | tags: [] 11 | timestamp: 1527690060 12 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-table-of-contents-post/comment-1527690281769.yml: -------------------------------------------------------------------------------- 1 | _id: 30e73630-6415-11e8-85f3-dd0128460d26 2 | message: "@Matthew - that's likely due to vendor prefixes missing from the CSS. `position: sticky` isn't enough for webkit browsers like Safari.\r\n\r\n```css\r\nposition: sticky;\r\nposition: -webkit-sticky;\r\nposition: -moz-sticky;\r\nposition: -ms-sticky;\r\nposition: -o-sticky;\r\n```" 3 | name: Michael Rose 4 | email: 1ce71bc10b86565464b612093d89707e 5 | url: 'https://mademistakes.com' 6 | hidden: '' 7 | date: '2018-05-30T14:24:41.768Z' 8 | tags: [] 9 | timestamp: 1527690281 10 | -------------------------------------------------------------------------------- /docs/_data/comments/layout-table-of-contents-post/comment-1540422628114.yml: -------------------------------------------------------------------------------- 1 | _id: feba5d40-d7e1-11e8-a9f1-05b1ca7b5c0d 2 | message: >- 3 | @Shahar not sure if this was added later but you can define toc_sticky: true 4 | in the yaml. 5 | name: Jair G 6 | email: a39a2b0f5ac5fe0ebd38ac9950ab81b8 7 | url: '' 8 | hidden: '' 9 | date: '2018-10-24T23:10:28.113Z' 10 | -------------------------------------------------------------------------------- /docs/_data/comments/markup-image-alignment/comment-1534823211504.yml: -------------------------------------------------------------------------------- 1 | _id: d66726e0-a4f4-11e8-9f86-137f5085ada7 2 | message: "Hey man,\r\nI still feel I haven't conquered the usage of your site at times :D \r\nYou have pretty examples here, but is there a better way to see how they are implemented than to look it up on github?" 
3 | name: Richard Rich Steinmetz 4 | email: c9b21f5e950c2a1f44b82b3be25c0a22 5 | url: 'http://datagoodie.com' 6 | hidden: '' 7 | date: '2018-08-21T03:46:51.504Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/markup-more-images/comment-1472040323579.yml: -------------------------------------------------------------------------------- 1 | message: test 2 | name: test 3 | email: 01540d5a1cdb4d03edb23805df684762 4 | url: '' 5 | hidden: '' 6 | date: '2016-08-24T12:05:22.844Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/markup-more-images/comment-1472146638519.yml: -------------------------------------------------------------------------------- 1 | message: test 2 | name: ppmeng 3 | email: b9c981f67166172c8804b5f9066a404a 4 | url: '' 5 | hidden: '' 6 | date: '2016-08-25T17:37:17.780Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/markup-syntax-highlighting/comment-1470969665387.yml: -------------------------------------------------------------------------------- 1 | message: "Here's a test comment with a Markdown code block:\r\n\r\n```scss\r\nh1, h2, h3, h4, h5, h6 {\r\n margin: 2em 0 0.5em;\r\n line-height: 1.2;\r\n font-family: $header-font-family;\r\n font-weight: bold;\r\n}\r\n```" 2 | name: Michael Rose 3 | email: 1ce71bc10b86565464b612093d89707e 4 | url: 'https://mademistakes.com' 5 | hidden: '' 6 | date: '2016-08-12T02:41:04.706Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/markup-syntax-highlighting/comment-1478928407894.yml: -------------------------------------------------------------------------------- 1 | message: Is there a way to add custom syntax? 2 | name: Anbu 3 | email: 559d2c680d83bc6246b4483a8d9ad7fa 4 | url: '' 5 | hidden: '' 6 | date: '2016-11-12T05:26:47.184Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/markup-syntax-highlighting/comment-1487758246637.yml: -------------------------------------------------------------------------------- 1 | _id: 2d426a80-f8e7-11e6-9b83-8d0f1ec5628f 2 | message: >- 3 | It would be great to be able to easily switch between light and dark 4 | highlighting. 5 | name: looeee 6 | email: 08df1cce15065b1b2547889573b76414 7 | url: '' 8 | hidden: '' 9 | date: '2017-02-22T10:10:46.636Z' 10 | -------------------------------------------------------------------------------- /docs/_data/comments/markup-syntax-highlighting/comment-1505403032256.yml: -------------------------------------------------------------------------------- 1 | _id: a5095860-9961-11e7-be3c-b52cab09c6a8 2 | message: The code blocks look lovely ! Is there a way to use dark theme ? 
3 | name: Kanha 4 | email: 266c737250ef78d52dee3db901ea9e1d 5 | url: '' 6 | hidden: '' 7 | date: '2017-09-14T15:30:32.254Z' 8 | timestamp: 1505403032 9 | -------------------------------------------------------------------------------- /docs/_data/comments/markup-syntax-highlighting/comment-1505403241808.yml: -------------------------------------------------------------------------------- 1 | _id: 21f79d50-9962-11e7-be3c-b52cab09c6a8 2 | message: "@Kanha - the theme includes skins now and there is a \"dark\" version you can use.\r\n\r\n" 3 | name: Michael Rose 4 | email: 1ce71bc10b86565464b612093d89707e 5 | url: 'https://mademistakes.com' 6 | hidden: '' 7 | date: '2017-09-14T15:34:01.807Z' 8 | timestamp: 1505403241 9 | -------------------------------------------------------------------------------- /docs/_data/comments/markup-syntax-highlighting/comment-1514836962551.yml: -------------------------------------------------------------------------------- 1 | _id: b9622300-ef2e-11e7-8e1a-f72b5c3fcf79 2 | message: >- 3 | I'm planning to switch to your theme soon. Is it possible to impose some kind 4 | of maximum-height constraint to fenced code blocks? If I'm posting 200 lines 5 | of code, I'd rather have something like 20 or 30 lines visible with a 6 | scrollbar... 7 | name: MV10 8 | email: ae371690b3859dd1515ccf3e9ddc2ec8 9 | url: '' 10 | hidden: '' 11 | date: '2018-01-01T20:02:42.550Z' 12 | -------------------------------------------------------------------------------- /docs/_data/comments/post-future-date/comment-1472064560364.yml: -------------------------------------------------------------------------------- 1 | message: mm 2 | name: mm 3 | email: 9d0057d30e7a5e44f6378ea2c9c11f5d 4 | url: '' 5 | hidden: '' 6 | date: '2016-08-24T18:49:19.649Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/post-future-date/comment-1472786137736.yml: -------------------------------------------------------------------------------- 1 | message: This is a tst 2 | name: GnCavalry 3 | email: 5669e6e45ccab46a7384a8c8ab88edd2 4 | url: '' 5 | hidden: '' 6 | date: '2016-09-02T03:15:37.068Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/post-gallery/comment-1500055247314.yml: -------------------------------------------------------------------------------- 1 | _id: 5cbffb50-68be-11e7-89b4-79fbd5ed8e2b 2 | message: >- 3 | Can you explain how can I add 4 | this(https://github.com/sachinchoolur/lightGallery) gallery to your theme ? 5 | name: Albus 6 | email: 73823e210b38f5b5fd2d6fba1970fed0 7 | url: '' 8 | hidden: '' 9 | date: '2017-07-14T18:00:47.312Z' 10 | -------------------------------------------------------------------------------- /docs/_data/comments/post-gallery/comment-1500056210776.yml: -------------------------------------------------------------------------------- 1 | _id: 9b21c2a0-68c0-11e7-89b4-79fbd5ed8e2b 2 | message: "@Albus - To start I would follow that repo's instructions. It should be as simple as adding they're JavaScript as instructed. I haven't used that lightbox gallery so don't have experience with it.\r\n\r\nMM ships with [MagnificPopup's lightbox gallery](http://dimsemenov.com/plugins/magnific-popup/) so you might have to rip that out to use lightGallery." 
3 | name: Michael Rose 4 | email: 1ce71bc10b86565464b612093d89707e 5 | url: 'https://mademistakes.com' 6 | hidden: '' 7 | date: '2017-07-14T18:16:50.776Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/post-modified/comment-1497284119888.yml: -------------------------------------------------------------------------------- 1 | _id: 54171710-4f8a-11e7-8049-afd01bcce0e9 2 | message: "When I initially commented I appear to have clicked \r\nthe -Notify me when new comments are added- checkbox and now each time a comment is added I recieve \r\n4 emails with the same comment. Perhaps there \r\nis a way you are able to remove me from that service? Thank you!" 3 | name: Valentina 4 | email: 67028d1b5ddbe6a540aadbd93816c4b4 5 | url: '' 6 | hidden: '' 7 | date: '2017-06-12T16:15:19.871Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/post-modified/comment-1497284892766.yml: -------------------------------------------------------------------------------- 1 | _id: 20cd7140-4f8c-11e7-8049-afd01bcce0e9 2 | message: "@Valentina - Perhaps you're thinking of another site? There is no \"notify me when new comments are added\" checkbox.\r\n\r\nThat said I'm pretty sure there is an unsubscribe link in those emails you can click on to remove your email." 3 | name: Michael Rose 4 | email: 1ce71bc10b86565464b612093d89707e 5 | url: 'https://mademistakes.com' 6 | hidden: '' 7 | date: '2017-06-12T16:28:12.765Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/post-modified/comment-1520673777110.yml: -------------------------------------------------------------------------------- 1 | _id: 9dff9930-2444-11e8-8f62-99e42cdc233b 2 | message: Lol thank you 3 | name: Casimiro Corrales 4 | email: fe98e599bd24fb12fd2f42bb6d77d3a8 5 | url: '' 6 | hidden: '' 7 | date: '2018-03-10T09:22:57.110Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/post-video-youtube/comment-1506623182288.yml: -------------------------------------------------------------------------------- 1 | _id: 87276200-a47a-11e7-adb3-e700074bdbd8 2 | message: "Thanks for an awesome theme. I've really enjoyed getting to use it.\r\n\r\nI'm not sure if this is the right place to ask but I'm trying to get a magnific popup working for a youtube iframe.\r\n\r\nNeither the [magnific youtube popup](http://dimsemenov.com/plugins/magnific-popup/) example nor the [code pen example](https://codepen.io/dimsemenov/pen/zjtbr) work for me.\r\n\r\nI added them directly to a markdown page with the javascript wrapped in script tags.\r\n\r\nI've tried adding the javascript as a file using head_scripts too.\r\n\r\nAny help much appreciated!" 
3 | name: siva 4 | email: 3a5235fe982bc1289695aa54e7274b51 5 | url: 'https://www.spiraltaiji.com' 6 | hidden: '' 7 | date: '2017-09-28T18:26:22.283Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/post-video-youtube/comment-1506623710918.yml: -------------------------------------------------------------------------------- 1 | _id: c23d9d40-a47b-11e7-adb3-e700074bdbd8 2 | message: "@siva - I suspect if you open your web browser's web development tools and take a look at the console you'll see some script errors.\r\n\r\njQuery and the Magnific Popup scripts are loaded at the bottom of every page right before the [closing `` tag](https://github.com/mmistakes/minimal-mistakes/blob/master/_layouts/default.html#L31). You're trying to configure MP in your post which is before the core scripts have a chance to load.\r\n\r\nYou'll need to add your custom scripts after those. Couple of ways you can do that depending on how you're using the theme.\r\n\r\n1. Edit `_layouts/default.html` and place after the scripts.html include.\r\n2. Edit [`_includes/scripts.html`](https://github.com/mmistakes/minimal-mistakes/blob/master/_includes/scripts.html) and place after all scripts.\r\n3. Use the new [`footer_scripts` config](https://mmistakes.github.io/minimal-mistakes/docs/javascript/) to add your own custom script." 3 | name: Michael Rose 4 | email: 1ce71bc10b86565464b612093d89707e 5 | url: 'https://mademistakes.com' 6 | hidden: '' 7 | date: '2017-09-28T18:35:10.917Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/post-video-youtube/comment-1506632190623.yml: -------------------------------------------------------------------------------- 1 | _id: 80879fe0-a48f-11e7-988c-0da33b08c6af 2 | message: "@michael - I went with option 3, adding main.min.js and then my own file to config.yml. Don't know much about javascript, but I've learnt a little now.\r\n\r\nThanks for the quick and detailed response." 3 | name: siva 4 | email: 3a5235fe982bc1289695aa54e7274b51 5 | url: 'https://www.spiraltaiji.com' 6 | hidden: '' 7 | date: '2017-09-28T20:56:30.614Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/welcome-to-jekyll/comment-1470942205700.yml: -------------------------------------------------------------------------------- 1 | message: "This is a test comment with some **Markdown** sprinkled about for *testing purposes*.\r\n\r\n### Subheading in a comment? Madness!\r\n\r\nNam et risus nec ipsum efficitur facilisis. Aenean tincidunt dapibus odio, eget rutrum urna lacinia non. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas." 
2 | name: Michael Rose 3 | email: 1ce71bc10b86565464b612093d89707e 4 | url: 'https://mademistakes.com' 5 | hidden: '' 6 | date: '2016-08-11T19:03:24.929Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/welcome-to-jekyll/comment-1470942247755.yml: -------------------------------------------------------------------------------- 1 | message: '"How much wood would a woodchuck chuck if a woodchuck could chuck wood?"' 2 | name: Jackalope 3 | email: cba827e665ae179e1d1ae007a6c3c1ab 4 | url: '' 5 | hidden: '' 6 | date: '2016-08-11T19:04:06.958Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/welcome-to-jekyll/comment-1470942265819.yml: -------------------------------------------------------------------------------- 1 | message: '"How much wood would a woodchuck chuck if a woodchuck could chuck wood?"' 2 | name: Jackalope Duplicate 3 | email: cba827e665ae179e1d1ae007a6c3c1ab 4 | url: '' 5 | hidden: '' 6 | date: '2016-08-11T19:04:25.085Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/welcome-to-jekyll/comment-1470942493518.yml: -------------------------------------------------------------------------------- 1 | message: "Images can be added to a comment using Markdown like this\r\n\r\n```markdown\r\n![Bill Murray](http://www.fillmurray.com/600/400)\r\n```\r\n![Bill Murray](http://www.fillmurray.com/600/400)" 2 | name: Michael Rose 3 | email: 1ce71bc10b86565464b612093d89707e 4 | url: 'https://mademistakes.com' 5 | hidden: '' 6 | date: '2016-08-11T19:08:12.789Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/welcome-to-jekyll/comment-1471823346931.yml: -------------------------------------------------------------------------------- 1 | message: 'Wow, this is awesome' 2 | name: kkangshawn 3 | email: db92190b2ee6118786fd1f25dceb448c 4 | url: '' 5 | hidden: '' 6 | date: '2016-08-21T23:49:06.270Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/welcome-to-jekyll/comment-1471834988411.yml: -------------------------------------------------------------------------------- 1 | message: Test 2 | name: Test 3 | email: b642b4217b34b1e8d3bd915fc65c4452 4 | url: '' 5 | hidden: '' 6 | date: '2016-08-22T03:03:07.694Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/welcome-to-jekyll/comment-1472786599470.yml: -------------------------------------------------------------------------------- 1 | message: This is a test 2 | name: TestName 3 | email: 97dfebf4098c0f5c16bca61e2b76c373 4 | url: '' 5 | hidden: '' 6 | date: '2016-09-02T03:23:18.756Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/welcome-to-jekyll/comment-1474328950155.yml: -------------------------------------------------------------------------------- 1 | message: just testing as well 2 | name: js 3 | email: f349d4bc6fa472971f68bcccc04337f9 4 | url: '' 5 | hidden: '' 6 | date: '2016-09-19T23:49:09.452Z' 7 | -------------------------------------------------------------------------------- /docs/_data/comments/welcome-to-jekyll/comment-1500505983331.yml: -------------------------------------------------------------------------------- 1 | _id: d073fc00-6cd7-11e7-a639-bb0964fd6b0b 2 | message: 'Another test comment here :)' 3 | name: Bob Whitelock 4 | email: 
38d95e43292a76cbefab8f8a823df64f 5 | url: 'http://www.bobwhitelock.co.uk' 6 | hidden: '' 7 | date: '2017-07-19T23:13:03.331Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/welcome-to-jekyll/comment-1507141538771.yml: -------------------------------------------------------------------------------- 1 | _id: 6b96b520-a931-11e7-9a7d-c99de06bb99b 2 | message: >- 3 | Testing out leaving a comment with the new Staticman v2 endpoint and reCAPTCHA 4 | enabled. 5 | name: Michael Rose 6 | email: 1ce71bc10b86565464b612093d89707e 7 | url: 'https://mademistakes.com' 8 | hidden: '' 9 | date: '2017-10-04T18:25:38.766Z' 10 | -------------------------------------------------------------------------------- /docs/_data/comments/welcome-to-jekyll/comment-1529792272424.yml: -------------------------------------------------------------------------------- 1 | _id: 44fe8b10-7733-11e8-a9b3-5b9ff79eceda 2 | message: "Hey Michael,\r\nI was using your theme and found out that I cant use the collapsible markdown feature. Could you please let me know if there is any solution to this ? Eg code is below\r\n\r\n## collapsible markdown?\r\n\r\n
<details><summary>CLICK ME</summary>\r\n<p>\r\n\r\n#### yes, even hidden code blocks!\r\n\r\n```python\r\nprint(\"hello world!\")\r\n```\r\n\r\n</p>\r\n</details>\r\n" 3 | name: VIpin Kumar 4 | email: eeafc03a07852a239754fb68cca903b3 5 | url: '' 6 | hidden: '' 7 | date: '2018-06-23T22:17:52.423Z' 8 | -------------------------------------------------------------------------------- /docs/_data/comments/welcome-to-jekyll/comment-1529794012288.yml: -------------------------------------------------------------------------------- 1 | _id: 5217d3c0-7737-11e8-a9b3-5b9ff79eceda 2 | message: >- 3 | You're mixing Markdown inside of HTML elements, which is why it's not working. 4 | Look at Kramdown's documentation as they have ways of enabling it I believe. 5 | name: Michael Rose 6 | email: 1ce71bc10b86565464b612093d89707e 7 | url: 'https://mademistakes.com' 8 | hidden: '' 9 | date: '2018-06-23T22:46:52.287Z' 10 | -------------------------------------------------------------------------------- /docs/_docs/01-installation.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Installation 3 | permalink: /docs/installation/ 4 | excerpt: "Instructions for installing the Simple Transformers library." 5 | last_modified_at: 2021/01/29 00:29:07 6 | toc: true 7 | --- 8 | 9 | It's a good idea to always use virtual environments when working with Python packages. 10 | Anaconda/Miniconda is a package manager that lets you create virtual environments and manage package installations smoothly. 11 | 12 | Follow the instructions given below to install Simple Transformers with Anaconda (or Miniconda, a lighter version of Anaconda). 13 | 14 | ## Installation steps 15 | 16 | 1. Install Anaconda or Miniconda Package Manager from [here](https://www.anaconda.com/distribution/). 17 | 2. Create a new virtual environment and install packages. 18 | ```shell 19 | conda create -n st python pandas tqdm 20 | conda activate st 21 | ``` 22 | 3. Using a CUDA-capable GPU is recommended. 23 | To install Pytorch with CUDA support: 24 | ```shell 25 | conda install pytorch pytorch-cuda=11.7 -c pytorch -c nvidia 26 | ``` 27 | CPU only: 28 | ```shell 29 | conda install pytorch cpuonly -c pytorch 30 | ``` 31 | 32 | **Note:** Check the [Pytorch website](https://pytorch.org/get-started/locally/) for the latest instructions. 33 | {: .notice--info} 34 | 35 | 4. Install simpletransformers. 36 | `pip install simpletransformers` 37 | 38 | ## Optional 39 | 40 | 1. Install Weights and Biases (wandb) for experiment tracking and visualizing training in a web browser. 41 | `pip install wandb` 42 | 43 | --- 44 | -------------------------------------------------------------------------------- /docs/_docs/02-upgrading.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Upgrading 3 | permalink: /docs/upgrading/ 4 | excerpt: "Instructions for upgrading the library." 5 | last_modified_at: 2020-05-02 17:57:14 6 | toc: true 7 | --- 8 | 9 | Simple Transformers is updated regularly and using the latest version is highly recommended. This will ensure that you have access to the latest features, improvements, and bug fixes. 10 | 11 | ## Check current version 12 | 13 | To check your current version with pip, you can run: 14 | 15 | ```shell 16 | pip show simpletransformers 17 | ``` 18 | 19 | As Simple Transformers is built on top of the Hugging Face Transformers library, make sure that you are using the latest Transformers release. 20 | 21 | ```shell 22 | pip show transformers 23 | ``` 24 | 25 | ## Update to latest version 26 | 27 | You can update a pip package with the following command.
28 | 29 | ```shell 30 | pip install --upgrade simpletransformers 31 | ``` 32 | 33 | This should upgrade the Transformers package to the required version as well. However, you can also update Transformers manually via: 34 | 35 | ```shell 36 | pip install --upgrade transformers 37 | ``` 38 | -------------------------------------------------------------------------------- /docs/_docs/32-convAI-specifics.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Conversational AI Specifics 3 | permalink: /docs/convAI-specifics/ 4 | excerpt: "Conversational AI Specifics" 5 | last_modified_at: 2020/09/06 21:31:35 6 | toc: true 7 | --- 8 | 9 | Chatbot creation based on the Hugging Face [State-of-the-Art Conversational AI](https://github.com/huggingface/transfer-learning-conv-ai). 10 | 11 | 12 | ## Usage Steps 13 | 14 | Using a `ConvAIModel` in Simple Transformers follows the [standard pattern](/docs/usage/#task-specific-models) except for the interaction functionality. 15 | 16 | 1. Initialize a `ConvAIModel` 17 | 2. Train the model with `train_model()` 18 | 3. Evaluate the model with `eval_model()` 19 | 4. Interact with the model with `interact()` 20 | 21 | 22 | ## Supported model types 23 | 24 | - GPT 25 | - GPT2 26 | 27 | 28 | ## Interacting with a `ConvAIModel` 29 | 30 | ### `interact()` 31 | 32 | The `interact()` method can be used to talk with the model (interactively). Optionally, you can provide a list of strings to the method which will be used to build a *persona* for the chatbot. If it is not given, a random personality from the PERSONA-CHAT dataset will be used. 33 | 34 | ### `interact_single()` 35 | 36 | The `interact_single()` method can be used to communicate with the model through single messages, i.e. by providing the current message and the history of the conversation. Optionally, you can provide a list of strings to the method which will be used to build a *persona* for the chatbot. If it is not given, a random personality from the PERSONA-CHAT dataset will be used. 37 | -------------------------------------------------------------------------------- /docs/_docs/35-convAI-minimal-start.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Conversational AI Examples 3 | permalink: /docs/convAI-minimal-start/ 4 | excerpt: "Conversational AI Examples" 5 | last_modified_at: 2020/10/15 23:16:38 6 | toc: true 7 | --- 8 | 9 | ### Minimal Example 10 | 11 | You can download the pretrained (OpenAI GPT-based) Conversational AI model open-sourced by Hugging Face [here](https://s3.amazonaws.com/models.huggingface.co/transfer-learning-chatbot/gpt_personachat_cache.tar.gz). 12 | 13 | For the minimal example given below, you can download the model and extract it to `gpt_personachat_cache`. Note that you can use any of the other GPT or GPT-2 models but they will require more training. 14 | 15 | You will also need to create the JSON file given in the Data Format section and save it as `data/minimal_train.json`. 16 | 17 | ```python 18 | from simpletransformers.conv_ai import ConvAIModel 19 | 20 | 21 | train_args = { 22 | "num_train_epochs": 50, 23 | "save_model_every_epoch": False, 24 | } 25 | 26 | # Create a ConvAIModel 27 | model = ConvAIModel("gpt", "gpt_personachat_cache", use_cuda=True, args=train_args) 28 | 29 | # Train the model 30 | model.train_model("data/minimal_train.json") 31 | 32 | # Evaluate the model 33 | model.eval_model() 34 | 35 | # Interact with the trained model.
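# Optionally, interact() can be given a list of strings to build a custom persona
# (an illustrative sketch; the persona strings and the `personality` keyword name
# below are assumptions, see the note on interact() after this example):
# model.interact(personality=["i am a friendly chatbot .", "i like to talk about transformers ."])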
36 | model.interact() 37 | 38 | ``` 39 | 40 | The `interact()` method can be given a list of strings which will be used to build a personality. If a list of strings is not given, a random personality will be chosen from PERSONA-CHAT instead. 41 | 42 | ### Real Dataset Example 43 | 44 | - [Persona-Chat Conversational AI](https://medium.com/@chaturangarajapakshe/how-to-train-your-chatbot-with-simple-transformers-da25160859f4?sk=edd04e406e9a3523fcfc46102529e775) 45 | -------------------------------------------------------------------------------- /docs/_docs/36-text-rep-examples.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Text Representation Examples 3 | permalink: /docs/text-rep-examples/ 4 | excerpt: "Text Representation Examples" 5 | last_modified_at: 2020/07/26 23:16:38 6 | toc: true 7 | --- 8 | 9 | ### Minimal example for generating word embeddings 10 | Generate a list of contextual word embeddings for every sentence in a list. 11 | ```python 12 | from simpletransformers.language_representation import RepresentationModel 13 | 14 | sentences = ["Example sentence 1", "Example sentence 2"] 15 | model = RepresentationModel( 16 | model_type="bert", 17 | model_name="bert-base-uncased", 18 | use_cuda=False 19 | ) 20 | word_vectors = model.encode_sentences(sentences, combine_strategy=None) 21 | assert word_vectors.shape == (2, 5, 768) # token vector for every token in each sentence; BERT-based models add 2 tokens per sentence by default ([CLS] & [SEP]) 22 | ``` 23 | 24 | ### Minimal example for generating sentence embeddings 25 | Same code as for generating word embeddings; the only difference is that we pass the `combine_strategy="mean"` parameter. 26 | ```python 27 | from simpletransformers.language_representation import RepresentationModel 28 | sentences = ["Example sentence 1", "Example sentence 2"] 29 | model = RepresentationModel( 30 | model_type="bert", 31 | model_name="bert-base-uncased", 32 | use_cuda=False 33 | ) 34 | word_vectors = model.encode_sentences(sentences, combine_strategy="mean") 35 | assert word_vectors.shape == (2, 768) # one sentence embedding per sentence 36 | ``` 37 | -------------------------------------------------------------------------------- /docs/_docs/38-language-generation-specifics.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Language Generation Specifics 3 | permalink: /docs/language-generation-specifics/ 4 | excerpt: "Specific notes for Language Generation" 5 | last_modified_at: 2020/12/08 00:04:09 6 | toc: true 7 | --- 8 | 9 | The Language Generation model provides an easy way to use a trained Transformer model for language generation. Unlike the other models in Simple Transformers, the `LanguageGenerationModel` does not support training of any kind. If you wish to train or fine-tune models for language generation, please see the [Language Modeling](/docs/lm-specifics/) section. 10 | 11 | **Tip:** This [Medium article](https://towardsdatascience.com/understanding-electra-and-training-an-electra-language-model-3d33e3a9660d?source=friends_link&sk=2b4b4a79954e3d7c84ab863efaea8c65) provides more information on fine-tuning language models and language generation. 12 | {: .notice--success} 13 | 14 | 15 | ## Usage Steps 16 | 17 | The process of performing Language Generation in Simple Transformers consists of initializing a model and generating sequences. 18 | 19 | 1. Initialize a `LanguageGenerationModel` 20 | 2.
Generate text with `generate()` 21 | 22 | 23 | ## Supported Model Types 24 | 25 | New model types are regularly added to the library. Language Generation currently supports the model types given below. 26 | 27 | | Model | Model code for `LanguageGenerationModel` | 28 | | -------------- | ---------------------------------------- | 29 | | CTRL | ctrl | 30 | | GPT-2 | gpt2 | 31 | | OpenAI GPT | openai-gpt | 32 | | Transformer-XL | transfo-xl | 33 | | XLM | xlm | 34 | | XLNet | xlnet | 35 | 36 | **Tip:** The model code is used to specify the `model_type` in a Simple Transformers model. 37 | {: .notice--success} 38 | -------------------------------------------------------------------------------- /docs/_docs/40-language-generation-minimal-start.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Language Generation Minimal Start 3 | permalink: /docs/language-generation-minimal-start/ 4 | excerpt: "Minimal start for Language Generation tasks." 5 | last_modified_at: 2020/12/08 12:19:33 6 | --- 7 | 8 | ```python 9 | import logging 10 | 11 | from simpletransformers.language_generation import LanguageGenerationModel, LanguageGenerationArgs 12 | 13 | 14 | logging.basicConfig(level=logging.INFO) 15 | transformers_logger = logging.getLogger("transformers") 16 | transformers_logger.setLevel(logging.WARNING) 17 | 18 | model = LanguageGenerationModel("gpt2", "gpt2") 19 | model.generate("Let's give a minimal start to the model like") 20 | 21 | ``` 22 | -------------------------------------------------------------------------------- /docs/_docs/41-multi-modal-classification-specifics.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Multi-Modal Classification Specifics 3 | permalink: /docs/multi-modal-classification-specifics/ 4 | excerpt: "Specific notes for Multi-Modal Classification tasks." 5 | last_modified_at: 2020/12/08 15:21:17 6 | toc: true 7 | --- 8 | 9 | Multi-Modal Classification fuses text and image data. This is performed using multi-modal bitransformer models introduced in the paper [Supervised Multimodal Bitransformers for Classifying Images and Text](https://arxiv.org/abs/1909.02950). 10 | 11 | 12 | ## Usage Steps 13 | 14 | The process of performing Multi-Modal Classification in Simple Transformers does not deviate from the [standard pattern](/docs/usage/#task-specific-models). 15 | 16 | 1. Initialize a `MultiModalClassificationModel` 17 | 2. Train the model with `train_model()` 18 | 3. Evaluate the model with `eval_model()` 19 | 4. Make predictions on (unlabelled) data with `predict()` 20 | 21 | 22 | ## Supported Model Types 23 | 24 | 25 | | Model | Model code for `MultiModalClassificationModel` | 26 | | ----------- | --------------------------------------- | 27 | | BERT | bert | 28 | 29 | **Tip:** The model code is used to specify the `model_type` in a Simple Transformers model. 30 | {: .notice--success} 31 | 32 | 33 | ## Label formats 34 | 35 | With Multi-Modal Classification, labels are always given as strings. You may specify a list of labels by passing in the list to the `label_list` argument when creating the model. If `label_list` is given, `num_labels` is not required. 36 | 37 | If `label_list` is not given, `num_labels` is required and the labels should be strings starting from `"0"` up to `str(num_labels - 1)`.
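For example, a minimal sketch of creating the model with an explicit label list (the `bert-base-uncased` model name and the label strings below are illustrative placeholders; `label_list` is the argument described above):

```python
from simpletransformers.classification import MultiModalClassificationModel

# With label_list given, num_labels does not need to be set separately,
# and the same strings are used as the class labels.
model = MultiModalClassificationModel(
    "bert",
    "bert-base-uncased",
    label_list=["positive", "negative", "neutral"],
)
```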
38 | -------------------------------------------------------------------------------- /docs/_docs/50-contributing.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Contributing" 3 | permalink: /docs/contributing/ 4 | excerpt: "How you can contribute to make this theme better." 5 | last_modified_at: 2020-05-02 17:59:07 6 | --- 7 | 8 | Coming soon! -------------------------------------------------------------------------------- /docs/_docs/51-docs-old.md: -------------------------------------------------------------------------------- 1 | The [Weights & Biases](https://www.wandb.com/) framework is supported for visualizing model training. 2 | 3 | To use this, simply set a project name for W&B in the `wandb_project` attribute of the `args` dictionary. This will log all hyperparameter values, training losses, and evaluation metrics to the given project. 4 | 5 | ```python 6 | model = ClassificationModel('roberta', 'roberta-base', args={'wandb_project': 'project-name'}) 7 | ``` 8 | 9 | For a complete example, see [here](https://medium.com/skilai/to-see-is-to-believe-visualizing-the-training-of-machine-learning-models-664ef3fe4f49). -------------------------------------------------------------------------------- /docs/_docs/53-terms.md: -------------------------------------------------------------------------------- 1 | --- 2 | permalink: /terms/ 3 | title: "Terms and Privacy Policy" 4 | last_modified_at: 2022/01/21 10:54:24 5 | --- 6 | 7 | This website does not collect any private information. -------------------------------------------------------------------------------- /docs/_pages/404.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Page Not Found" 3 | excerpt: "Page not found. Your pixels are in another canvas." 4 | sitemap: false 5 | permalink: /404.html 6 | --- 7 | 8 | Sorry, but the page you were trying to view does not exist --- perhaps you can try searching for it below. 9 | 10 | 14 | 16 | -------------------------------------------------------------------------------- /docs/_pages/category-archive.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Posts by Category" 3 | layout: categories 4 | permalink: /categories/ 5 | author_profile: true 6 | --- 7 | -------------------------------------------------------------------------------- /docs/_pages/collection-archive.html: -------------------------------------------------------------------------------- 1 | --- 2 | layout: archive 3 | title: "Posts by Collection" 4 | permalink: /collection-archive/ 5 | author_profile: true 6 | --- 7 | 8 | {% capture written_label %}'None'{% endcapture %} 9 | 10 | {% for collection in site.collections %} 11 | {% unless collection.output == false or collection.label == "posts" %} 12 | {% capture label %}{{ collection.label }}{% endcapture %} 13 | {% if label != written_label %} 14 |
<h2 id="{{ label | slugify }}" class="archive__subtitle">{{ label }}</h2>
15 | {% capture written_label %}{{ label }}{% endcapture %} 16 | {% endif %} 17 | {% endunless %} 18 | {% for post in collection.docs %} 19 | {% unless collection.output == false or collection.label == "posts" %} 20 | {% include archive-single.html %} 21 | {% endunless %} 22 | {% endfor %} 23 | {% endfor %} -------------------------------------------------------------------------------- /docs/_pages/edge-case.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Edge Case 3 | layout: category 4 | permalink: /categories/edge-case/ 5 | taxonomy: Edge Case 6 | --- 7 | 8 | Sample post listing for the category `Edge Case`. 9 | -------------------------------------------------------------------------------- /docs/_pages/home.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: splash 3 | permalink: / 4 | hidden: true 5 | header: 6 | overlay_color: "#5e616c" 7 | overlay_image: /assets/images/mm-home-page-feature.jpg 8 | actions: 9 | - label: " Install now" 10 | url: "/docs/quick-start-guide/" 11 | excerpt: > 12 | A flexible two-column Jekyll theme. Perfect for building personal sites, blogs, and portfolios.
13 |
Latest release v4.19.1 14 | feature_row: 15 | - image_path: /assets/images/mm-customizable-feature.png 16 | alt: "customizable" 17 | title: "Super customizable" 18 | excerpt: "Everything from the menus, sidebars, comments, and more can be configured or set with YAML Front Matter." 19 | url: "/docs/configuration/" 20 | btn_class: "btn--primary" 21 | btn_label: "Learn more" 22 | - image_path: /assets/images/mm-responsive-feature.png 23 | alt: "fully responsive" 24 | title: "Responsive layouts" 25 | excerpt: "Built with HTML5 + CSS3. All layouts are fully responsive with helpers to augment your content." 26 | url: "/docs/layouts/" 27 | btn_class: "btn--primary" 28 | btn_label: "Learn more" 29 | - image_path: /assets/images/mm-free-feature.png 30 | alt: "100% free" 31 | title: "100% free" 32 | excerpt: "Free to use however you want under the MIT License. Clone it, fork it, customize it... whatever!" 33 | url: "/docs/license/" 34 | btn_class: "btn--primary" 35 | btn_label: "Learn more" 36 | --- 37 | 38 | {% include feature_row %} 39 | -------------------------------------------------------------------------------- /docs/_pages/markup.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Markup 3 | layout: tag 4 | permalink: /tags/markup/ 5 | taxonomy: markup 6 | --- 7 | 8 | Sample post listing for the tag `markup`. 9 | -------------------------------------------------------------------------------- /docs/_pages/page-a.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Page A" 3 | permalink: /page-a/ 4 | date: 2011-06-23T18:38:52+00:00 5 | --- 6 | 7 | Integer posuere erat a ante venenatis dapibus posuere velit aliquet. Aenean lacinia bibendum nulla sed consectetur. Etiam porta sem malesuada magna mollis euismod. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. -------------------------------------------------------------------------------- /docs/_pages/page-archive.html: -------------------------------------------------------------------------------- 1 | --- 2 | layout: archive 3 | title: "Page Archive" 4 | permalink: /page-archive/ 5 | author_profile: false 6 | --- 7 | 8 | {% for post in site.pages %} 9 | {% unless post.hidden %} 10 | {% include archive-single.html %} 11 | {% endunless %} 12 | {% endfor %} 13 | -------------------------------------------------------------------------------- /docs/_pages/page-b.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Page B" 3 | permalink: /page-b/ 4 | date: 2011-06-23T18:39:14+00:00 5 | --- 6 | 7 | (lorem ipsum) -------------------------------------------------------------------------------- /docs/_pages/pets.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Pets 3 | layout: collection 4 | permalink: /pets/ 5 | collection: pets 6 | entries_layout: grid 7 | classes: wide 8 | --- 9 | 10 | Sample document listing for the collection `_pets`. 11 | -------------------------------------------------------------------------------- /docs/_pages/portfolio-archive.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Portfolio 3 | layout: collection 4 | permalink: /portfolio/ 5 | collection: portfolio 6 | entries_layout: grid 7 | classes: wide 8 | --- 9 | 10 | Sample document listing for the collection `_portfolio`. 
11 | -------------------------------------------------------------------------------- /docs/_pages/recipes-archive.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: collection 3 | title: "Recipes" 4 | collection: recipes 5 | permalink: /recipes/ 6 | author_profile: false 7 | --- 8 | 9 | Sample document listing for the collection `_recipes`. -------------------------------------------------------------------------------- /docs/_pages/sample-page.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Sample Page" 3 | permalink: /sample-page/ 4 | date: 2016-02-24T03:02:20+00:00 5 | --- 6 | 7 | This is an example page. It's different from a blog post because it will stay in one place and will show up in your site navigation (in most themes). Most people start with an About page that introduces them to potential site visitors. It might say something like this: 8 | 9 | > Hi there! I'm a bike messenger by day, aspiring actor by night, and this is my website. I live in Los Angeles, have a great dog named Jack, and I like piña coladas. (And gettin' caught in the rain.) 10 | 11 | ...or something like this: 12 | 13 | > The XYZ Doohickey Company was founded in 1971, and has been providing quality doohickeys to the public ever since. Located in Gotham City, XYZ employs over 2,000 people and does all kinds of awesome things for the Gotham community. 14 | 15 | You should probably delete this page and create new pages for your content. Have fun! -------------------------------------------------------------------------------- /docs/_pages/sitemap.md: -------------------------------------------------------------------------------- 1 | --- 2 | layout: archive 3 | title: "Sitemap" 4 | permalink: /sitemap/ 5 | author_profile: false 6 | --- 7 | 8 | A list of all the posts and pages found on the site. For you robots out there is an [XML version]({{ "sitemap.xml" | relative_url }}) available for digesting as well. 9 | 10 |
<h3>Pages</h3>
11 | {% for post in site.pages %} 12 | {% include archive-single.html %} 13 | {% endfor %} 14 | 15 |
<h3>Posts</h3>
16 | {% for post in site.posts %} 17 | {% include archive-single.html %} 18 | {% endfor %} 19 | 20 | {% capture written_label %}'None'{% endcapture %} 21 | 22 | {% for collection in site.collections %} 23 | {% unless collection.output == false or collection.label == "posts" %} 24 | {% capture label %}{{ collection.label }}{% endcapture %} 25 | {% if label != written_label %} 26 |
<h3>{{ label }}</h3>
27 | {% capture written_label %}{{ label }}{% endcapture %} 28 | {% endif %} 29 | {% endunless %} 30 | {% for post in collection.docs %} 31 | {% unless collection.output == false or collection.label == "posts" %} 32 | {% include archive-single.html %} 33 | {% endunless %} 34 | {% endfor %} 35 | {% endfor %} -------------------------------------------------------------------------------- /docs/_pages/tag-archive.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Posts by Tag" 3 | permalink: /tags/ 4 | layout: tags 5 | author_profile: true 6 | --- 7 | -------------------------------------------------------------------------------- /docs/_pages/tutorials.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Tutorials 3 | layout: collection 4 | permalink: /tutorials/ 5 | collection: tutorials 6 | entries_layout: grid 7 | classes: wide 8 | --- 9 | 10 | Links to Medium Tutorials will go here. 11 | -------------------------------------------------------------------------------- /docs/_pages/year-archive.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Posts by Year" 3 | permalink: /year-archive/ 4 | layout: posts 5 | author_profile: true 6 | --- 7 | -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes.scss: -------------------------------------------------------------------------------- 1 | /*! 2 | * Minimal Mistakes Jekyll Theme 4.18.1 by Michael Rose 3 | * Copyright 2013-2019 Michael Rose - mademistakes.com | @mmistakes 4 | * Licensed under MIT (https://github.com/mmistakes/minimal-mistakes/blob/master/LICENSE) 5 | */ 6 | 7 | /* Variables */ 8 | @import "minimal-mistakes/variables"; 9 | 10 | /* Mixins and functions */ 11 | @import "minimal-mistakes/vendor/breakpoint/breakpoint"; 12 | @include breakpoint-set("to ems", true); 13 | @import "minimal-mistakes/vendor/magnific-popup/magnific-popup"; // Magnific Popup 14 | @import "minimal-mistakes/vendor/susy/susy"; 15 | @import "minimal-mistakes/mixins"; 16 | 17 | /* Core CSS */ 18 | @import "minimal-mistakes/reset"; 19 | @import "minimal-mistakes/base"; 20 | @import "minimal-mistakes/forms"; 21 | @import "minimal-mistakes/tables"; 22 | @import "minimal-mistakes/animations"; 23 | 24 | /* Components */ 25 | @import "minimal-mistakes/buttons"; 26 | @import "minimal-mistakes/notices"; 27 | @import "minimal-mistakes/masthead"; 28 | @import "minimal-mistakes/navigation"; 29 | @import "minimal-mistakes/footer"; 30 | @import "minimal-mistakes/search"; 31 | @import "minimal-mistakes/syntax"; 32 | 33 | /* Utility classes */ 34 | @import "minimal-mistakes/utilities"; 35 | 36 | /* Layout specific */ 37 | @import "minimal-mistakes/page"; 38 | @import "minimal-mistakes/archive"; 39 | @import "minimal-mistakes/sidebar"; 40 | @import "minimal-mistakes/print"; 41 | -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/_animations.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | ANIMATIONS 3 | ========================================================================== */ 4 | 5 | @-webkit-keyframes intro { 6 | 0% { 7 | opacity: 0; 8 | } 9 | 100% { 10 | opacity: 1; 11 | } 12 | } 13 | 14 | @keyframes intro { 15 | 0% { 16 | opacity: 0; 17 | } 18 | 100% { 19 | opacity: 1; 20 | } 21 | 
} -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/_footer.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | FOOTER 3 | ========================================================================== */ 4 | 5 | .page__footer { 6 | @include clearfix; 7 | float: left; 8 | margin-left: 0; 9 | margin-right: 0; 10 | width: 100%; 11 | clear: both; 12 | margin-top: 3em; 13 | color: $muted-text-color; 14 | -webkit-animation: $intro-transition; 15 | animation: $intro-transition; 16 | -webkit-animation-delay: 0.45s; 17 | animation-delay: 0.45s; 18 | background-color: $footer-background-color; 19 | 20 | footer { 21 | @include clearfix; 22 | margin-left: auto; 23 | margin-right: auto; 24 | margin-top: 2em; 25 | max-width: 100%; 26 | padding: 0 1em 2em; 27 | 28 | @include breakpoint($x-large) { 29 | max-width: $x-large; 30 | } 31 | } 32 | 33 | a { 34 | color: inherit; 35 | text-decoration: none; 36 | 37 | &:hover { 38 | text-decoration: underline; 39 | } 40 | } 41 | 42 | .fas, 43 | .fab, 44 | .far, 45 | .fal { 46 | color: $muted-text-color; 47 | } 48 | } 49 | 50 | .page__footer-copyright { 51 | font-family: $global-font-family; 52 | font-size: $type-size-7; 53 | } 54 | 55 | .page__footer-follow { 56 | ul { 57 | margin: 0; 58 | padding: 0; 59 | list-style-type: none; 60 | } 61 | 62 | li { 63 | display: inline-block; 64 | padding-top: 5px; 65 | padding-bottom: 5px; 66 | font-family: $sans-serif-narrow; 67 | font-size: $type-size-6; 68 | text-transform: uppercase; 69 | } 70 | 71 | li + li:before { 72 | content: ""; 73 | padding-right: 5px; 74 | } 75 | 76 | a { 77 | padding-right: 10px; 78 | font-weight: bold; 79 | } 80 | 81 | .social-icons { 82 | a { 83 | white-space: nowrap; 84 | } 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/_masthead.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | MASTHEAD 3 | ========================================================================== */ 4 | 5 | .masthead { 6 | position: relative; 7 | border-bottom: 1px solid $border-color; 8 | -webkit-animation: $intro-transition; 9 | animation: $intro-transition; 10 | -webkit-animation-delay: 0.15s; 11 | animation-delay: 0.15s; 12 | z-index: 20; 13 | 14 | &__inner-wrap { 15 | @include clearfix; 16 | margin-left: auto; 17 | margin-right: auto; 18 | padding: 1em; 19 | max-width: 100%; 20 | display: -webkit-box; 21 | display: -ms-flexbox; 22 | display: flex; 23 | -webkit-box-pack: justify; 24 | -ms-flex-pack: justify; 25 | justify-content: space-between; 26 | font-family: $sans-serif-narrow; 27 | 28 | @include breakpoint($x-large) { 29 | max-width: $max-width; 30 | } 31 | 32 | nav { 33 | z-index: 10; 34 | } 35 | 36 | a { 37 | text-decoration: none; 38 | } 39 | } 40 | } 41 | 42 | .site-logo img { 43 | max-height: 2rem; 44 | } 45 | 46 | .site-title { 47 | display: -webkit-box; 48 | display: -ms-flexbox; 49 | display: flex; 50 | -ms-flex-item-align: center; 51 | align-self: center; 52 | font-weight: bold; 53 | z-index: 20; 54 | } 55 | 56 | .site-subtitle { 57 | display: block; 58 | font-size: $type-size-8; 59 | } 60 | 61 | .masthead__menu { 62 | float: left; 63 | margin-left: 0; 64 | margin-right: 0; 65 | width: 100%; 66 | clear: both; 67 | 68 | .site-nav { 69 
| margin-left: 0; 70 | 71 | @include breakpoint($small) { 72 | float: right; 73 | } 74 | } 75 | 76 | ul { 77 | margin: 0; 78 | padding: 0; 79 | clear: both; 80 | list-style-type: none; 81 | } 82 | } 83 | 84 | .masthead__menu-item { 85 | display: block; 86 | list-style-type: none; 87 | white-space: nowrap; 88 | 89 | &--lg { 90 | padding-right: 2em; 91 | font-weight: 700; 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/_tables.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | TABLES 3 | ========================================================================== */ 4 | 5 | table { 6 | display: block; 7 | margin-bottom: 1em; 8 | width: 100%; 9 | font-family: $global-font-family; 10 | font-size: $type-size-6; 11 | border-collapse: collapse; 12 | overflow-x: auto; 13 | 14 | & + table { 15 | margin-top: 1em; 16 | } 17 | } 18 | 19 | thead { 20 | background-color: $border-color; 21 | border-bottom: 2px solid mix(#000, $border-color, 25%); 22 | } 23 | 24 | th { 25 | padding: 0.5em; 26 | font-weight: bold; 27 | text-align: left; 28 | } 29 | 30 | td { 31 | padding: 0.5em; 32 | border-bottom: 1px solid mix(#000, $border-color, 25%); 33 | } 34 | 35 | tr, 36 | td, 37 | th { 38 | vertical-align: middle; 39 | } 40 | 41 | tr:nth-child(even) { 42 | background-color: $code-background-color-dark; 43 | } 44 | -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/skins/_air.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | Air skin 3 | ========================================================================== */ 4 | 5 | /* Colors */ 6 | $background-color: #eeeeee !default; 7 | $text-color: #222831 !default; 8 | $muted-text-color: #393e46 !default; 9 | $primary-color: #0092ca !default; 10 | $border-color: mix(#fff, #393e46, 75%) !default; 11 | $footer-background-color: $primary-color !default; 12 | $link-color: #393e46 !default; 13 | $masthead-link-color: $text-color !default; 14 | $masthead-link-color-hover: $text-color !default; 15 | $navicon-link-color-hover: mix(#fff, $text-color, 80%) !default; 16 | 17 | .page__footer { 18 | color: #fff !important; // override 19 | } 20 | 21 | .page__footer-follow .social-icons .svg-inline--fa { 22 | color: inherit; 23 | } 24 | -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/skins/_aqua.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | Aqua skin 3 | ========================================================================== */ 4 | 5 | /* Colors */ 6 | $gray : #1976d2 !default; 7 | $dark-gray : mix(#000, $gray, 40%) !default; 8 | $darker-gray : mix(#000, $gray, 60%) !default; 9 | $light-gray : mix(#fff, $gray, 50%) !default; 10 | $lighter-gray : mix(#fff, $gray, 90%) !default; 11 | 12 | $body-color : #fff !default; 13 | $background-color : #f0fff0 !default; 14 | $code-background-color : $lighter-gray !default; 15 | $code-background-color-dark : $light-gray !default; 16 | $text-color : $dark-gray !default; 17 | $border-color : $lighter-gray !default; 18 | 19 | $primary-color : $gray !default; 20 | $success-color : #27ae60 
!default; 21 | $warning-color : #e67e22 !default; 22 | $danger-color : #c0392b !default; 23 | $info-color : #03a9f4 !default; 24 | 25 | /* links */ 26 | $link-color : $info-color !default; 27 | $link-color-hover : mix(#000, $link-color, 25%) !default; 28 | $link-color-visited : mix(#fff, $link-color, 25%) !default; 29 | $masthead-link-color : $primary-color !default; 30 | $masthead-link-color-hover : mix(#000, $primary-color, 25%) !default; -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/skins/_contrast.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | Contrast skin 3 | ========================================================================== */ 4 | 5 | /* Colors */ 6 | $text-color: #000 !default; 7 | $muted-text-color: $text-color !default; 8 | $primary-color: #ff0000 !default; 9 | $border-color: mix(#fff, $text-color, 75%) !default; 10 | $footer-background-color: #000 !default; 11 | $link-color: #0000ff !default; 12 | $masthead-link-color: $text-color !default; 13 | $masthead-link-color-hover: $text-color !default; 14 | $navicon-link-color-hover: mix(#fff, $text-color, 80%) !default; 15 | 16 | /* contrast syntax highlighting (base16) */ 17 | $base00: #000000 !default; 18 | $base01: #242422 !default; 19 | $base02: #484844 !default; 20 | $base03: #6c6c66 !default; 21 | $base04: #918f88 !default; 22 | $base05: #b5b3aa !default; 23 | $base06: #d9d7cc !default; 24 | $base07: #fdfbee !default; 25 | $base08: #ff6c60 !default; 26 | $base09: #e9c062 !default; 27 | $base0a: #ffffb6 !default; 28 | $base0b: #a8ff60 !default; 29 | $base0c: #c6c5fe !default; 30 | $base0d: #96cbfe !default; 31 | $base0e: #ff73fd !default; 32 | $base0f: #b18a3d !default; 33 | 34 | .page__content { 35 | .notice, 36 | .notice--primary, 37 | .notice--info, 38 | .notice--warning, 39 | .notice--success, 40 | .notice--danger { 41 | color: $text-color; 42 | } 43 | } 44 | 45 | .page__footer { 46 | color: #fff !important; // override 47 | } 48 | 49 | .page__footer-follow .social-icons .svg-inline--fa { 50 | color: inherit; 51 | } 52 | -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/skins/_dark.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | Dark skin 3 | ========================================================================== */ 4 | 5 | /* Colors */ 6 | $background-color: #252a34 !default; 7 | $text-color: #eaeaea !default; 8 | $primary-color: #00adb5 !default; 9 | $border-color: mix(#fff, $background-color, 20%) !default; 10 | $code-background-color: mix(#000, $background-color, 15%) !default; 11 | $code-background-color-dark: mix(#000, $background-color, 20%) !default; 12 | $form-background-color: mix(#000, $background-color, 15%) !default; 13 | $footer-background-color: mix(#000, $background-color, 30%) !default; 14 | $link-color: mix($primary-color, $text-color, 40%) !default; 15 | $link-color-hover: mix(#fff, $link-color, 25%) !default; 16 | $link-color-visited: mix(#000, $link-color, 25%) !default; 17 | $masthead-link-color: $text-color !default; 18 | $masthead-link-color-hover: mix(#000, $text-color, 20%) !default; 19 | $navicon-link-color-hover: mix(#000, $background-color, 30%) !default; 20 | 21 | .author__urls.social-icons .svg-inline--fa, 22 | 
.page__footer-follow .social-icons .svg-inline--fa { 23 | color: inherit; 24 | } 25 | 26 | .ais-search-box .ais-search-box--input { 27 | background-color: $form-background-color; 28 | } -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/skins/_default.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | Default skin 3 | ========================================================================== */ 4 | 5 | // Intentionally left blank 6 | -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/skins/_dirt.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | Dirt skin 3 | ========================================================================== */ 4 | 5 | /* Colors */ 6 | $background-color: #f3f3f3 !default; 7 | $text-color: #343434 !default; 8 | $muted-text-color: #8e8b82 !default; 9 | $primary-color: #343434 !default; 10 | $border-color: #e9dcbe !default; 11 | $footer-background-color: #e9dcbe !default; 12 | $link-color: #343434 !default; 13 | $masthead-link-color: $text-color !default; 14 | $masthead-link-color-hover: $text-color !default; 15 | $navicon-link-color-hover: mix(#fff, $text-color, 80%) !default; 16 | 17 | /* dirt syntax highlighting (base16) */ 18 | $base00: #231e18 !default; 19 | $base01: #302b25 !default; 20 | $base02: #48413a !default; 21 | $base03: #9d8b70 !default; 22 | $base04: #b4a490 !default; 23 | $base05: #cabcb1 !default; 24 | $base06: #d7c8bc !default; 25 | $base07: #e4d4c8 !default; 26 | $base08: #d35c5c !default; 27 | $base09: #ca7f32 !default; 28 | $base0a: #e0ac16 !default; 29 | $base0b: #b7ba53 !default; 30 | $base0c: #6eb958 !default; 31 | $base0d: #88a4d3 !default; 32 | $base0e: #bb90e2 !default; 33 | $base0f: #b49368 !default; 34 | -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/skins/_mint.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | Mint skin 3 | ========================================================================== */ 4 | 5 | /* Colors */ 6 | $background-color: #f3f6f6 !default; 7 | $text-color: #40514e !default; 8 | $muted-text-color: #40514e !default; 9 | $primary-color: #11999e !default; 10 | $border-color: mix(#fff, #40514e, 75%) !default; 11 | $footer-background-color: #30e3ca !default; 12 | $link-color: #11999e !default; 13 | $masthead-link-color: $text-color !default; 14 | $masthead-link-color-hover: $text-color !default; 15 | $navicon-link-color-hover: mix(#fff, $text-color, 80%) !default; 16 | 17 | .page__footer { 18 | color: #fff !important; // override 19 | } 20 | 21 | .page__footer-follow .social-icons .svg-inline--fa { 22 | color: inherit; 23 | } 24 | -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/skins/_neon.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | Neon skin 3 | ========================================================================== */ 4 | 5 | /* Colors */ 6 | $background-color: #141010 !default; 7 | 
$text-color: #fff6fb !default; 8 | $primary-color: #f21368 !default; 9 | $border-color: mix(#fff, $background-color, 20%) !default; 10 | $code-background-color: mix(#000, $background-color, 15%) !default; 11 | $code-background-color-dark: mix(#000, $background-color, 20%) !default; 12 | $form-background-color: mix(#000, $background-color, 15%) !default; 13 | $footer-background-color: mix($primary-color, #000, 10%) !default; 14 | $link-color: $primary-color !default; 15 | $link-color-hover: mix(#fff, $link-color, 25%) !default; 16 | $link-color-visited: mix(#000, $link-color, 25%) !default; 17 | $masthead-link-color: $text-color !default; 18 | $masthead-link-color-hover: mix(#000, $text-color, 20%) !default; 19 | $navicon-link-color-hover: mix(#000, $background-color, 30%) !default; 20 | 21 | /* neon syntax highlighting (base16) */ 22 | $base00: #ffffff !default; 23 | $base01: #e0e0e0 !default; 24 | $base02: #d0d0d0 !default; 25 | $base03: #b0b0b0 !default; 26 | $base04: #000000 !default; 27 | $base05: #101010 !default; 28 | $base06: #151515 !default; 29 | $base07: #202020 !default; 30 | $base08: #ff0086 !default; 31 | $base09: #fd8900 !default; 32 | $base0a: #aba800 !default; 33 | $base0b: #00c918 !default; 34 | $base0c: #1faaaa !default; 35 | $base0d: #3777e6 !default; 36 | $base0e: #ad00a1 !default; 37 | $base0f: #cc6633 !default; 38 | 39 | .author__urls.social-icons .svg-inline--fa, 40 | .page__footer-follow .social-icons .svg-inline--fa { 41 | color: inherit; 42 | } 43 | 44 | /* next/previous buttons */ 45 | .pagination--pager { 46 | color: $text-color; 47 | background-color: $primary-color; 48 | border-color: transparent; 49 | 50 | &:visited { 51 | color: $text-color; 52 | } 53 | } 54 | 55 | .ais-search-box .ais-search-box--input { 56 | background-color: $form-background-color; 57 | } -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/skins/_plum.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | Plum skin 3 | ========================================================================== */ 4 | 5 | /* Colors */ 6 | $background-color: #521477 !default; 7 | $text-color: #fffd86 !default; 8 | $primary-color: #c327ab !default; 9 | $border-color: mix(#fff, $background-color, 20%) !default; 10 | $code-background-color: mix(#000, $background-color, 15%) !default; 11 | $code-background-color-dark: mix(#000, $background-color, 20%) !default; 12 | $form-background-color: mix(#000, $background-color, 15%) !default; 13 | $footer-background-color: mix(#000, $background-color, 25%) !default; 14 | $link-color: $primary-color !default; 15 | $link-color-hover: mix(#fff, $link-color, 25%) !default; 16 | $link-color-visited: mix(#000, $link-color, 25%) !default; 17 | $masthead-link-color: $text-color !default; 18 | $masthead-link-color-hover: mix(#000, $text-color, 20%) !default; 19 | $navicon-link-color-hover: mix(#000, $background-color, 30%) !default; 20 | 21 | /* plum syntax highlighting (base16) */ 22 | $base00: #ffffff !default; 23 | $base01: #e0e0e0 !default; 24 | $base02: #d0d0d0 !default; 25 | $base03: #b0b0b0 !default; 26 | $base04: #000000 !default; 27 | $base05: #101010 !default; 28 | $base06: #151515 !default; 29 | $base07: #202020 !default; 30 | $base08: #ff0086 !default; 31 | $base09: #fd8900 !default; 32 | $base0a: #aba800 !default; 33 | $base0b: #00c918 !default; 34 | $base0c: #1faaaa !default; 35 | $base0d: 
#3777e6 !default; 36 | $base0e: #ad00a1 !default; 37 | $base0f: #cc6633 !default; 38 | 39 | .author__urls.social-icons .svg-inline--fa, 40 | .page__footer-follow .social-icons .svg-inline--fa { 41 | color: inherit; 42 | } 43 | 44 | .page__content { 45 | a, 46 | a:visited { 47 | color: inherit; 48 | } 49 | } 50 | 51 | /* next/previous buttons */ 52 | .pagination--pager { 53 | color: $text-color; 54 | background-color: $primary-color; 55 | border-color: transparent; 56 | 57 | &:visited { 58 | color: $text-color; 59 | } 60 | } 61 | 62 | .ais-search-box .ais-search-box--input { 63 | background-color: $form-background-color; 64 | } -------------------------------------------------------------------------------- /docs/_sass/minimal-mistakes/skins/_sunrise.scss: -------------------------------------------------------------------------------- 1 | /* ========================================================================== 2 | Sunrise skin 3 | ========================================================================== */ 4 | 5 | /* Colors */ 6 | $dark-gray: #0e2431 !default; 7 | $background-color: #e8d5b7 !default; 8 | $text-color: #000 !default; 9 | $muted-text-color: $dark-gray !default; 10 | $primary-color: #fc3a52 !default; 11 | $border-color: mix(#000, $background-color, 20%) !default; 12 | $code-background-color: mix(#fff, $background-color, 20%) !default; 13 | $code-background-color-dark: mix(#000, $background-color, 10%) !default; 14 | $form-background-color: mix(#fff, $background-color, 15%) !default; 15 | $footer-background-color: #f9b248 !default; 16 | $link-color: mix(#000, $primary-color, 10%) !default; 17 | $link-color-hover: mix(#fff, $link-color, 25%) !default; 18 | $link-color-visited: mix(#000, $link-color, 25%) !default; 19 | $masthead-link-color: $text-color !default; 20 | $masthead-link-color-hover: mix(#000, $text-color, 20%) !default; 21 | $navicon-link-color-hover: mix(#000, $background-color, 30%) !default; 22 | 23 | /* sunrise syntax highlighting (base16) */ 24 | $base00: #1d1f21 !default; 25 | $base01: #282a2e !default; 26 | $base02: #373b41 !default; 27 | $base03: #969896 !default; 28 | $base04: #b4b7b4 !default; 29 | $base05: #c5c8c6 !default; 30 | $base06: #e0e0e0 !default; 31 | $base07: #ffffff !default; 32 | $base08: #cc6666 !default; 33 | $base09: #de935f !default; 34 | $base0a: #f0c674 !default; 35 | $base0b: #b5bd68 !default; 36 | $base0c: #8abeb7 !default; 37 | $base0d: #81a2be !default; 38 | $base0e: #b294bb !default; 39 | $base0f: #a3685a !default; 40 | 41 | .author__urls.social-icons .fa, 42 | .page__footer-follow .social-icons .svg-inline--fa { 43 | color: inherit; 44 | } 45 | -------------------------------------------------------------------------------- /docs/assets/images/3953273590_704e3899d5_m.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/3953273590_704e3899d5_m.jpg -------------------------------------------------------------------------------- /docs/assets/images/500x300.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/500x300.png -------------------------------------------------------------------------------- /docs/assets/images/air-skin-archive-large.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/air-skin-archive-large.png -------------------------------------------------------------------------------- /docs/assets/images/air-skin-archive.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/air-skin-archive.png -------------------------------------------------------------------------------- /docs/assets/images/air-skin-post-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/air-skin-post-large.png -------------------------------------------------------------------------------- /docs/assets/images/air-skin-post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/air-skin-post.png -------------------------------------------------------------------------------- /docs/assets/images/android-chrome-144x144.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/android-chrome-144x144.png -------------------------------------------------------------------------------- /docs/assets/images/android-chrome-192x192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/android-chrome-192x192.png -------------------------------------------------------------------------------- /docs/assets/images/android-chrome-36x36.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/android-chrome-36x36.png -------------------------------------------------------------------------------- /docs/assets/images/android-chrome-48x48.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/android-chrome-48x48.png -------------------------------------------------------------------------------- /docs/assets/images/android-chrome-72x72.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/android-chrome-72x72.png -------------------------------------------------------------------------------- /docs/assets/images/android-chrome-96x96.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/android-chrome-96x96.png 
-------------------------------------------------------------------------------- /docs/assets/images/apple-touch-icon-114x114.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/apple-touch-icon-114x114.png -------------------------------------------------------------------------------- /docs/assets/images/apple-touch-icon-120x120.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/apple-touch-icon-120x120.png -------------------------------------------------------------------------------- /docs/assets/images/apple-touch-icon-144x144.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/apple-touch-icon-144x144.png -------------------------------------------------------------------------------- /docs/assets/images/apple-touch-icon-152x152.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/apple-touch-icon-152x152.png -------------------------------------------------------------------------------- /docs/assets/images/apple-touch-icon-180x180.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/apple-touch-icon-180x180.png -------------------------------------------------------------------------------- /docs/assets/images/apple-touch-icon-57x57.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/apple-touch-icon-57x57.png -------------------------------------------------------------------------------- /docs/assets/images/apple-touch-icon-60x60.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/apple-touch-icon-60x60.png -------------------------------------------------------------------------------- /docs/assets/images/apple-touch-icon-72x72.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/apple-touch-icon-72x72.png -------------------------------------------------------------------------------- /docs/assets/images/apple-touch-icon-76x76.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/apple-touch-icon-76x76.png -------------------------------------------------------------------------------- /docs/assets/images/apple-touch-icon-precomposed.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/apple-touch-icon-precomposed.png -------------------------------------------------------------------------------- /docs/assets/images/apple-touch-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/apple-touch-icon.png -------------------------------------------------------------------------------- /docs/assets/images/aqua-skin-archive-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/aqua-skin-archive-large.png -------------------------------------------------------------------------------- /docs/assets/images/aqua-skin-archive.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/aqua-skin-archive.png -------------------------------------------------------------------------------- /docs/assets/images/aqua-skin-post-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/aqua-skin-post-large.png -------------------------------------------------------------------------------- /docs/assets/images/aqua-skin-post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/aqua-skin-post.png -------------------------------------------------------------------------------- /docs/assets/images/bio-photo-2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/bio-photo-2.jpg -------------------------------------------------------------------------------- /docs/assets/images/bio-photo.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/bio-photo.jpg -------------------------------------------------------------------------------- /docs/assets/images/browserconfig.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | #000000 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /docs/assets/images/contrast-code-block.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/contrast-code-block.jpg -------------------------------------------------------------------------------- /docs/assets/images/contrast-skin-archive-large.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/contrast-skin-archive-large.png -------------------------------------------------------------------------------- /docs/assets/images/contrast-skin-archive.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/contrast-skin-archive.png -------------------------------------------------------------------------------- /docs/assets/images/contrast-skin-post-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/contrast-skin-post-large.png -------------------------------------------------------------------------------- /docs/assets/images/contrast-skin-post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/contrast-skin-post.png -------------------------------------------------------------------------------- /docs/assets/images/dark-code-block.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/dark-code-block.jpg -------------------------------------------------------------------------------- /docs/assets/images/dark-skin-archive-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/dark-skin-archive-large.png -------------------------------------------------------------------------------- /docs/assets/images/dark-skin-archive.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/dark-skin-archive.png -------------------------------------------------------------------------------- /docs/assets/images/dark-skin-post-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/dark-skin-post-large.png -------------------------------------------------------------------------------- /docs/assets/images/dark-skin-post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/dark-skin-post.png -------------------------------------------------------------------------------- /docs/assets/images/default-code-block.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/default-code-block.jpg -------------------------------------------------------------------------------- /docs/assets/images/dirt-skin-archive-large.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/dirt-skin-archive-large.png -------------------------------------------------------------------------------- /docs/assets/images/dirt-skin-archive.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/dirt-skin-archive.png -------------------------------------------------------------------------------- /docs/assets/images/dirt-skin-post-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/dirt-skin-post-large.png -------------------------------------------------------------------------------- /docs/assets/images/dirt-skin-post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/dirt-skin-post.png -------------------------------------------------------------------------------- /docs/assets/images/dracula-code-block.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/dracula-code-block.jpg -------------------------------------------------------------------------------- /docs/assets/images/facebook-share-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/facebook-share-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/favicon-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/favicon-16x16.png -------------------------------------------------------------------------------- /docs/assets/images/favicon-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/favicon-32x32.png -------------------------------------------------------------------------------- /docs/assets/images/favicon-96x96.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/favicon-96x96.png -------------------------------------------------------------------------------- /docs/assets/images/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/favicon.ico -------------------------------------------------------------------------------- /docs/assets/images/foo-bar-identity-th.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/foo-bar-identity-th.jpg -------------------------------------------------------------------------------- /docs/assets/images/foo-bar-identity.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/foo-bar-identity.jpg -------------------------------------------------------------------------------- /docs/assets/images/google-custom-search-engine-layout.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/google-custom-search-engine-layout.png -------------------------------------------------------------------------------- /docs/assets/images/home-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/home-1.jpg -------------------------------------------------------------------------------- /docs/assets/images/home.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/home.jpeg -------------------------------------------------------------------------------- /docs/assets/images/image-alignment-1200x4002.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/image-alignment-1200x4002.jpg -------------------------------------------------------------------------------- /docs/assets/images/image-alignment-150x150.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/image-alignment-150x150.jpg -------------------------------------------------------------------------------- /docs/assets/images/image-alignment-300x200.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/image-alignment-300x200.jpg -------------------------------------------------------------------------------- /docs/assets/images/image-alignment-580x300.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/image-alignment-580x300.jpg -------------------------------------------------------------------------------- /docs/assets/images/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Minimal Mistakes", 3 | "icons": [ 4 | { 5 | "src": "\/images\/android-chrome-36x36.png?v=M44lzPylqQ", 6 | "sizes": "36x36", 7 | "type": "image\/png", 8 | "density": 0.75 9 | }, 10 | { 11 | "src": 
"\/images\/android-chrome-48x48.png?v=M44lzPylqQ", 12 | "sizes": "48x48", 13 | "type": "image\/png", 14 | "density": 1 15 | }, 16 | { 17 | "src": "\/images\/android-chrome-72x72.png?v=M44lzPylqQ", 18 | "sizes": "72x72", 19 | "type": "image\/png", 20 | "density": 1.5 21 | }, 22 | { 23 | "src": "\/images\/android-chrome-96x96.png?v=M44lzPylqQ", 24 | "sizes": "96x96", 25 | "type": "image\/png", 26 | "density": 2 27 | }, 28 | { 29 | "src": "\/images\/android-chrome-144x144.png?v=M44lzPylqQ", 30 | "sizes": "144x144", 31 | "type": "image\/png", 32 | "density": 3 33 | }, 34 | { 35 | "src": "\/images\/android-chrome-192x192.png?v=M44lzPylqQ", 36 | "sizes": "192x192", 37 | "type": "image\/png", 38 | "density": 4 39 | } 40 | ] 41 | } 42 | -------------------------------------------------------------------------------- /docs/assets/images/markup-syntax-highlighting-teaser.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/markup-syntax-highlighting-teaser.jpg -------------------------------------------------------------------------------- /docs/assets/images/masthead-search.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/masthead-search.gif -------------------------------------------------------------------------------- /docs/assets/images/michael-rose.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/michael-rose.jpg -------------------------------------------------------------------------------- /docs/assets/images/mint-skin-archive-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mint-skin-archive-large.png -------------------------------------------------------------------------------- /docs/assets/images/mint-skin-archive.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mint-skin-archive.png -------------------------------------------------------------------------------- /docs/assets/images/mint-skin-post-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mint-skin-post-large.png -------------------------------------------------------------------------------- /docs/assets/images/mint-skin-post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mint-skin-post.png -------------------------------------------------------------------------------- /docs/assets/images/mm-archive-grid-view-example.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-archive-grid-view-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-author-profile-reddit-color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-author-profile-reddit-color.png -------------------------------------------------------------------------------- /docs/assets/images/mm-author-profile-reddit-gs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-author-profile-reddit-gs.png -------------------------------------------------------------------------------- /docs/assets/images/mm-author-sidebar-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-author-sidebar-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-breadcrumbs-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-breadcrumbs-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-browser-mockups.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-browser-mockups.png -------------------------------------------------------------------------------- /docs/assets/images/mm-bundle-install.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-bundle-install.gif -------------------------------------------------------------------------------- /docs/assets/images/mm-custom-sidebar-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-custom-sidebar-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-custom-sidebar-nav.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-custom-sidebar-nav.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-customizable-feature.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-customizable-feature.png 
-------------------------------------------------------------------------------- /docs/assets/images/mm-free-feature.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-free-feature.png -------------------------------------------------------------------------------- /docs/assets/images/mm-gh-pages.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-gh-pages.gif -------------------------------------------------------------------------------- /docs/assets/images/mm-github-copy-repo-url.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-github-copy-repo-url.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-github-edit-config.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-github-edit-config.gif -------------------------------------------------------------------------------- /docs/assets/images/mm-header-overlay-black-filter.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-header-overlay-black-filter.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-header-overlay-red-filter.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-header-overlay-red-filter.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-home-page-feature.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-home-page-feature.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-home-post-pagination-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-home-post-pagination-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-layout-archive-taxonomy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-layout-archive-taxonomy.png -------------------------------------------------------------------------------- /docs/assets/images/mm-layout-archive.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-layout-archive.png -------------------------------------------------------------------------------- /docs/assets/images/mm-layout-examples.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-layout-examples.png -------------------------------------------------------------------------------- /docs/assets/images/mm-layout-single-header.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-layout-single-header.png -------------------------------------------------------------------------------- /docs/assets/images/mm-layout-single-meta.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-layout-single-meta.png -------------------------------------------------------------------------------- /docs/assets/images/mm-layout-single.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-layout-single.png -------------------------------------------------------------------------------- /docs/assets/images/mm-layout-splash.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-layout-splash.png -------------------------------------------------------------------------------- /docs/assets/images/mm-masthead-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-masthead-logo.png -------------------------------------------------------------------------------- /docs/assets/images/mm-paragraph-indent-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-paragraph-indent-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-portfolio-collection-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-portfolio-collection-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-priority-plus-masthead.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-priority-plus-masthead.gif -------------------------------------------------------------------------------- 
/docs/assets/images/mm-read-time-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-read-time-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-responsive-feature.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-responsive-feature.png -------------------------------------------------------------------------------- /docs/assets/images/mm-single-header-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-single-header-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-single-header-overlay-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-single-header-overlay-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-single-header-overlay-fill-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-single-header-overlay-fill-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-social-share-links-default.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-social-share-links-default.png -------------------------------------------------------------------------------- /docs/assets/images/mm-social-share-links-reddit-color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-social-share-links-reddit-color.png -------------------------------------------------------------------------------- /docs/assets/images/mm-social-share-links-reddit-gs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-social-share-links-reddit-gs.png -------------------------------------------------------------------------------- /docs/assets/images/mm-staticman-pr-webhook.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-staticman-pr-webhook.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-susy-grid-overlay.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-susy-grid-overlay.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-teaser-images-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-teaser-images-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-theme-fork-repo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-theme-fork-repo.png -------------------------------------------------------------------------------- /docs/assets/images/mm-theme-post-600.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-theme-post-600.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-theme-post-750.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-theme-post-750.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-toc-helper-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-toc-helper-example.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-twitter-card-summary-image.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-twitter-card-summary-image.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-twitter-card-summary-large.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-twitter-card-summary-large.jpg -------------------------------------------------------------------------------- /docs/assets/images/mm-ui-text-labels.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mm-ui-text-labels.jpg -------------------------------------------------------------------------------- /docs/assets/images/mstile-144x144.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mstile-144x144.png -------------------------------------------------------------------------------- /docs/assets/images/mstile-150x150.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mstile-150x150.png -------------------------------------------------------------------------------- /docs/assets/images/mstile-310x150.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mstile-310x150.png -------------------------------------------------------------------------------- /docs/assets/images/mstile-310x310.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mstile-310x310.png -------------------------------------------------------------------------------- /docs/assets/images/mstile-70x70.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/mstile-70x70.png -------------------------------------------------------------------------------- /docs/assets/images/neon-code-block.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/neon-code-block.jpg -------------------------------------------------------------------------------- /docs/assets/images/neon-skin-archive-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/neon-skin-archive-large.png -------------------------------------------------------------------------------- /docs/assets/images/neon-skin-archive.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/neon-skin-archive.png -------------------------------------------------------------------------------- /docs/assets/images/neon-skin-post-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/neon-skin-post-large.png -------------------------------------------------------------------------------- /docs/assets/images/neon-skin-post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/neon-skin-post.png -------------------------------------------------------------------------------- /docs/assets/images/page-header-image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/page-header-image.png -------------------------------------------------------------------------------- 
/docs/assets/images/page-header-og-image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/page-header-og-image.png -------------------------------------------------------------------------------- /docs/assets/images/page-header-overlay-image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/page-header-overlay-image.png -------------------------------------------------------------------------------- /docs/assets/images/page-header-teaser.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/page-header-teaser.png -------------------------------------------------------------------------------- /docs/assets/images/paragraph-indent.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/paragraph-indent.png -------------------------------------------------------------------------------- /docs/assets/images/paragraph-no-indent.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/paragraph-no-indent.png -------------------------------------------------------------------------------- /docs/assets/images/plum-code-block.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/plum-code-block.jpg -------------------------------------------------------------------------------- /docs/assets/images/plum-skin-archive-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/plum-skin-archive-large.png -------------------------------------------------------------------------------- /docs/assets/images/plum-skin-archive.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/plum-skin-archive.png -------------------------------------------------------------------------------- /docs/assets/images/plum-skin-post-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/plum-skin-post-large.png -------------------------------------------------------------------------------- /docs/assets/images/plum-skin-post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/plum-skin-post.png 
-------------------------------------------------------------------------------- /docs/assets/images/robot-face-emoji-by-google.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/robot-face-emoji-by-google.png -------------------------------------------------------------------------------- /docs/assets/images/safari-pinned-tab.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 7 | 8 | Created by potrace 1.11, written by Peter Selinger 2001-2013 9 | 10 | 12 | 26 | 29 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /docs/assets/images/search-layout-example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/search-layout-example.png -------------------------------------------------------------------------------- /docs/assets/images/site-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/site-logo.png -------------------------------------------------------------------------------- /docs/assets/images/social-media-preview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/social-media-preview.png -------------------------------------------------------------------------------- /docs/assets/images/solarized-light-code-block.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/solarized-light-code-block.jpg -------------------------------------------------------------------------------- /docs/assets/images/sunrise-code-block.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/sunrise-code-block.jpg -------------------------------------------------------------------------------- /docs/assets/images/sunrise-skin-archive-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/sunrise-skin-archive-large.png -------------------------------------------------------------------------------- /docs/assets/images/sunrise-skin-archive.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/sunrise-skin-archive.png -------------------------------------------------------------------------------- /docs/assets/images/sunrise-skin-post-large.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/sunrise-skin-post-large.png -------------------------------------------------------------------------------- /docs/assets/images/sunrise-skin-post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/sunrise-skin-post.png -------------------------------------------------------------------------------- /docs/assets/images/thilina-rajapakse.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/thilina-rajapakse.jpeg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-gallery-image-1-th.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-gallery-image-1-th.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-gallery-image-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-gallery-image-1.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-gallery-image-2-th.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-gallery-image-2-th.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-gallery-image-2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-gallery-image-2.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-gallery-image-3-th.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-gallery-image-3-th.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-gallery-image-3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-gallery-image-3.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-gallery-image-4-th.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-gallery-image-4-th.jpg 
-------------------------------------------------------------------------------- /docs/assets/images/unsplash-gallery-image-4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-gallery-image-4.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-image-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-image-1.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-image-10.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-image-10.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-image-11.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-image-11.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-image-2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-image-2.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-image-3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-image-3.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-image-4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-image-4.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-image-5.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-image-5.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-image-6.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-image-6.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-image-7.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-image-7.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-image-8.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-image-8.jpg -------------------------------------------------------------------------------- /docs/assets/images/unsplash-image-9.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/assets/images/unsplash-image-9.jpg -------------------------------------------------------------------------------- /docs/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/favicon.ico -------------------------------------------------------------------------------- /docs/index.html: -------------------------------------------------------------------------------- 1 | --- 2 | layout: splash 3 | permalink: / 4 | hidden: true 5 | header: 6 | overlay_color: "#5e616c" 7 | overlay_filter: rgba(0, 0, 0, 0.7) 8 | overlay_image: /assets/images/home-1.jpg 9 | actions: 10 | - label: " Install now" 11 | url: "/docs/installation/" 12 | excerpt: > 13 | Using Transformer models has never been simpler!

14 |   Built-in support for: 15 |   • Text Classification 16 |   • Token Classification 17 |   • Question Answering 18 |   • Language Modeling 19 |   • Language Generation 20 |   • Multi-Modal Classification 21 |   • Conversational AI 22 |   • Text Representation Generation
23 | feature_row: 24 | - title: "Simple but Powerful" 25 | excerpt: "Get started with 3 lines of code, or configure every detail." 26 | url: "/docs/usage/#configuring-a-simple-transformers-model" 27 | btn_class: "btn--primary" 28 | btn_label: "Learn more" 29 | - title: "Consistent but Flexible" 30 | excerpt: "All tasks follow a consistent pattern, but are flexible when necessary." 31 | url: "/docs/usage/#task-specific-models" 32 | btn_class: "btn--primary" 33 | btn_label: "Learn more" 34 | - title: "Beginner Friendly" 35 | excerpt: "Transformers are amazing and using them shouldn't be difficult." 36 | url: "/about/" 37 | btn_class: "btn--primary" 38 | btn_label: "Learn more" 39 | --- 40 | 41 | {% include feature_row %} -------------------------------------------------------------------------------- /docs/screenshot-layouts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/screenshot-layouts.png -------------------------------------------------------------------------------- /docs/screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/docs/screenshot.png -------------------------------------------------------------------------------- /examples/hyperparameter tuning/extended-tuning/data_prep.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from sklearn.model_selection import train_test_split 3 | 4 | from utils import load_rte_data_file 5 | 6 | # Preparing train data 7 | train_df = load_rte_data_file("data/train.jsonl") 8 | eval_df = load_rte_data_file("data/val.jsonl") 9 | eval_df, test_df = train_test_split(eval_df, test_size=0.5, random_state=4) 10 | 11 | eval_df.to_json("data/eval_df.jsonl", orient="records", lines=True) 12 | test_df.to_json("data/test_df.jsonl", orient="records", lines=True) 13 | -------------------------------------------------------------------------------- /examples/hyperparameter tuning/extended-tuning/readme.md: -------------------------------------------------------------------------------- 1 | # Hyperparameter Optimization for Optimum Transformer Models 2 | 3 | Code for [Medium article](https://towardsdatascience.com/hyperparameter-optimization-for-optimum-transformer-models-b95a32b70949?source=friends_link&sk=7d19ce15c9ac1230642d826b9deeb638).
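The splash page above promises getting started with 3 lines of code. A minimal sketch of what those three lines typically look like, mirroring the examples/text_classification/binary_classification.py script further below; the toy DataFrame and the use_cuda=False setting are illustrative only, not part of the repository:

import pandas as pd

from simpletransformers.classification import ClassificationModel

# Toy training data: a text column (str) and a labels column (int).
train_df = pd.DataFrame(
    [
        ["Example sentence belonging to class 1", 1],
        ["Example sentence belonging to class 0", 0],
    ],
    columns=["text", "labels"],
)

# The "3 lines": create the model, train it, evaluate it.
model = ClassificationModel("roberta", "roberta-base", use_cuda=False)
model.train_model(train_df)
result, model_outputs, wrong_predictions = model.eval_model(train_df)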
-------------------------------------------------------------------------------- /examples/hyperparameter tuning/extended-tuning/train_default.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from statistics import mean 3 | 4 | import pandas as pd 5 | import wandb 6 | from sklearn.metrics import accuracy_score 7 | 8 | from simpletransformers.classification import ClassificationArgs, ClassificationModel 9 | from utils import load_rte_data_file 10 | 11 | logging.basicConfig(level=logging.INFO) 12 | transformers_logger = logging.getLogger("transformers") 13 | transformers_logger.setLevel(logging.WARNING) 14 | 15 | # Preparing train data 16 | # train_df = load_rte_data_file("data/train.jsonl") 17 | train_df = pd.read_json("data/augmented_train.jsonl", lines=True, orient="records") 18 | eval_df = pd.read_json("data/eval_df.jsonl", lines=True, orient="records") 19 | test_df = pd.read_json("data/test_df.jsonl", lines=True, orient="records") 20 | 21 | model_args = ClassificationArgs() 22 | model_args.eval_batch_size = 32 23 | model_args.evaluate_during_training = True 24 | model_args.evaluate_during_training_silent = False 25 | model_args.evaluate_during_training_steps = -1 26 | model_args.save_eval_checkpoints = False 27 | model_args.save_model_every_epoch = False 28 | model_args.learning_rate = 1e-5 29 | model_args.manual_seed = 4 30 | model_args.max_seq_length = 256 31 | model_args.multiprocessing_chunksize = 5000 32 | model_args.no_cache = True 33 | model_args.num_train_epochs = 3 34 | model_args.overwrite_output_dir = True 35 | model_args.reprocess_input_data = True 36 | model_args.train_batch_size = 16 37 | model_args.gradient_accumulation_steps = 2 38 | model_args.labels_list = ["not_entailment", "entailment"] 39 | model_args.output_dir = "default_output" 40 | model_args.best_model_dir = "default_output/best_model" 41 | model_args.wandb_project = "RTE - Hyperparameter Optimization" 42 | model_args.wandb_kwargs = {"name": "augmented-default"} 43 | 44 | # Create a TransformerModel 45 | model = ClassificationModel("roberta", "roberta-large", use_cuda=True, args=model_args) 46 | 47 | # Train the model 48 | model.train_model( 49 | train_df, 50 | eval_df=eval_df, 51 | accuracy=lambda truth, predictions: accuracy_score( 52 | truth, [round(p) for p in predictions] 53 | ), 54 | ) 55 | 56 | model.eval_model(test_df, verbose=True) 57 | -------------------------------------------------------------------------------- /examples/hyperparameter tuning/extended-tuning/utils.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | 4 | def load_rte_data_file(filepath): 5 | df = pd.read_json(filepath, lines=True) 6 | df = df.rename( 7 | columns={"premise": "text_a", "hypothesis": "text_b", "label": "labels"} 8 | ) 9 | df = df[["text_a", "text_b", "labels"]] 10 | return df 11 | 12 | 13 | def load_rte_test(filepath): 14 | df = pd.read_json(filepath, lines=True) 15 | df = df.rename(columns={"premise": "text_a", "hypothesis": "text_b"}) 16 | return df 17 | -------------------------------------------------------------------------------- /examples/hyperparameter tuning/sweeps.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import pandas as pd 4 | import sklearn 5 | import wandb 6 | 7 | from simpletransformers.classification import ClassificationArgs, ClassificationModel 8 | 9 | sweep_config = { 10 | "method": "bayes", # grid, random 11 | "metric":
{"name": "train_loss", "goal": "minimize"}, 12 | "parameters": { 13 | "num_train_epochs": {"values": [2, 3, 5]}, 14 | "learning_rate": {"min": 5e-5, "max": 4e-4}, 15 | }, 16 | } 17 | 18 | sweep_id = wandb.sweep(sweep_config, project="Simple Sweep") 19 | 20 | logging.basicConfig(level=logging.INFO) 21 | transformers_logger = logging.getLogger("transformers") 22 | transformers_logger.setLevel(logging.WARNING) 23 | 24 | # Preparing train data 25 | train_data = [ 26 | ["Aragorn was the heir of Isildur", "true"], 27 | ["Frodo was the heir of Isildur", "false"], 28 | ] 29 | train_df = pd.DataFrame(train_data) 30 | train_df.columns = ["text", "labels"] 31 | 32 | # Preparing eval data 33 | eval_data = [ 34 | ["Theoden was the king of Rohan", "true"], 35 | ["Merry was the king of Rohan", "false"], 36 | ] 37 | eval_df = pd.DataFrame(eval_data) 38 | eval_df.columns = ["text", "labels"] 39 | 40 | model_args = ClassificationArgs() 41 | model_args.reprocess_input_data = True 42 | model_args.overwrite_output_dir = True 43 | model_args.evaluate_during_training = True 44 | model_args.manual_seed = 4 45 | model_args.use_multiprocessing = True 46 | model_args.train_batch_size = 16 47 | model_args.eval_batch_size = 8 48 | model_args.labels_list = ["true", "false"] 49 | model_args.wandb_project = "Simple Sweep" 50 | 51 | 52 | def train(): 53 | # Initialize a new wandb run 54 | wandb.init() 55 | 56 | # Create a TransformerModel 57 | model = ClassificationModel( 58 | "roberta", 59 | "roberta-base", 60 | use_cuda=True, 61 | args=model_args, 62 | sweep_config=wandb.config, 63 | ) 64 | 65 | # Train the model 66 | model.train_model(train_df, eval_df=eval_df) 67 | 68 | # Evaluate the model 69 | model.eval_model(eval_df) 70 | 71 | # Sync wandb 72 | wandb.join() 73 | 74 | 75 | wandb.agent(sweep_id, train) 76 | -------------------------------------------------------------------------------- /examples/language_generation/data_prep.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | df = pd.read_csv("data/cs.AI.tsv", sep="\t") 4 | abstracts = df["abstract"].tolist() 5 | 6 | with open("data/train.txt", "w") as f: 7 | for abstract in abstracts[:-10]: 8 | f.writelines(abstract + "\n") 9 | 10 | with open("data/test.txt", "w") as f: 11 | for abstract in abstracts[-10:]: 12 | f.writelines(abstract + "\n") 13 | -------------------------------------------------------------------------------- /examples/language_generation/fine_tune.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from simpletransformers.language_modeling import LanguageModelingModel 4 | 5 | logging.basicConfig(level=logging.INFO) 6 | transformers_logger = logging.getLogger("transformers") 7 | transformers_logger.setLevel(logging.WARNING) 8 | 9 | train_args = { 10 | "reprocess_input_data": True, 11 | "overwrite_output_dir": True, 12 | "block_size": 512, 13 | "max_seq_length": 512, 14 | "learning_rate": 5e-6, 15 | "train_batch_size": 8, 16 | "gradient_accumulation_steps": 8, 17 | "num_train_epochs": 3, 18 | "mlm": False, 19 | "output_dir": f"outputs/fine-tuned/", 20 | } 21 | 22 | model = LanguageModelingModel("gpt2", "gpt2", args=train_args) 23 | 24 | model.train_model("data/train.txt", eval_file="data/test.txt") 25 | 26 | model.eval_model("data/test.txt") 27 | -------------------------------------------------------------------------------- /examples/language_generation/train_new_lm.py: 
-------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | 4 | from simpletransformers.language_modeling import LanguageModelingModel 5 | 6 | logging.basicConfig(level=logging.INFO) 7 | transformers_logger = logging.getLogger("transformers") 8 | transformers_logger.setLevel(logging.WARNING) 9 | 10 | 11 | train_args = { 12 | "reprocess_input_data": True, 13 | "overwrite_output_dir": True, 14 | "num_train_epochs": 20, 15 | "save_eval_checkpoints": True, 16 | "block_size": 509, 17 | "max_seq_length": 509, 18 | # "save_model_every_epoch": False, 19 | "learning_rate": 1e-4, 20 | "train_batch_size": 16, 21 | "gradient_accumulation_steps": 4, 22 | "mlm": False, 23 | "dataset_type": "simple", 24 | "logging_steps": 100, 25 | "evaluate_during_training": True, 26 | "evaluate_during_training_steps": 3000, 27 | "evaluate_during_training_verbose": True, 28 | "use_cached_eval_features": True, 29 | "sliding_window": True, 30 | "use_multiprocessing": False, 31 | "vocab_size": 10000, 32 | "output_dir": f"outputs/from_scratch_", 33 | "best_model_dir": f"outputs/from_scratch/best_model", 34 | "fp16": False, 35 | "local_rank": -1, 36 | } 37 | 38 | parser = argparse.ArgumentParser() 39 | parser.add_argument( 40 | "--local_rank", 41 | type=int, 42 | default=-1, 43 | help="Local rank. Necessary for using the torch.distributed.launch utility.", 44 | ) 45 | args = parser.parse_args() 46 | 47 | train_args["local_rank"] = args.local_rank 48 | 49 | train_file = f"data/train.txt" 50 | test_file = f"data/test.txt" 51 | 52 | model = LanguageModelingModel( 53 | "gpt2", 54 | None, 55 | args=train_args, 56 | train_files=train_file, 57 | ) 58 | 59 | model.train_model( 60 | train_file, 61 | eval_file=test_file, 62 | ) 63 | 64 | model.eval_model(test_file) 65 | -------------------------------------------------------------------------------- /examples/language_representation/binary_classification_dummy.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from sklearn.linear_model import RidgeClassifier 3 | from sklearn.metrics import classification_report 4 | 5 | from simpletransformers.language_representation import RepresentationModel 6 | 7 | train_data = [ 8 | ["Example sentence belonging to class 1", 1], 9 | ["Example sentence belonging to class 0", 0], 10 | ] 11 | train_df = pd.DataFrame(train_data, columns=["text", "target"]) 12 | 13 | eval_data = [ 14 | ["Example eval sentence belonging to class 1", 1], 15 | ["Example eval sentence belonging to class 0", 0], 16 | ] 17 | eval_df = pd.DataFrame(eval_data, columns=["text", "target"]) 18 | 19 | model = RepresentationModel( 20 | model_type="bert", 21 | model_name="bert-base-uncased", 22 | use_cuda=False, 23 | args={"no_save": True, "reprocess_input_data": True, "overwrite_output_dir": True}, 24 | ) 25 | train_vectors = model.encode_sentences( 26 | train_df["text"].to_list(), combine_strategy="mean" 27 | ) 28 | eval_vectors = model.encode_sentences( 29 | eval_df["text"].to_list(), combine_strategy="mean" 30 | ) 31 | 32 | 33 | clf_model = RidgeClassifier() 34 | clf_model.fit(train_vectors, train_df["target"]) 35 | predictions = clf_model.predict(eval_vectors) 36 | print(classification_report(eval_df["target"], predictions)) 37 | -------------------------------------------------------------------------------- /examples/language_representation/classification_yelp_polarity/classification_yelp.py: 
-------------------------------------------------------------------------------- 1 | from os.path import dirname, join 2 | 3 | import pandas as pd 4 | from sklearn.linear_model import RidgeClassifier 5 | from sklearn.metrics import classification_report 6 | 7 | from simpletransformers.language_representation import RepresentationModel 8 | 9 | project_root = dirname( 10 | dirname(dirname(dirname(__file__))) 11 | ) # path to root of the project 12 | 13 | MODEL_TYPE = "gpt2" # change this to test other model types: bert, roberta, gpt2 14 | 15 | 16 | prefix = project_root + "/data/" 17 | 18 | train_df = pd.read_csv(prefix + "train.csv", header=None) 19 | train_df.head() 20 | 21 | eval_df = pd.read_csv(prefix + "test.csv", header=None) 22 | eval_df.head() 23 | 24 | train_df[0] = (train_df[0] == 2).astype(int) 25 | eval_df[0] = (eval_df[0] == 2).astype(int) 26 | # don't use entire dataset, since it's too big and will tale a long time to run, select only a portion of it 27 | train_df = pd.DataFrame( 28 | {"text": train_df[1].replace(r"\n", " ", regex=True), "labels": train_df[0]} 29 | )[:1000] 30 | print(train_df.head()) 31 | eval_df = pd.DataFrame( 32 | {"text": eval_df[1].replace(r"\n", " ", regex=True), "labels": eval_df[0]} 33 | )[:100] 34 | print(eval_df.head()) 35 | 36 | 37 | if MODEL_TYPE == "bert": 38 | model_name = "bert-base-uncased" 39 | 40 | elif MODEL_TYPE == "roberta": 41 | model_name = "roberta-base" 42 | elif MODEL_TYPE == "gpt2": 43 | model_name = "gpt2" 44 | 45 | 46 | model = RepresentationModel( 47 | model_type=MODEL_TYPE, 48 | model_name=model_name, 49 | use_cuda=False, 50 | args={"no_save": True, "reprocess_input_data": True, "overwrite_output_dir": True}, 51 | ) 52 | 53 | train_vectors = model.encode_sentences( 54 | train_df["text"].to_list(), combine_strategy="mean" 55 | ) 56 | eval_vectors = model.encode_sentences( 57 | eval_df["text"].to_list(), combine_strategy="mean" 58 | ) 59 | 60 | 61 | clf_model = RidgeClassifier() 62 | clf_model.fit(train_vectors, train_df["labels"]) 63 | predictions = clf_model.predict(eval_vectors) 64 | print(classification_report(eval_df["labels"], predictions)) 65 | -------------------------------------------------------------------------------- /examples/language_representation/classification_yelp_polarity/data_download.sh: -------------------------------------------------------------------------------- 1 | mkdir data 2 | wget https://s3.amazonaws.com/fast-ai-nlp/yelp_review_polarity_csv.tgz -O data/data.tgz 3 | tar -xvzf data/data.tgz -C data/ 4 | mv data/yelp_review_polarity_csv/* data/ 5 | rm -r data/yelp_review_polarity_csv/ 6 | rm data/data.tgz -------------------------------------------------------------------------------- /examples/retrieval/download_msmarco.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datasets import load_dataset 3 | 4 | 5 | os.makedirs("data/msmarco", exist_ok=True) 6 | 7 | print("=== Downloading MSMARCO ===") 8 | print("Downloading MSMARCO training triples...") 9 | dataset = load_dataset("thilina/negative-sampling")["train"] 10 | 11 | print("Dataset loaded. 
Sample:") 12 | print(dataset[0]) 13 | 14 | qrels = load_dataset("BeIR/msmarco-qrels")["validation"] 15 | 16 | print("Saving dataset to disk...") 17 | # Save the dataset to disk 18 | dataset.to_csv("data/msmarco/msmarco-train.tsv", sep="\t", index=False) 19 | qrels.to_csv("data/msmarco/devs.tsv", sep="\t", index=False) 20 | 21 | print("Done.") 22 | print("=== MSMARCO download complete ===") 23 | -------------------------------------------------------------------------------- /examples/retrieval/train_dpr_base.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import pandas as pd 4 | from simpletransformers.retrieval import RetrievalModel, RetrievalArgs 5 | 6 | # Configuring logging 7 | logging.basicConfig(level=logging.INFO) 8 | transformers_logger = logging.getLogger("transformers") 9 | transformers_logger.setLevel(logging.WARNING) 10 | 11 | # Specifying the path to the training data 12 | train_data_path = "data/msmarco/msmarco-train.tsv" 13 | 14 | # Loading the training data 15 | if train_data_path.endswith(".tsv"): 16 | train_data = pd.read_csv(train_data_path, sep="\t") 17 | else: 18 | train_data = train_data_path 19 | 20 | # Configuring the model arguments 21 | model_args = RetrievalArgs() 22 | model_args.reprocess_input_data = True 23 | model_args.overwrite_output_dir = True 24 | model_args.use_cached_eval_features = False 25 | model_args.include_title = False if "msmarco" in train_data_path else True 26 | model_args.max_seq_length = 256 27 | model_args.num_train_epochs = 40 28 | model_args.train_batch_size = 16 29 | model_args.use_hf_datasets = True 30 | model_args.learning_rate = 1e-6 31 | model_args.warmup_steps = 5000 32 | model_args.save_steps = 300000 33 | model_args.evaluate_during_training = True 34 | model_args.evaluate_during_training_steps = False 35 | model_args.save_model_every_epoch = False 36 | model_args.wandb_project = "Retrieval training example" 37 | model_args.hard_negatives = False 38 | model_args.n_gpu = 1 39 | model_args.data_format = "beir" 40 | model_args.output_dir = f"trained_models/pretrained/DPR-base-msmarco" 41 | model_args.wandb_kwargs = {"name": f"DPR-base-msmarco"} 42 | 43 | # Defining the model type and names 44 | model_type = "custom" 45 | model_name = None 46 | context_name = "bert-base-multilingual-cased" 47 | question_name = "bert-base-multilingual-cased" 48 | 49 | # Main execution 50 | if __name__ == "__main__": 51 | # Creating the model 52 | model = RetrievalModel( 53 | model_type, 54 | model_name, 55 | context_name, 56 | question_name, 57 | args=model_args, 58 | ) 59 | 60 | # Training the model 61 | model.train_model( 62 | train_data, 63 | eval_set="dev", 64 | ) 65 | -------------------------------------------------------------------------------- /examples/seq2seq/minimal_seq2seq.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import pandas as pd 4 | 5 | from simpletransformers.seq2seq import Seq2SeqModel 6 | 7 | logging.basicConfig(level=logging.INFO) 8 | transformers_logger = logging.getLogger("transformers") 9 | transformers_logger.setLevel(logging.WARNING) 10 | 11 | 12 | train_data = [ 13 | ["one", "1"], 14 | ["two", "2"], 15 | ] 16 | 17 | train_df = pd.DataFrame(train_data, columns=["input_text", "target_text"]) 18 | 19 | eval_data = [ 20 | ["three", "3"], 21 | ["four", "4"], 22 | ] 23 | 24 | eval_df = pd.DataFrame(eval_data, columns=["input_text", "target_text"]) 25 | 26 | model_args = { 27 | "reprocess_input_data": 
True, 28 | "overwrite_output_dir": True, 29 | "max_seq_length": 10, 30 | "train_batch_size": 2, 31 | "num_train_epochs": 100, 32 | "save_eval_checkpoints": False, 33 | "save_model_every_epoch": False, 34 | # "silent": True, 35 | "evaluate_generated_text": True, 36 | "evaluate_during_training": True, 37 | "evaluate_during_training_verbose": True, 38 | "use_multiprocessing": False, 39 | "save_best_model": False, 40 | "max_length": 15, 41 | } 42 | 43 | model = Seq2SeqModel("bert", "bert-base-cased", "bert-base-cased", args=model_args) 44 | 45 | 46 | def count_matches(labels, preds): 47 | print(labels) 48 | print(preds) 49 | return sum([1 if label == pred else 0 for label, pred in zip(labels, preds)]) 50 | 51 | 52 | model.train_model(train_df, eval_data=eval_df, matches=count_matches) 53 | 54 | print(model.eval_model(eval_df, matches=count_matches)) 55 | 56 | print(model.predict(["four", "five"])) 57 | -------------------------------------------------------------------------------- /examples/seq2seq/paraphrasing/data_download.sh: -------------------------------------------------------------------------------- 1 | mkdir data 2 | wget https://storage.googleapis.com/paws/english/paws_wiki_labeled_final.tar.gz -P data 3 | tar -xvf data/paws_wiki_labeled_final.tar.gz -C data 4 | mv data/final/* data 5 | rm -r data/final 6 | 7 | wget http://qim.fs.quoracdn.net/quora_duplicate_questions.tsv -P data 8 | -------------------------------------------------------------------------------- /examples/seq2seq/paraphrasing/predict.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from simpletransformers.seq2seq import Seq2SeqModel 4 | 5 | 6 | logging.basicConfig(level=logging.INFO) 7 | transformers_logger = logging.getLogger("transformers") 8 | transformers_logger.setLevel(logging.ERROR) 9 | 10 | model = Seq2SeqModel(encoder_decoder_type="bart", encoder_decoder_name="outputs") 11 | 12 | 13 | while True: 14 | original = input("Enter text to paraphrase: ") 15 | to_predict = [original] 16 | 17 | preds = model.predict(to_predict) 18 | 19 | print("---------------------------------------------------------") 20 | print(original) 21 | 22 | print() 23 | print("Predictions >>>") 24 | for pred in preds[0]: 25 | print(pred) 26 | 27 | print("---------------------------------------------------------") 28 | print() 29 | -------------------------------------------------------------------------------- /examples/seq2seq/paraphrasing/readme.md: -------------------------------------------------------------------------------- 1 | Code for the Medium Article [here](https://towardsdatascience.com/bart-for-paraphrasing-with-simple-transformers-7c9ea3dfdd8c?source=friends_link&sk=07420669325ac550f86b86bad362633c). 
-------------------------------------------------------------------------------- /examples/seq2seq/paraphrasing/utils.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | import pandas as pd 4 | 5 | 6 | def load_data( 7 | file_path, input_text_column, target_text_column, label_column, keep_label=1 8 | ): 9 | df = pd.read_csv(file_path, sep="\t", error_bad_lines=False) 10 | df = df.loc[df[label_column] == keep_label] 11 | df = df.rename( 12 | columns={input_text_column: "input_text", target_text_column: "target_text"} 13 | ) 14 | df = df[["input_text", "target_text"]] 15 | df["prefix"] = "paraphrase" 16 | 17 | return df 18 | 19 | 20 | def clean_unnecessary_spaces(out_string): 21 | if not isinstance(out_string, str): 22 | warnings.warn(f">>> {out_string} <<< is not a string.") 23 | out_string = str(out_string) 24 | out_string = ( 25 | out_string.replace(" .", ".") 26 | .replace(" ?", "?") 27 | .replace(" !", "!") 28 | .replace(" ,", ",") 29 | .replace(" ' ", "'") 30 | .replace(" n't", "n't") 31 | .replace(" 'm", "'m") 32 | .replace(" 's", "'s") 33 | .replace(" 've", "'ve") 34 | .replace(" 're", "'re") 35 | ) 36 | return out_string 37 | -------------------------------------------------------------------------------- /examples/t5/mixed_tasks/train.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | from simpletransformers.t5 import T5Model 4 | 5 | train_df = pd.read_csv("data/train.tsv", sep="\t").astype(str) 6 | eval_df = pd.read_csv("data/eval.tsv", sep="\t").astype(str) 7 | 8 | model_args = { 9 | "max_seq_length": 196, 10 | "train_batch_size": 16, 11 | "eval_batch_size": 64, 12 | "num_train_epochs": 1, 13 | "evaluate_during_training": True, 14 | "evaluate_during_training_steps": 15000, 15 | "evaluate_during_training_verbose": True, 16 | "use_multiprocessing": False, 17 | "fp16": False, 18 | "save_steps": -1, 19 | "save_eval_checkpoints": False, 20 | "save_model_every_epoch": False, 21 | "reprocess_input_data": True, 22 | "overwrite_output_dir": True, 23 | "wandb_project": "T5 mixed tasks - Binary, Multi-Label, Regression", 24 | } 25 | 26 | model = T5Model("t5", "t5-base", args=model_args) 27 | 28 | model.train_model(train_df, eval_data=eval_df) 29 | -------------------------------------------------------------------------------- /examples/t5/mt5/train.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pandas as pd 3 | from simpletransformers.t5 import T5Model, T5Args 4 | 5 | 6 | logging.basicConfig(level=logging.INFO) 7 | transformers_logger = logging.getLogger("transformers") 8 | transformers_logger.setLevel(logging.WARNING) 9 | 10 | train_df = pd.read_csv("data/train.tsv", sep="\t").astype(str) 11 | eval_df = pd.read_csv("data/eval.tsv", sep="\t").astype(str) 12 | 13 | model_args = T5Args() 14 | model_args.max_seq_length = 196 15 | model_args.train_batch_size = 8 16 | model_args.eval_batch_size = 8 17 | model_args.num_train_epochs = 1 18 | model_args.evaluate_during_training = False 19 | model_args.use_multiprocessing = False 20 | model_args.fp16 = False 21 | model_args.save_steps = -1 22 | model_args.save_eval_checkpoints = False 23 | model_args.save_model_every_epoch = False 24 | model_args.no_cache = True 25 | model_args.reprocess_input_data = True 26 | model_args.overwrite_output_dir = True 27 | model_args.num_return_sequences = 1 28 | model_args.wandb_project = "MT5 mixed tasks" 29 | 30 | model = 
T5Model("mt5", "google/mt5-base", args=model_args) 31 | 32 | # Train the model 33 | model.train_model(train_df, eval_data=eval_df) 34 | 35 | # Optional: Evaluate the model. We'll test it properly anyway. 36 | results = model.eval_model(eval_df, verbose=True) 37 | -------------------------------------------------------------------------------- /examples/t5/mt5/translate_dataset.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from tqdm.auto import tqdm 4 | import pandas as pd 5 | 6 | from translation_models import load_german, load_dutch, load_swedish, load_romance 7 | 8 | 9 | logging.basicConfig(level=logging.INFO) 10 | transformers_logger = logging.getLogger("transformers") 11 | transformers_logger.setLevel(logging.WARNING) 12 | 13 | 14 | model_map = { 15 | "english-dutch": load_dutch(), 16 | "english-german": load_german(), 17 | "english-swedish": load_swedish(), 18 | "english-romance": load_romance(), 19 | } 20 | 21 | 22 | def do_translate(input_text, target_language=None): 23 | if target_language == "german": 24 | return model_map["english-german"].predict(input_text) 25 | elif target_language == "dutch": 26 | return model_map["english-dutch"].predict(input_text) 27 | elif target_language == "swedish": 28 | return model_map["english-swedish"].predict(input_text) 29 | elif target_language == "spanish": 30 | return model_map["english-romance"].predict( 31 | [">>es<< " + text for text in input_text] 32 | ) 33 | elif target_language == "french": 34 | return model_map["english-romance"].predict( 35 | [">>fr<< " + text for text in input_text] 36 | ) 37 | 38 | 39 | def translate_dataset(input_file, target_language): 40 | df = pd.read_csv(input_file, sep="\t").astype(str) 41 | df = df[df["prefix"] == "binary classification"] 42 | input_text = df["input_text"].tolist() 43 | 44 | translated_text = do_translate(input_text, target_language=target_language) 45 | 46 | df["input_text"] = translated_text 47 | 48 | return df 49 | 50 | 51 | languages = ["dutch", "german", "french", "swedish", "spanish"] 52 | 53 | for lang in tqdm(languages): 54 | translated_dataset = translate_dataset("data/eval.tsv", lang) 55 | translated_dataset.to_csv(f"data/{lang}_eval.tsv", "\t") 56 | -------------------------------------------------------------------------------- /examples/t5/mt5/translation_models.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from simpletransformers.seq2seq import Seq2SeqModel, Seq2SeqArgs 4 | 5 | 6 | logging.basicConfig(level=logging.INFO) 7 | transformers_logger = logging.getLogger("transformers") 8 | transformers_logger.setLevel(logging.WARNING) 9 | 10 | model_args = Seq2SeqArgs() 11 | model_args.reprocess_input_data = True 12 | model_args.overwrite_output_dir = True 13 | model_args.eval_batch_size = 64 14 | model_args.use_multiprocessing = False 15 | model_args.max_seq_length = 196 16 | model_args.max_length = 512 17 | model_args.num_beams = None 18 | model_args.do_sample = True 19 | model_args.top_k = 50 20 | model_args.top_p = 0.95 21 | 22 | use_cuda = True 23 | 24 | 25 | def load_german(): 26 | english_to_german_model = Seq2SeqModel( 27 | encoder_decoder_type="marian", 28 | encoder_decoder_name="Helsinki-NLP/opus-mt-en-de", 29 | use_cuda=use_cuda, 30 | args=model_args, 31 | ) 32 | return english_to_german_model 33 | 34 | 35 | def load_dutch(): 36 | english_to_dutch_model = Seq2SeqModel( 37 | encoder_decoder_type="marian", 38 | 
encoder_decoder_name="Helsinki-NLP/opus-mt-en-nl", 39 | use_cuda=use_cuda, 40 | args=model_args, 41 | ) 42 | return english_to_dutch_model 43 | 44 | 45 | def load_swedish(): 46 | english_to_swedish_model = Seq2SeqModel( 47 | encoder_decoder_type="marian", 48 | encoder_decoder_name="Helsinki-NLP/opus-mt-en-sw", 49 | use_cuda=use_cuda, 50 | args=model_args, 51 | ) 52 | return english_to_swedish_model 53 | 54 | 55 | def load_romance(): 56 | english_to_romance_model = Seq2SeqModel( 57 | encoder_decoder_type="marian", 58 | encoder_decoder_name="Helsinki-NLP/opus-mt-en-roa", 59 | use_cuda=use_cuda, 60 | args=model_args, 61 | ) 62 | return english_to_romance_model 63 | -------------------------------------------------------------------------------- /examples/t5/mt5_translation/readme.md: -------------------------------------------------------------------------------- 1 | Code for the Medium article [How to Train an mT5 Model for Translation With Simple Transformers](https://chaturangarajapakshe.medium.com/how-to-train-an-mt5-model-for-translation-with-simple-transformers-30ba5fa66c5f?sk=31d91f9639bc5876ae465c340e1a026f) -------------------------------------------------------------------------------- /examples/t5/mt5_translation/test.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sacrebleu 3 | import pandas as pd 4 | from simpletransformers.t5 import T5Model, T5Args 5 | 6 | 7 | logging.basicConfig(level=logging.INFO) 8 | transformers_logger = logging.getLogger("transformers") 9 | transformers_logger.setLevel(logging.WARNING) 10 | 11 | model_args = T5Args() 12 | model_args.max_length = 512 13 | model_args.length_penalty = 1 14 | model_args.num_beams = 10 15 | 16 | model = T5Model("mt5", "outputs_base", args=model_args) 17 | 18 | eval_df = pd.read_csv("data/eval.tsv", sep="\t").astype(str) 19 | 20 | sinhala_truth = [ 21 | eval_df.loc[eval_df["prefix"] == "translate english to sinhala"][ 22 | "target_text" 23 | ].tolist() 24 | ] 25 | to_sinhala = eval_df.loc[eval_df["prefix"] == "translate english to sinhala"][ 26 | "input_text" 27 | ].tolist() 28 | 29 | english_truth = [ 30 | eval_df.loc[eval_df["prefix"] == "translate sinhala to english"][ 31 | "target_text" 32 | ].tolist() 33 | ] 34 | to_english = eval_df.loc[eval_df["prefix"] == "translate sinhala to english"][ 35 | "input_text" 36 | ].tolist() 37 | 38 | # Predict 39 | sinhala_preds = model.predict(to_sinhala) 40 | 41 | eng_sin_bleu = sacrebleu.corpus_bleu(sinhala_preds, sinhala_truth) 42 | print("--------------------------") 43 | print("English to Sinhalese: ", eng_sin_bleu.score) 44 | 45 | english_preds = model.predict(to_english) 46 | 47 | sin_eng_bleu = sacrebleu.corpus_bleu(english_preds, english_truth) 48 | print("Sinhalese to English: ", sin_eng_bleu.score) 49 | -------------------------------------------------------------------------------- /examples/t5/mt5_translation/train.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pandas as pd 3 | from simpletransformers.t5 import T5Model, T5Args 4 | 5 | 6 | logging.basicConfig(level=logging.INFO) 7 | transformers_logger = logging.getLogger("transformers") 8 | transformers_logger.setLevel(logging.WARNING) 9 | 10 | train_df = pd.read_csv("data/train.tsv", sep="\t").astype(str) 11 | eval_df = pd.read_csv("data/eval.tsv", sep="\t").astype(str) 12 | 13 | train_df["prefix"] = "" 14 | eval_df["prefix"] = "" 15 | 16 | model_args = T5Args() 17 | model_args.max_seq_length = 
96 18 | model_args.train_batch_size = 20 19 | model_args.eval_batch_size = 20 20 | model_args.num_train_epochs = 1 21 | model_args.evaluate_during_training = True 22 | model_args.evaluate_during_training_steps = 30000 23 | model_args.use_multiprocessing = False 24 | model_args.fp16 = False 25 | model_args.save_steps = -1 26 | model_args.save_eval_checkpoints = False 27 | model_args.no_cache = True 28 | model_args.reprocess_input_data = True 29 | model_args.overwrite_output_dir = True 30 | model_args.preprocess_inputs = False 31 | model_args.num_return_sequences = 1 32 | model_args.wandb_project = "MT5 Sinhala-English Translation" 33 | 34 | model = T5Model("mt5", "google/mt5-base", args=model_args) 35 | 36 | # Train the model 37 | model.train_model(train_df, eval_data=eval_df) 38 | 39 | # Optional: Evaluate the model. We'll test it properly anyway. 40 | results = model.eval_model(eval_df, verbose=True) 41 | -------------------------------------------------------------------------------- /examples/t5/training_on_a_new_task/data_prep.py: -------------------------------------------------------------------------------- 1 | import gzip 2 | import os 3 | 4 | import pandas as pd 5 | from sklearn.model_selection import train_test_split 6 | from tqdm.auto import tqdm 7 | 8 | 9 | def parse(path): 10 | g = gzip.open(path, "rb") 11 | for l in g: 12 | yield eval(l) 13 | 14 | 15 | def getDF(path): 16 | i = 0 17 | df = {} 18 | for d in parse(path): 19 | df[i] = d 20 | i += 1 21 | 22 | return pd.DataFrame.from_dict(df, orient="index") 23 | 24 | 25 | categories = [ 26 | category[3:] 27 | for category in os.listdir("data") 28 | if category.endswith(".gz") and category.startswith("qa") 29 | ] 30 | 31 | for category in tqdm(categories): 32 | if not os.path.isfile(f"data/{category.split('.')[0]}.tsv"): 33 | try: 34 | df1 = getDF(f"data/qa_{category}") 35 | df2 = getDF(f"data/meta_{category}") 36 | 37 | df = pd.merge(df1, df2, on="asin", how="left") 38 | df = df[["question", "answer", "description"]] 39 | df = df.dropna() 40 | df = df.drop_duplicates(subset="answer") 41 | print(df.head()) 42 | 43 | df.to_csv(f"data/{category.split('.')[0]}.tsv", "\t") 44 | except: 45 | pass 46 | 47 | df = pd.concat( 48 | ( 49 | pd.read_csv(f"data/{f}", sep="\t") 50 | for f in os.listdir("data") 51 | if f.endswith(".tsv") 52 | ) 53 | ) 54 | df = df[["question", "description"]] 55 | df["description"] = df["description"].apply(lambda x: x[2:-2]) 56 | df.columns = ["target_text", "input_text"] 57 | df["prefix"] = "ask_question" 58 | 59 | df.to_csv(f"data/data_all.tsv", "\t") 60 | 61 | train_df, eval_df = train_test_split(df, test_size=0.05) 62 | 63 | train_df.to_csv("data/train_df.tsv", "\t") 64 | eval_df.to_csv("data/eval_df.tsv", "\t") 65 | -------------------------------------------------------------------------------- /examples/t5/training_on_a_new_task/predict.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.t5 import T5Model 2 | 3 | model_args = { 4 | "reprocess_input_data": True, 5 | "overwrite_output_dir": True, 6 | "max_seq_length": 128, 7 | "eval_batch_size": 16, 8 | "num_train_epochs": 1, 9 | "save_eval_checkpoints": False, 10 | "use_multiprocessing": False, 11 | # "silent": True, 12 | "num_beams": None, 13 | "do_sample": True, 14 | "max_length": 50, 15 | "top_k": 50, 16 | "top_p": 0.95, 17 | "num_return_sequences": 3, 18 | } 19 | 20 | model = T5Model("t5", "outputs/best_model", args=model_args) 21 | 22 | query = ( 23 | "ask_question: " 24 | + """ANTIQUE CAST 
METAL 3 GLOBE CANDLABRA JADITE LAMP. 25 | 26 | Stunning antique lamp with three candle style globes. Cast metal base with jadite green glass insert. Has been rewired with new braided cord. In excellent condition with only one chip (as pictured) on the edge of the glass insert. E9 69 on underside of metal base. Missing finial. New low wattage globes. 27 | """ 28 | ) 29 | 30 | preds = model.predict([query]) 31 | 32 | print(preds) 33 | -------------------------------------------------------------------------------- /examples/t5/training_on_a_new_task/test.py: -------------------------------------------------------------------------------- 1 | from pprint import pprint 2 | 3 | import pandas as pd 4 | 5 | from simpletransformers.t5 import T5Model 6 | 7 | model_args = { 8 | "reprocess_input_data": True, 9 | "overwrite_output_dir": True, 10 | "max_seq_length": 128, 11 | "eval_batch_size": 128, 12 | "num_train_epochs": 1, 13 | "save_eval_checkpoints": False, 14 | "use_multiprocessing": False, 15 | "num_beams": None, 16 | "do_sample": True, 17 | "max_length": 50, 18 | "top_k": 50, 19 | "top_p": 0.95, 20 | "num_return_sequences": 3, 21 | } 22 | 23 | model = T5Model("test_outputs_large/best_model", args=model_args) 24 | 25 | df = pd.read_csv("data/eval_df.tsv", sep="\t").astype(str) 26 | preds = model.predict( 27 | ["ask_question: " + description for description in df["input_text"].tolist()] 28 | ) 29 | 30 | questions = df["target_text"].tolist() 31 | 32 | with open("test_outputs_large/generated_questions.txt", "w") as f: 33 | for i, desc in enumerate(df["input_text"].tolist()): 34 | pprint(desc) 35 | pprint(preds[i]) 36 | print() 37 | 38 | f.write(str(desc) + "\n\n") 39 | 40 | f.write("Real question:\n") 41 | f.write(questions[i] + "\n\n") 42 | 43 | f.write("Generated questions:\n") 44 | for pred in preds[i]: 45 | f.write(str(pred) + "\n") 46 | f.write( 47 | "________________________________________________________________________________\n" 48 | ) 49 | -------------------------------------------------------------------------------- /examples/t5/training_on_a_new_task/train.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | from simpletransformers.t5 import T5Model 4 | 5 | train_df = pd.read_csv("data/train_df.tsv", sep="\t").astype(str) 6 | eval_df = pd.read_csv("data/eval_df.tsv", sep="\t").astype(str) 7 | 8 | model_args = { 9 | "reprocess_input_data": True, 10 | "overwrite_output_dir": True, 11 | "max_seq_length": 128, 12 | "train_batch_size": 8, 13 | "num_train_epochs": 1, 14 | "save_eval_checkpoints": True, 15 | "save_steps": -1, 16 | "use_multiprocessing": False, 17 | # "silent": True, 18 | "evaluate_during_training": True, 19 | "evaluate_during_training_steps": 15000, 20 | "evaluate_during_training_verbose": True, 21 | "fp16": False, 22 | "wandb_project": "Question Generation with T5", 23 | } 24 | 25 | model = T5Model("t5", "t5-large", args=model_args) 26 | 27 | model.train_model(train_df, eval_data=eval_df) 28 | -------------------------------------------------------------------------------- /examples/text_classification/binary_classification.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | from simpletransformers.classification import ClassificationModel 4 | 5 | # Train and Evaluation data needs to be in a Pandas Dataframe of two columns. The first column is the text with type str, and the second column is the label with type int. 
6 | train_data = [ 7 | ["Example sentence belonging to class 1", 1], 8 | ["Example sentence belonging to class 0", 0], 9 | ] 10 | train_df = pd.DataFrame(train_data) 11 | 12 | eval_data = [ 13 | ["Example eval sentence belonging to class 1", 1], 14 | ["Example eval sentence belonging to class 0", 0], 15 | ] 16 | eval_df = pd.DataFrame(eval_data) 17 | 18 | # Create a ClassificationModel 19 | model = ClassificationModel("roberta", "roberta-base") 20 | 21 | # Train the model 22 | model.train_model(train_df) 23 | 24 | # Evaluate the model 25 | result, model_outputs, wrong_predictions = model.eval_model(eval_df) 26 | -------------------------------------------------------------------------------- /examples/text_classification/lazy_loading_regression.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pandas as pd 4 | 5 | from simpletransformers.classification import ClassificationModel 6 | 7 | train_data = [ 8 | ["Example sentence belonging to class 1", "Yep, this is 1", 0.8], 9 | ["Example sentence belonging to class 0", "Yep, this is 0", 0.2], 10 | [ 11 | "This is an entirely different phrase altogether and should be treated so.", 12 | "Is this being picked up?", 13 | 1000.5, 14 | ], 15 | ] 16 | 17 | train_df = pd.DataFrame(train_data, columns=["text_a", "text_b", "labels"]) 18 | 19 | eval_data = [ 20 | ["Example sentence belonging to class 1", "Yep, this is 1", 1.9], 21 | ["Example sentence belonging to class 0", "Yep, this is 0", 0.1], 22 | ["Example 2 sentence belonging to class 0", "Yep, this is 0", 5], 23 | ] 24 | 25 | eval_df = pd.DataFrame(eval_data, columns=["text_a", "text_b", "labels"]) 26 | 27 | os.makedirs("data", exist_ok=True) 28 | 29 | train_df.to_csv("data/regression_train.tsv", sep="\t", index=False) 30 | eval_df.to_csv("data/regression_eval.tsv", sep="\t", index=False) 31 | 32 | train_args = { 33 | "reprocess_input_data": True, 34 | "overwrite_output_dir": True, 35 | "lazy_text_a_column": 0, 36 | "lazy_text_b_column": 1, 37 | "lazy_labels_column": 2, 38 | "lazy_header_row": True, 39 | "regression": True, 40 | "lazy_loading": True, 41 | } 42 | 43 | # Create a TransformerModel 44 | model = ClassificationModel("bert", "bert-base-cased", num_labels=1, args=train_args) 45 | # print(train_df.head()) 46 | 47 | # Train the model 48 | model.train_model("data/regression_train.tsv") 49 | 50 | # # # Evaluate the model 51 | result, model_outputs, wrong_predictions = model.eval_model("data/regression_eval.tsv") 52 | 53 | print(result) 54 | 55 | preds, out = model.predict([["Test sentence", "Other sentence"]]) 56 | 57 | print(preds) 58 | -------------------------------------------------------------------------------- /examples/text_classification/multiclass_classification.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | from simpletransformers.classification import ClassificationModel 4 | 5 | # Train and Evaluation data needs to be in a Pandas Dataframe containing at least two columns. If the Dataframe has a header, it should contain a 'text' and a 'labels' column. If no header is present, the Dataframe should contain at least two columns, with the first column is the text with type str, and the second column in the label with type int. 
6 | train_data = [ 7 | ["Example sentence belonging to class 1", 1], 8 | ["Example sentence belonging to class 0", 0], 9 | ["Example eval senntence belonging to class 2", 2], 10 | ] 11 | train_df = pd.DataFrame(train_data) 12 | 13 | eval_data = [ 14 | ["Example eval sentence belonging to class 1", 1], 15 | ["Example eval sentence belonging to class 0", 0], 16 | ["Example eval senntence belonging to class 2", 2], 17 | ] 18 | eval_df = pd.DataFrame(eval_data) 19 | 20 | # Create a ClassificationModel 21 | model = ClassificationModel( 22 | "bert", 23 | "bert-base-cased", 24 | num_labels=3, 25 | args={"reprocess_input_data": True, "overwrite_output_dir": True}, 26 | ) 27 | 28 | # Train the model 29 | model.train_model(train_df) 30 | 31 | # Evaluate the model 32 | result, model_outputs, wrong_predictions = model.eval_model(eval_df) 33 | 34 | predictions, raw_outputs = model.predict(["Some arbitary sentence"]) 35 | -------------------------------------------------------------------------------- /examples/text_classification/multilabel_classification.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | from simpletransformers.classification import MultiLabelClassificationModel 4 | 5 | # Train and Evaluation data needs to be in a Pandas Dataframe containing at least two columns, a 'text' and a 'labels' column. The `labels` column should contain multi-hot encoded lists. 6 | train_data = [ 7 | ["Example sentence 1 for multilabel classification.", [1, 1, 1, 1, 0, 1]] 8 | ] + [["This is another example sentence. ", [0, 1, 1, 0, 0, 0]]] 9 | train_df = pd.DataFrame(train_data, columns=["text", "labels"]) 10 | 11 | eval_data = [ 12 | ["Example eval sentence for multilabel classification.", [1, 1, 1, 1, 0, 1]], 13 | ["Example eval senntence belonging to class 2", [0, 1, 1, 0, 0, 0]], 14 | ] 15 | eval_df = pd.DataFrame(eval_data) 16 | 17 | # Create a MultiLabelClassificationModel 18 | model = MultiLabelClassificationModel( 19 | "roberta", 20 | "roberta-base", 21 | num_labels=6, 22 | args={ 23 | "reprocess_input_data": True, 24 | "overwrite_output_dir": True, 25 | "num_train_epochs": 5, 26 | }, 27 | ) 28 | 29 | # You can set class weights by using the optional weight argument 30 | print(train_df.head()) 31 | 32 | # Train the model 33 | model.train_model(train_df) 34 | 35 | # Evaluate the model 36 | result, model_outputs, wrong_predictions = model.eval_model(eval_df) 37 | print(result) 38 | print(model_outputs) 39 | 40 | predictions, raw_outputs = model.predict( 41 | ["This thing is entirely different from the other thing. 
"] 42 | ) 43 | print(predictions) 44 | print(raw_outputs) 45 | -------------------------------------------------------------------------------- /examples/text_classification/yelp_reviews_polarity/data_download.sh: -------------------------------------------------------------------------------- 1 | mkdir data 2 | wget https://s3.amazonaws.com/fast-ai-nlp/yelp_review_polarity_csv.tgz -O data/data.tgz 3 | tar -xvzf data/data.tgz -C data/ 4 | mv data/yelp_review_polarity_csv/* data/ 5 | rm -r data/yelp_review_polarity_csv/ 6 | rm data/data.tgz -------------------------------------------------------------------------------- /examples/text_classification/yelp_reviews_polarity/run_trainers.sh: -------------------------------------------------------------------------------- 1 | rm -r cache_dir 2 | python train.py electra-small 3 | 4 | rm -r cache_dir 5 | python train.py electra-base 6 | 7 | rm -r cache_dir 8 | python train.py bert 9 | 10 | rm -r cache_dir 11 | python train.py roberta 12 | 13 | rm -r cache_dir 14 | python train.py distilbert 15 | 16 | rm -r cache_dir 17 | python train.py distilroberta 18 | 19 | rm -r cache_dir 20 | python train.py xlnet 21 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | # test 2 | pytest-cov==2.7.1 3 | pytest-localserver==0.5.0 4 | pytest==7.1.2 5 | 6 | # lint/format/types 7 | black==22.3.0 8 | flake8==3.7.8 9 | pytype==2019.7.11 10 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [tool:pytest] 2 | python_functions=test_ 3 | 4 | codestyle_max_line_length = 119 5 | 6 | log_cli = true 7 | log_cli_level = WARNING 8 | 9 | [metadata] 10 | description-file = README.md 11 | license_file = LICENSE 12 | 13 | [pycodestyle] 14 | max-line-length = 119 15 | 16 | [flake8] 17 | max-line-length = 119 18 | ignore = E203 , W503, F401 19 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import find_packages, setup 2 | 3 | with open("README.md", "r") as fh: 4 | long_description = fh.read() 5 | 6 | setup( 7 | name="simpletransformers", 8 | version="0.70.1", 9 | author="Thilina Rajapakse", 10 | author_email="chaturangarajapakshe@gmail.com", 11 | description="An easy-to-use wrapper library for the Transformers library.", 12 | long_description=long_description, 13 | long_description_content_type="text/markdown", 14 | url="https://github.com/ThilinaRajapakse/simpletransformers/", 15 | packages=find_packages(), 16 | scripts=["bin/simple-viewer"], 17 | classifiers=[ 18 | "Intended Audience :: Science/Research", 19 | "License :: OSI Approved :: Apache Software License", 20 | "Programming Language :: Python :: 3", 21 | "Topic :: Scientific/Engineering :: Artificial Intelligence", 22 | ], 23 | python_requires=">=3.6", 24 | install_requires=[ 25 | "numpy", 26 | "requests", 27 | "tqdm>=4.47.0", 28 | "regex", 29 | "transformers>=4.31.0", 30 | "datasets", 31 | "scipy", 32 | "scikit-learn", 33 | "seqeval", 34 | "tensorboard", 35 | "tensorboardx", 36 | "pandas", 37 | "tokenizers", 38 | "wandb>=0.10.32", 39 | "streamlit", 40 | "sentencepiece", 41 | ], 42 | ) 43 | -------------------------------------------------------------------------------- /simpletransformers/__init__.py: 
-------------------------------------------------------------------------------- 1 | name = "simpletransformers" 2 | -------------------------------------------------------------------------------- /simpletransformers/classification/__init__.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.classification.classification_model import ClassificationModel 2 | from simpletransformers.classification.multi_label_classification_model import ( 3 | MultiLabelClassificationModel, 4 | ) 5 | from simpletransformers.classification.multi_modal_classification_model import ( 6 | MultiModalClassificationModel, 7 | ) 8 | from simpletransformers.config.model_args import ( 9 | ClassificationArgs, 10 | MultiLabelClassificationArgs, 11 | MultiModalClassificationArgs, 12 | ) 13 | -------------------------------------------------------------------------------- /simpletransformers/classification/transformer_models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/simpletransformers/classification/transformer_models/__init__.py -------------------------------------------------------------------------------- /simpletransformers/classification/transformer_models/xlm_roberta_model.py: -------------------------------------------------------------------------------- 1 | from transformers.models.xlm_roberta.configuration_xlm_roberta import XLMRobertaConfig 2 | from transformers.models.xlm_roberta.modeling_xlm_roberta import ( 3 | XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST, 4 | ) 5 | 6 | from simpletransformers.classification.transformer_models.roberta_model import ( 7 | RobertaForSequenceClassification, 8 | ) 9 | 10 | 11 | class XLMRobertaForSequenceClassification(RobertaForSequenceClassification): 12 | config_class = XLMRobertaConfig 13 | pretrained_model_archive_map = XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST 14 | -------------------------------------------------------------------------------- /simpletransformers/config/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/simpletransformers/config/__init__.py -------------------------------------------------------------------------------- /simpletransformers/config/global_args.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from multiprocessing import cpu_count 3 | 4 | global_args = { 5 | "adam_epsilon": 1e-8, 6 | "best_model_dir": "outputs/best_model", 7 | "cache_dir": "cache_dir/", 8 | "config": {}, 9 | "do_lower_case": False, 10 | "early_stopping_consider_epochs": False, 11 | "early_stopping_delta": 0, 12 | "early_stopping_metric": "eval_loss", 13 | "early_stopping_metric_minimize": True, 14 | "early_stopping_patience": 3, 15 | "encoding": None, 16 | "eval_batch_size": 8, 17 | "evaluate_during_training": False, 18 | "evaluate_during_training_silent": True, 19 | "evaluate_during_training_steps": 2000, 20 | "evaluate_during_training_verbose": False, 21 | "fp16": True, 22 | "gradient_accumulation_steps": 1, 23 | "learning_rate": 4e-5, 24 | "local_rank": -1, 25 | "logging_steps": 50, 26 | "manual_seed": None, 27 | "max_grad_norm": 1.0, 28 | "max_seq_length": 128, 29 | "multiprocessing_chunksize": 500, 30 | "n_gpu": 1, 31 | "no_cache": False, 32 | 
"no_save": False, 33 | "num_train_epochs": 1, 34 | "output_dir": "outputs/", 35 | "overwrite_output_dir": False, 36 | "process_count": cpu_count() - 2 if cpu_count() > 2 else 1, 37 | "reprocess_input_data": True, 38 | "save_best_model": True, 39 | "save_eval_checkpoints": True, 40 | "save_model_every_epoch": True, 41 | "save_steps": 2000, 42 | "save_optimizer_and_scheduler": True, 43 | "silent": False, 44 | "tensorboard_dir": None, 45 | "train_batch_size": 8, 46 | "use_cached_eval_features": False, 47 | "use_early_stopping": False, 48 | "use_multiprocessing": True, 49 | "wandb_kwargs": {}, 50 | "wandb_project": None, 51 | "warmup_ratio": 0.06, 52 | "warmup_steps": 0, 53 | "weight_decay": 0, 54 | } 55 | 56 | if sys.platform == "win32": 57 | global_args["process_count"] = min(global_args["process_count"], 61) 58 | -------------------------------------------------------------------------------- /simpletransformers/config/utils.py: -------------------------------------------------------------------------------- 1 | def sweep_config_to_sweep_values(sweep_config): 2 | """ 3 | Converts an instance of wandb.Config to plain values map. 4 | 5 | wandb.Config varies across versions quite significantly, 6 | so we use the `keys` method that works consistently. 7 | """ 8 | 9 | return {key: sweep_config[key] for key in sweep_config.keys()} 10 | -------------------------------------------------------------------------------- /simpletransformers/conv_ai/__init__.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.config.model_args import ConvAIArgs 2 | from simpletransformers.conv_ai.conv_ai_model import ConvAIModel 3 | -------------------------------------------------------------------------------- /simpletransformers/custom_models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/simpletransformers/custom_models/__init__.py -------------------------------------------------------------------------------- /simpletransformers/custom_models/retrieval_autoencoder.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | 4 | 5 | class Autoencoder(nn.Module): 6 | def __init__(self, max_position_embeddings=256, hidden_size=768): 7 | super(Autoencoder, self).__init__() 8 | self.vector_count = max_position_embeddings 9 | self.hidden_size = hidden_size 10 | # Encoder 11 | self.encoder = nn.Sequential( 12 | nn.Linear(self.vector_count * hidden_size, 2048), 13 | nn.ReLU(True), 14 | nn.Linear(2048, hidden_size), 15 | ) 16 | # Decoder 17 | self.decoder = nn.Sequential( 18 | nn.Linear(hidden_size, 2048), 19 | nn.ReLU(True), 20 | nn.Linear(2048, self.vector_count * hidden_size), 21 | ) 22 | 23 | self.init_weights() 24 | 25 | def forward(self, x): 26 | x = x.view(x.size(0), -1) 27 | 28 | encoded_x = self.encoder(x) 29 | decoded_x = self.decoder(encoded_x) 30 | 31 | decoded_x = decoded_x.view(x.size(0), self.vector_count, self.hidden_size) 32 | 33 | return encoded_x, decoded_x 34 | 35 | def encode(self, x): 36 | x = x.view(x.size(0), -1) 37 | encoded_x = self.encoder(x) 38 | return encoded_x 39 | 40 | def decode(self, x): 41 | decoded_x = self.decoder(x) 42 | decoded_x = decoded_x.view(x.size(0), self.vector_count, self.hidden_size) 43 | return decoded_x 44 | 45 | def init_weights(self): 46 | for module in self.modules(): 47 | if 
isinstance(module, nn.Linear): 48 | module.weight.data.normal_(mean=0.0, std=0.02) 49 | if module.bias is not None: 50 | module.bias.data.zero_() 51 | elif isinstance(module, nn.Embedding): 52 | module.weight.data.normal_(mean=0.0, std=0.02) 53 | if module.padding_idx is not None: 54 | module.weight.data[module.padding_idx].zero_() 55 | elif isinstance(module, nn.LayerNorm): 56 | module.bias.data.zero_() 57 | module.weight.data.fill_(1.0) 58 | -------------------------------------------------------------------------------- /simpletransformers/experimental/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/simpletransformers/experimental/__init__.py -------------------------------------------------------------------------------- /simpletransformers/experimental/classification/__init__.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.experimental.classification.classification_model import ( 2 | ClassificationModel, 3 | ) 4 | from simpletransformers.experimental.classification.multi_label_classification_model import ( 5 | MultiLabelClassificationModel, 6 | ) 7 | -------------------------------------------------------------------------------- /simpletransformers/experimental/classification/transformer_models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/simpletransformers/experimental/classification/transformer_models/__init__.py -------------------------------------------------------------------------------- /simpletransformers/language_generation/__init__.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.config.model_args import LanguageGenerationArgs 2 | from simpletransformers.language_generation.language_generation_model import ( 3 | LanguageGenerationModel, 4 | ) 5 | -------------------------------------------------------------------------------- /simpletransformers/language_modeling/__init__.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.config.model_args import LanguageModelingArgs, GenerationArgs 2 | from simpletransformers.language_modeling.language_modeling_model import ( 3 | LanguageModelingModel, 4 | ) 5 | -------------------------------------------------------------------------------- /simpletransformers/language_representation/__init__.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.language_representation.representation_model import ( 2 | RepresentationModel, 3 | ) 4 | -------------------------------------------------------------------------------- /simpletransformers/language_representation/transformer_models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/simpletransformers/language_representation/transformer_models/__init__.py -------------------------------------------------------------------------------- /simpletransformers/language_representation/transformer_models/bert_model.py: 
-------------------------------------------------------------------------------- 1 | from transformers.models.bert.modeling_bert import BertModel, BertPreTrainedModel 2 | 3 | 4 | # Supports both BERT- and RoBERTa-based models 5 | class BertForTextRepresentation(BertPreTrainedModel): 6 | r""" 7 | Outputs: a `List` of token vectors, one list of max_seq token vectors per input sentence 8 | """ # noqa 9 | 10 | def __init__(self, config, weight=None): 11 | super(BertForTextRepresentation, self).__init__(config) 12 | self.bert = BertModel(config) 13 | self.weight = weight 14 | self.init_weights() 15 | 16 | def forward( 17 | self, 18 | input_ids=None, 19 | attention_mask=None, 20 | token_type_ids=None, 21 | position_ids=None, 22 | head_mask=None, 23 | ): 24 | outputs = self.bert( 25 | input_ids, 26 | attention_mask=attention_mask, 27 | token_type_ids=token_type_ids, 28 | position_ids=position_ids, 29 | head_mask=head_mask, 30 | output_hidden_states=True, 31 | ) 32 | hidden_states = outputs[2] 33 | return hidden_states[-1] 34 | 
-------------------------------------------------------------------------------- /simpletransformers/language_representation/transformer_models/gpt2_model.py: -------------------------------------------------------------------------------- 1 | from transformers.models.gpt2.modeling_gpt2 import GPT2Model, GPT2PreTrainedModel 2 | 3 | 4 | # GPT-2-based text representation model 5 | class GPT2ForTextRepresentation(GPT2PreTrainedModel): 6 | r""" 7 | Outputs: a `List` of token vectors, one list of max_seq token vectors per input sentence 8 | """ # noqa 9 | 10 | def __init__(self, config, weight=None): 11 | super(GPT2ForTextRepresentation, self).__init__(config) 12 | self.gpt2 = GPT2Model(config) 13 | self.weight = weight 14 | self.init_weights() 15 | 16 | def resize_token_embeddings(self, new_len): 17 | return self.gpt2.resize_token_embeddings(new_len) 18 | 19 | def forward( 20 | self, 21 | input_ids=None, 22 | attention_mask=None, 23 | token_type_ids=None, 24 | position_ids=None, 25 | head_mask=None, 26 | ): 27 | outputs = self.gpt2( 28 | input_ids, 29 | attention_mask=attention_mask, 30 | token_type_ids=token_type_ids, 31 | position_ids=position_ids, 32 | head_mask=head_mask, 33 | output_hidden_states=True, 34 | ) 35 | hidden_states = outputs[2] 36 | return hidden_states[-1] 37 | 
-------------------------------------------------------------------------------- /simpletransformers/losses/__init__.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.losses.focal_loss import FocalLoss 2 | from simpletransformers.losses.dice_loss import DiceLoss 3 | from simpletransformers.losses.tversky_loss import TverskyLoss 4 | 
-------------------------------------------------------------------------------- /simpletransformers/losses/loss_utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import warnings 3 | from torch.nn import CrossEntropyLoss 4 | from simpletransformers.losses import FocalLoss, DiceLoss, TverskyLoss 5 | 6 | 7 | def init_loss(weight, device, args): 8 | if weight and args.loss_type: 9 | warnings.warn( 10 | "The weight and args.loss_type parameters are set at the same time; " 11 | f"weighted cross entropy loss will be used. To use {args.loss_type}, set weight to None" 12 | ) 13 | if weight: 14 | loss_fct = CrossEntropyLoss(weight=torch.Tensor(weight).to(device)) 15 | elif args.loss_type: 16 | if args.loss_type == "focal": 17 | loss_fct = FocalLoss(**args.loss_args) 18 | elif args.loss_type == "dice": 19 | loss_fct = DiceLoss(**args.loss_args) 20 | elif args.loss_type == "tversky": 21 | loss_fct = TverskyLoss(**args.loss_args) 22 | else: 23 | raise NotImplementedError(f"Unknown loss function: {args.loss_type}") 24 | else: 25 | loss_fct = None 26 | 27 | return loss_fct 28 | 
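Usage sketch for init_loss above (illustrative; not a file in the repository). The SimpleNamespace stands in for the library's model-args object, and it is assumed that FocalLoss is constructible from its defaults, since loss_args defaults to an empty dict.

from types import SimpleNamespace

import torch

from simpletransformers.losses.loss_utils import init_loss

# Stand-in for a model-args object; only the attributes read by init_loss are set.
args = SimpleNamespace(loss_type="focal", loss_args={})

# No class weights, so the loss named by args.loss_type is constructed (FocalLoss here).
loss_fct = init_loss(weight=None, device=torch.device("cpu"), args=args)

# Class weights take precedence: a warning is emitted and weighted cross entropy is used instead.
weighted_loss_fct = init_loss(weight=[1.0, 2.0], device=torch.device("cpu"), args=args)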
-------------------------------------------------------------------------------- /simpletransformers/model.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.classification.classification_model import ClassificationModel 2 | 3 | TransformerModel = ClassificationModel 4 | 
-------------------------------------------------------------------------------- /simpletransformers/ner/__init__.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.config.model_args import NERArgs 2 | from simpletransformers.ner.ner_model import NERModel 3 | 
-------------------------------------------------------------------------------- /simpletransformers/pretrain_retrieval/__init__.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.config.model_args import RetrievalArgs 2 | from simpletransformers.pretrain_retrieval.pretrain_retrieval_model import ( 3 | PretrainRetrievalModel, 4 | ) 5 | 
-------------------------------------------------------------------------------- /simpletransformers/question_answering/__init__.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.config.model_args import QuestionAnsweringArgs 2 | from simpletransformers.question_answering.question_answering_model import ( 3 | QuestionAnsweringModel, 4 | ) 5 | 
-------------------------------------------------------------------------------- /simpletransformers/question_answering/qa_dataset_loading_script/qa_dataset_loading_script.py.lock: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/simpletransformers/question_answering/qa_dataset_loading_script/qa_dataset_loading_script.py.lock 
-------------------------------------------------------------------------------- /simpletransformers/retrieval/__init__.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.config.model_args import RetrievalArgs 2 | from simpletransformers.retrieval.retrieval_model import RetrievalModel 3 | 
-------------------------------------------------------------------------------- /simpletransformers/retrieval/retrieval_dataset_loading_script/retrieval_dataset_loading_script.py.lock: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/simpletransformers/retrieval/retrieval_dataset_loading_script/retrieval_dataset_loading_script.py.lock 
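Usage sketch (illustrative; not a file in the repository): the __init__ modules above only re-export each task model and its args class. A minimal NER fine-tuning example, assuming the usual simpletransformers token-per-row DataFrame format with sentence_id, words, and labels columns; the label set and data below are made up.

import pandas as pd

from simpletransformers.ner import NERArgs, NERModel

# One token per row; sentence_id groups tokens into sentences.
train_df = pd.DataFrame(
    [
        [0, "Simple", "B-LIB"],
        [0, "Transformers", "I-LIB"],
        [0, "is", "O"],
        [0, "handy", "O"],
    ],
    columns=["sentence_id", "words", "labels"],
)

model_args = NERArgs(num_train_epochs=1, train_batch_size=2, overwrite_output_dir=True)
model = NERModel(
    "bert",
    "bert-base-cased",
    labels=["B-LIB", "I-LIB", "O"],
    args=model_args,
    use_cuda=False,
)
model.train_model(train_df)
predictions, raw_outputs = model.predict(["Simple Transformers is handy"])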
-------------------------------------------------------------------------------- /simpletransformers/seq2seq/__init__.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.config.model_args import Seq2SeqArgs 2 | from simpletransformers.seq2seq.seq2seq_model import Seq2SeqModel 3 | 
-------------------------------------------------------------------------------- /simpletransformers/streamlit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThilinaRajapakse/simpletransformers/eb29aa241cf89a1e5f11ba76e73a695fb831932f/simpletransformers/streamlit/__init__.py 
-------------------------------------------------------------------------------- /simpletransformers/t5/__init__.py: -------------------------------------------------------------------------------- 1 | from simpletransformers.config.model_args import T5Args 2 | from simpletransformers.t5.t5_model import T5Model 3 | 
-------------------------------------------------------------------------------- /tests/test_language_representation.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from simpletransformers.language_representation import RepresentationModel 4 | 5 | 6 | @pytest.mark.parametrize( 7 | "model_type, model_name", 8 | [ 9 | ("bert", "bert-base-uncased"), 10 | ("roberta", "roberta-base"), 11 | ("gpt2", "distilgpt2"), 12 | ], 13 | ) 14 | @pytest.mark.parametrize("combine_strategy", ["mean", "concat", None]) 15 | def test_shapes(model_type, model_name, combine_strategy): 16 | sentence_list = ["Example sentence 1", "Example sentence 2"] 17 | # Create a RepresentationModel 18 | model = RepresentationModel( 19 | model_type, 20 | model_name, 21 | use_cuda=False, 22 | args={ 23 | "no_save": True, 24 | "reprocess_input_data": True, 25 | "overwrite_output_dir": True, 26 | }, 27 | ) 28 | encoded_sentences = model.encode_sentences( 29 | sentence_list, combine_strategy=combine_strategy 30 | ) 31 | longest_seq = ( 32 | 3 # RepresentationModel truncates sentences to the longest sentence in the list 33 | ) 34 | if model_type == "bert" or model_type == "roberta": 35 | longest_seq += 2 # account for the [CLS] & [SEP] tokens added by BERT & RoBERTa models 36 | # the last dimension is the embedding dimension; it depends on the model 37 | if combine_strategy is None: 38 | assert encoded_sentences.shape == (len(sentence_list), longest_seq, 768) 39 | if combine_strategy == "concat": 40 | assert encoded_sentences.shape == (len(sentence_list), longest_seq * 768) 41 | if combine_strategy == "mean": 42 | assert encoded_sentences.shape == (len(sentence_list), 768) 43 | 
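Usage sketch (illustrative; not a file in the repository): outside the shape test above, RepresentationModel is typically used to get one fixed-size embedding per sentence by setting a combine_strategy; the model choice and sentences here are arbitrary.

from simpletransformers.language_representation import RepresentationModel

model = RepresentationModel("bert", "bert-base-uncased", use_cuda=False)
sentence_vectors = model.encode_sentences(
    ["The cat sat on the mat.", "Dogs are great."], combine_strategy="mean"
)
# Mean pooling over tokens gives one 768-dimensional vector per sentence for bert-base-uncased.
print(sentence_vectors.shape)  # (2, 768)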
-------------------------------------------------------------------------------- /tests/test_seq2seq.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pandas as pd 4 | import pytest 5 | 6 | from simpletransformers.seq2seq import Seq2SeqArgs, Seq2SeqModel 7 | 8 | 9 | @pytest.mark.parametrize( 10 | "encoder_decoder_type, encoder_decoder_name, encoder_type, use_hf_datasets", 11 | [ 12 | ("bart", "facebook/bart-base", "bart", True), 13 | ("bart", "facebook/bart-base", "bart", False), 14 | ("roberta-base", "bert-base-cased", "roberta", True), 15 | ("roberta-base", "bert-base-cased", "roberta", False), 16 | ], 17 | ) 18 | def test_seq2seq( 19 | encoder_decoder_type, encoder_decoder_name, encoder_type, use_hf_datasets 20 | ): 21 | train_data = [ 22 | ["one", "1"], 23 | ["two", "2"], 24 | ] 25 | 26 | train_df = pd.DataFrame(train_data, columns=["input_text", "target_text"]) 27 | 28 | eval_data = [ 29 | ["three", "3"], 30 | ["four", "4"], 31 | ] 32 | 33 | eval_df = pd.DataFrame(eval_data, columns=["input_text", "target_text"]) 34 | 35 | model_args = { 36 | "reprocess_input_data": True, 37 | "overwrite_output_dir": True, 38 | "max_seq_length": 128, 39 | "train_batch_size": 2, 40 | "num_train_epochs": 2, 41 | "use_multiprocessing": False, 42 | "max_length": 15, 43 | "manual_seed": 4, 44 | "do_sample": False, 45 | "num_return_sequences": 1, 46 | "use_hf_datasets": use_hf_datasets, 47 | } 48 | 49 | if encoder_type == "bart": 50 | model = Seq2SeqModel( 51 | encoder_decoder_type=encoder_decoder_type, 52 | encoder_decoder_name=encoder_decoder_name, 53 | args=model_args, 54 | use_cuda=False, 55 | ) 56 | else: 57 | model = Seq2SeqModel( 58 | encoder_type=encoder_type, 59 | encoder_name=encoder_decoder_type, 60 | decoder_name=encoder_decoder_name, 61 | args=model_args, 62 | use_cuda=False, 63 | ) 64 | 65 | model.train_model(train_df) 66 | 67 | model.eval_model(eval_df) 68 | 69 | model.predict(["five"]) 70 | 
-------------------------------------------------------------------------------- /tests/test_t5.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import pytest 3 | 4 | from simpletransformers.t5 import T5Model 5 | 6 | 7 | def test_t5(): 8 | train_data = [ 9 | ["convert", "one", "1"], 10 | ["convert", "two", "2"], 11 | ] 12 | 13 | train_df = pd.DataFrame(train_data, columns=["prefix", "input_text", "target_text"]) 14 | 15 | eval_data = [ 16 | ["convert", "three", "3"], 17 | ["convert", "four", "4"], 18 | ] 19 | 20 | eval_df = pd.DataFrame(eval_data, columns=["prefix", "input_text", "target_text"]) 21 | 22 | eval_df = train_df.copy()  # NOTE: overwrites the eval_df built from eval_data above 23 | 24 | model_args = { 25 | "reprocess_input_data": True, 26 | "overwrite_output_dir": True, 27 | "max_seq_length": 10, 28 | "train_batch_size": 2, 29 | "num_train_epochs": 2, 30 | "save_model_every_epoch": False, 31 | "max_length": 20, 32 | "num_beams": 1, 33 | } 34 | 35 | # Create T5 Model 36 | model = T5Model("t5", "t5-base", args=model_args, use_cuda=False) 37 | 38 | # Train T5 Model on new task 39 | model.train_model(train_df) 40 | 41 | # Evaluate T5 Model on new task 42 | model.eval_model(eval_df) 43 | 44 | # Predict with trained T5 model 45 | model.predict(["convert: four", "convert: five"]) 46 | 47 | # Load test 48 | model = T5Model("t5", "outputs", args=model_args, use_cuda=False) 49 | 50 | # Evaluate T5 Model on new task 51 | model.eval_model(eval_df) 52 | 53 | # Predict with trained T5 model 54 | model.predict(["convert: four", "convert: five"]) 55 | --------------------------------------------------------------------------------
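Usage sketch (illustrative; not a file in the repository): in contrast to the fine-tuning test above, a pretrained T5 checkpoint can also be used for inference only. This assumes the stock "translate English to German" task prefix that the public T5 checkpoints were trained with.

from simpletransformers.t5 import T5Args, T5Model

model_args = T5Args(max_length=32, num_beams=1)
model = T5Model("t5", "t5-small", args=model_args, use_cuda=False)

# For T5, the task prefix is part of the input text passed to predict().
print(model.predict(["translate English to German: The house is wonderful."]))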