├── LICENSE ├── README.md ├── data ├── x-fact-including-en │ ├── dev.all.tsv │ ├── ood.tsv │ ├── test.all.tsv │ ├── train.all.tsv │ └── zeroshot.tsv └── x-fact │ ├── dev.all.tsv │ ├── label_maps │ ├── aajtak.in.txt │ ├── agi.it.txt │ ├── aosfatos.org.txt │ ├── apublica.org.txt │ ├── cekfakta.com.txt │ ├── cekfakta.tempo.co.txt │ ├── chequeado.com.txt │ ├── correctiv.org.txt │ ├── demagog.org.pl.txt │ ├── dogrulukpayi.com.txt │ ├── en.politifact.com.txt │ ├── factcheck.ge.txt │ ├── factcheck.kz.txt │ ├── factnameh.com.txt │ ├── factual.ro.txt │ ├── faktisk.no.txt │ ├── faktistfakt.com.txt │ ├── faktoje.al.txt │ ├── faktyoxla.info.txt │ ├── fr.africacheck.org.txt │ ├── gujarati.newschecker.in.txt │ ├── hindi.newschecker.in.txt │ ├── istinomer.rs.txt │ ├── kallxo.com.txt │ ├── marathi.newschecker.in.txt │ ├── master_mapping.tsv │ ├── misbar.com.txt │ ├── nieuwscheckers.nl.txt │ ├── pagellapolitica.it.txt │ ├── piaui.folha.uol.com.br.txt │ ├── poligrafo.sapo.pt.txt │ ├── politifact.com.txt │ ├── punjabi.newschecker.in.txt │ ├── srilanka.factcrescendo.com.txt │ ├── teyit.org.txt │ └── youturn.in.txt │ ├── ood.tsv │ ├── test.all.tsv │ ├── train.all.tsv │ └── zeroshot.tsv ├── requirements_tested_py370.txt ├── scripts ├── analyse_data.py ├── calculate_fscore.py ├── format_xfact_to_sst2.py ├── majority_class.py ├── randomize_snippets.py ├── reformat_xfact_for_hf.py ├── remove_labels.py ├── run_all_single_models.sh └── run_commands.txt └── transformers ├── .circleci ├── config.yml └── deploy.sh ├── .coveragerc ├── .github ├── ISSUE_TEMPLATE │ ├── ---new-benchmark.md │ ├── --new-model-addition.md │ ├── bug-report.md │ ├── feature-request.md │ ├── migration.md │ └── question-help.md ├── stale.yml └── workflows │ ├── github-push.yml │ ├── github-torch-hub.yml │ ├── self-push.yml │ └── self-scheduled.yml ├── .gitignore ├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── README_extras.md ├── analyse_data.py ├── calculate_fscore.py ├── codecov.yml ├── dataset ├── README.md ├── dev.tsv ├── ood.tsv ├── test.tsv ├── train.tsv └── zeroshot.tsv ├── deploy_multi_version_doc.sh ├── docker ├── transformers-cpu │ └── Dockerfile ├── transformers-gpu │ └── Dockerfile ├── transformers-pytorch-cpu │ └── Dockerfile ├── transformers-pytorch-gpu │ └── Dockerfile ├── transformers-tensorflow-cpu │ └── Dockerfile └── transformers-tensorflow-gpu │ └── Dockerfile ├── docs ├── Makefile ├── README.md └── source │ ├── _static │ ├── css │ │ ├── Calibre-Light.ttf │ │ ├── Calibre-Medium.otf │ │ ├── Calibre-Regular.otf │ │ ├── Calibre-Thin.otf │ │ ├── code-snippets.css │ │ └── huggingface.css │ └── js │ │ ├── custom.js │ │ └── huggingface_logo.svg │ ├── benchmarks.rst │ ├── bertology.rst │ ├── conf.py │ ├── contributing.md │ ├── converting_tensorflow_models.rst │ ├── examples.md │ ├── favicon.ico │ ├── glossary.rst │ ├── imgs │ ├── local_attention_mask.png │ ├── transformers_logo_name.png │ ├── warmup_constant_schedule.png │ ├── warmup_cosine_hard_restarts_schedule.png │ ├── warmup_cosine_schedule.png │ ├── warmup_cosine_warm_restarts_schedule.png │ └── warmup_linear_schedule.png │ ├── index.rst │ ├── installation.md │ ├── main_classes │ ├── configuration.rst │ ├── model.rst │ ├── optimizer_schedules.rst │ ├── pipelines.rst │ ├── processors.rst │ ├── tokenizer.rst │ └── trainer.rst │ ├── migration.md │ ├── model_doc │ ├── albert.rst │ ├── auto.rst │ ├── bart.rst │ ├── bert.rst │ ├── camembert.rst │ ├── ctrl.rst │ ├── dialogpt.rst │ ├── distilbert.rst │ ├── electra.rst │ ├── encoderdecoder.rst │ ├── 
flaubert.rst │ ├── gpt.rst │ ├── gpt2.rst │ ├── longformer.rst │ ├── marian.rst │ ├── mobilebert.rst │ ├── reformer.rst │ ├── retribert.rst │ ├── roberta.rst │ ├── t5.rst │ ├── transformerxl.rst │ ├── xlm.rst │ ├── xlmroberta.rst │ └── xlnet.rst │ ├── model_sharing.rst │ ├── model_summary.rst │ ├── multilingual.rst │ ├── notebooks.md │ ├── philosophy.rst │ ├── preprocessing.rst │ ├── pretrained_models.rst │ ├── quicktour.rst │ ├── task_summary.rst │ ├── tokenizer_summary.rst │ ├── torchscript.rst │ └── training.rst ├── examples ├── README.md ├── adversarial │ ├── README.md │ ├── run_hans.py │ └── utils_hans.py ├── benchmarking │ ├── plot_csv_file.py │ ├── run_benchmark.py │ └── run_benchmark_tf.py ├── bert-loses-patience │ ├── README.md │ ├── pabee │ │ ├── __init__.py │ │ ├── modeling_pabee_albert.py │ │ └── modeling_pabee_bert.py │ ├── run_glue_with_pabee.py │ └── test_run_glue_with_pabee.py ├── bertology │ └── run_bertology.py ├── contrib │ ├── README.md │ ├── mm-imdb │ │ ├── README.md │ │ ├── run_mmimdb.py │ │ └── utils_mmimdb.py │ ├── run_camembert.py │ ├── run_openai_gpt.py │ ├── run_swag.py │ └── run_transfo_xl.py ├── distillation │ ├── README.md │ ├── distiller.py │ ├── grouped_batch_sampler.py │ ├── lm_seqs_dataset.py │ ├── requirements.txt │ ├── run_squad_w_distillation.py │ ├── scripts │ │ ├── binarized_data.py │ │ ├── extract.py │ │ ├── extract_distilbert.py │ │ └── token_counts.py │ ├── train.py │ ├── training_configs │ │ ├── distilbert-base-cased.json │ │ ├── distilbert-base-multilingual-cased.json │ │ ├── distilbert-base-uncased.json │ │ ├── distilgpt2.json │ │ └── distilroberta-base.json │ └── utils.py ├── language-modeling │ ├── README.md │ └── run_language_modeling.py ├── lightning_base.py ├── longform-qa │ ├── README.md │ ├── eli5_app.py │ └── eli5_utils.py ├── movement-pruning │ ├── README.md │ ├── Saving_PruneBERT.ipynb │ ├── bertarize.py │ ├── counts_parameters.py │ ├── emmental │ │ ├── __init__.py │ │ ├── configuration_bert_masked.py │ │ ├── modeling_bert_masked.py │ │ └── modules │ │ │ ├── __init__.py │ │ │ ├── binarizer.py │ │ │ └── masked_nn.py │ ├── masked_run_glue.py │ ├── masked_run_squad.py │ └── requirements.txt ├── multiple-choice │ ├── README.md │ ├── run_multiple_choice.py │ ├── run_tf_multiple_choice.py │ └── utils_multiple_choice.py ├── question-answering │ ├── README.md │ ├── run_squad.py │ └── run_tf_squad.py ├── requirements.txt ├── seq2seq │ ├── README.md │ ├── __init__.py │ ├── bertabs │ │ ├── README.md │ │ ├── __init__.py │ │ ├── configuration_bertabs.py │ │ ├── convert_bertabs_original_pytorch_checkpoint.py │ │ ├── modeling_bertabs.py │ │ ├── requirements.txt │ │ ├── run_summarization.py │ │ ├── test_utils_summarization.py │ │ └── utils_summarization.py │ ├── callbacks.py │ ├── distillation.py │ ├── finetune.py │ ├── finetune.sh │ ├── finetune_bart_tiny.sh │ ├── finetune_t5.sh │ ├── initialization_utils.py │ ├── run_distiller.sh │ ├── run_eval.py │ ├── test_seq2seq_examples.py │ ├── train_distilbart_cnn.sh │ ├── train_distilbart_xsum.sh │ └── utils.py ├── test_examples.py ├── text-classification │ ├── README.md │ ├── run_glue.py │ ├── run_pl.sh │ ├── run_pl_glue.py │ ├── run_tf_glue.py │ ├── run_xfact.py │ ├── run_xfact_evidence.py │ ├── run_xfact_evidence_attention.py │ ├── run_xnli.py │ └── xfact_evidence.py ├── text-generation │ ├── README.md │ ├── pplm │ │ ├── README.md │ │ ├── imgs │ │ │ ├── headfigure.png │ │ │ └── wooly.png │ │ ├── pplm_classification_head.py │ │ ├── run_pplm.py │ │ └── run_pplm_discrim_train.py │ └── run_generation.py ├── 
token-classification │ ├── README.md │ ├── run.sh │ ├── run_ner.py │ ├── run_pl.sh │ ├── run_pl_ner.py │ ├── run_tf_ner.py │ ├── scripts │ │ └── preprocess.py │ ├── test_ner_examples.py │ └── utils_ner.py └── xla_spawn.py ├── format_xfact_to_sst2.py ├── hubconf.py ├── majority_class.py ├── model_cards ├── DeepPavlov │ ├── bert-base-bg-cs-pl-ru-cased │ │ └── README.md │ ├── bert-base-cased-conversational │ │ └── README.md │ ├── bert-base-multilingual-cased-sentence │ │ └── README.md │ ├── rubert-base-cased-conversational │ │ └── README.md │ ├── rubert-base-cased-sentence │ │ └── README.md │ └── rubert-base-cased │ │ └── README.md ├── Hate-speech-CNERG │ ├── dehatebert-mono-arabic │ │ └── README.md │ ├── dehatebert-mono-english │ │ └── README.md │ ├── dehatebert-mono-french │ │ └── README.md │ ├── dehatebert-mono-german │ │ └── README.md │ ├── dehatebert-mono-indonesian │ │ └── README.md │ ├── dehatebert-mono-italian │ │ └── README.md │ ├── dehatebert-mono-polish │ │ └── README.md │ ├── dehatebert-mono-portugese │ │ └── README.md │ └── dehatebert-mono-spanish │ │ └── README.md ├── HooshvareLab │ ├── bert-base-parsbert-armanner-uncased │ │ └── README.md │ ├── bert-base-parsbert-ner-uncased │ │ └── README.md │ ├── bert-base-parsbert-peymaner-uncased │ │ └── README.md │ └── bert-base-parsbert-uncased │ │ └── README.md ├── KB │ ├── albert-base-swedish-cased-alpha │ │ └── README.md │ ├── bert-base-swedish-cased-ner │ │ └── README.md │ └── bert-base-swedish-cased │ │ └── README.md ├── LorenzoDeMattei │ └── GePpeTto │ │ └── README.md ├── MoseliMotsoehli │ └── TswanaBert │ │ └── README.md ├── Musixmatch │ ├── umberto-commoncrawl-cased-v1 │ │ └── README.md │ └── umberto-wikipedia-uncased-v1 │ │ └── README.md ├── NLP4H │ └── ms_bert │ │ └── README.md ├── NeuML │ ├── bert-small-cord19-squad2 │ │ └── README.md │ ├── bert-small-cord19 │ │ └── README.md │ └── bert-small-cord19qa │ │ └── README.md ├── SparkBeyond │ └── roberta-large-sts-b │ │ └── README.md ├── Tereveni-AI │ └── gpt2-124M-uk-fiction │ │ └── README.md ├── TurkuNLP │ ├── bert-base-finnish-cased-v1 │ │ └── README.md │ └── bert-base-finnish-uncased-v1 │ │ └── README.md ├── ViktorAlm │ └── electra-base-norwegian-uncased-discriminator │ │ └── README.md ├── a-ware │ ├── bart-squadv2 │ │ └── README.md │ ├── roberta-large-squad-classification │ │ └── README.md │ └── xlmroberta-squadv2 │ │ └── README.md ├── activebus │ ├── BERT-DK_laptop │ │ └── README.md │ ├── BERT-DK_rest │ │ └── README.md │ ├── BERT-PT_laptop │ │ └── README.md │ ├── BERT-PT_rest │ │ └── README.md │ ├── BERT-XD_Review │ │ └── README.md │ └── BERT_Review │ │ └── README.md ├── ahotrod │ ├── albert_xxlargev1_squad2_512 │ │ └── README.md │ ├── electra_large_discriminator_squad2_512 │ │ └── README.md │ ├── roberta_large_squad2 │ │ └── README.md │ └── xlnet_large_squad2_512 │ │ └── README.md ├── albert-base-v1-README.md ├── albert-xxlarge-v2-README.md ├── allegro │ ├── herbert-klej-cased-tokenizer-v1 │ │ └── README.md │ └── herbert-klej-cased-v1 │ │ └── README.md ├── allenai │ ├── biomed_roberta_base │ │ └── README.md │ ├── longformer-base-4096-extra.pos.embd.only │ │ └── README.md │ ├── longformer-base-4096 │ │ └── README.md │ ├── scibert_scivocab_cased │ │ └── README.md │ └── scibert_scivocab_uncased │ │ └── README.md ├── aodiniz │ ├── bert_uncased_L-10_H-512_A-8_cord19-200616 │ │ └── README.md │ ├── bert_uncased_L-10_H-512_A-8_cord19-200616_squad2 │ │ └── README.md │ ├── bert_uncased_L-2_H-512_A-8_cord19-200616 │ │ └── README.md │ └── bert_uncased_L-4_H-256_A-4_cord19-200616 │ │ └── 
README.md ├── asafaya │ ├── bert-base-arabic │ │ └── README.md │ ├── bert-large-arabic │ │ └── README.md │ ├── bert-medium-arabic │ │ └── README.md │ └── bert-mini-arabic │ │ └── README.md ├── aubmindlab │ ├── bert-base-arabert │ │ └── README.md │ └── bert-base-arabertv01 │ │ └── README.md ├── bart-large-cnn │ └── README.md ├── bart-large-xsum │ └── README.md ├── bayartsogt │ └── albert-mongolian │ │ └── README.md ├── bert-base-cased-README.md ├── bert-base-chinese-README.md ├── bert-base-german-cased-README.md ├── bert-base-german-dbmdz-cased-README.md ├── bert-base-german-dbmdz-uncased-README.md ├── bert-base-multilingual-cased-README.md ├── bert-base-multilingual-uncased-README.md ├── bert-base-uncased-README.md ├── bert-large-cased-README.md ├── binwang │ └── xlnet-base-cased │ │ └── README.md ├── camembert-base-README.md ├── camembert │ ├── camembert-base-ccnet-4gb │ │ └── README.md │ ├── camembert-base-ccnet │ │ └── README.md │ ├── camembert-base-oscar-4gb │ │ └── README.md │ ├── camembert-base-wikipedia-4gb │ │ └── README.md │ └── camembert-large │ │ └── README.md ├── canwenxu │ └── BERT-of-Theseus-MNLI │ │ └── README.md ├── chrisliu298 │ └── arxiv_ai_gpt2 │ │ └── README.md ├── clue │ ├── albert_chinese_small │ │ └── README.md │ ├── albert_chinese_tiny │ │ └── README.md │ ├── roberta_chinese_3L312_clue_tiny │ │ └── README.md │ ├── roberta_chinese_base │ │ └── README.md │ ├── roberta_chinese_large │ │ └── README.md │ └── xlnet_chinese_large │ │ └── README.md ├── codegram │ └── calbert-base-uncased │ │ └── README.md ├── daigo │ └── bert-base-japanese-sentiment │ │ └── README.md ├── dbmdz │ ├── bert-base-german-cased │ │ └── README.md │ ├── bert-base-german-europeana-cased │ │ └── README.md │ ├── bert-base-german-europeana-uncased │ │ └── README.md │ ├── bert-base-german-uncased │ │ └── README.md │ ├── bert-base-italian-cased │ │ └── README.md │ ├── bert-base-italian-uncased │ │ └── README.md │ ├── bert-base-italian-xxl-cased │ │ └── README.md │ ├── bert-base-italian-xxl-uncased │ │ └── README.md │ ├── bert-base-turkish-128k-cased │ │ └── README.md │ ├── bert-base-turkish-128k-uncased │ │ └── README.md │ ├── bert-base-turkish-cased │ │ └── README.md │ ├── bert-base-turkish-uncased │ │ └── README.md │ ├── distilbert-base-turkish-cased │ │ └── README.md │ ├── electra-base-turkish-cased-discriminator │ │ └── README.md │ └── electra-small-turkish-cased-discriminator │ │ └── README.md ├── deepset │ ├── bert-base-german-cased-oldvocab │ │ └── README.md │ ├── quora_dedup_bert_base │ │ └── README.md │ ├── roberta-base-squad2-covid │ │ └── README.md │ ├── roberta-base-squad2 │ │ └── README.md │ └── sentence_bert │ │ └── README.md ├── digitalepidemiologylab │ └── covid-twitter-bert │ │ └── README.md ├── distilbert-base-multilingual-cased-README.md ├── distilbert-base-uncased-README.md ├── distilbert-base-uncased-distilled-squad-README.md ├── distilgpt2-README.md ├── distilroberta-base-README.md ├── djstrong │ └── bg_cs_pl_ru_cased_L-12_H-768_A-12 │ │ └── README.md ├── dkleczek │ ├── bert-base-polish-cased-v1 │ │ └── README.md │ └── bert-base-polish-uncased-v1 │ │ └── README.md ├── dumitrescustefan │ ├── bert-base-romanian-cased-v1 │ │ └── README.md │ └── bert-base-romanian-uncased-v1 │ │ └── README.md ├── elgeish │ ├── cs224n-squad2.0-albert-base-v2 │ │ └── README.md │ ├── cs224n-squad2.0-albert-large-v2 │ │ └── README.md │ ├── cs224n-squad2.0-albert-xxlarge-v1 │ │ └── README.md │ ├── cs224n-squad2.0-distilbert-base-uncased │ │ └── README.md │ └── cs224n-squad2.0-roberta-base │ │ └── README.md 
├── emilyalsentzer │ ├── Bio_ClinicalBERT │ │ └── README.md │ └── Bio_Discharge_Summary_BERT │ │ └── README.md ├── facebook │ ├── bart-large-cnn │ │ └── README.md │ └── bart-large │ │ └── README.md ├── fmikaelian │ ├── camembert-base-fquad │ │ └── README.md │ ├── camembert-base-squad │ │ └── README.md │ └── flaubert-base-uncased-squad │ │ └── README.md ├── fran-martinez │ └── scibert_scivocab_cased_ner_jnlpba │ │ └── README.md ├── gaochangkuan │ └── model_dir │ │ └── README.md ├── giganticode │ └── StackOBERTflow-comments-small-v1 │ │ └── README.md ├── google │ ├── bert_uncased_L-10_H-128_A-2 │ │ └── README.md │ ├── bert_uncased_L-10_H-256_A-4 │ │ └── README.md │ ├── bert_uncased_L-10_H-512_A-8 │ │ └── README.md │ ├── bert_uncased_L-10_H-768_A-12 │ │ └── README.md │ ├── bert_uncased_L-12_H-128_A-2 │ │ └── README.md │ ├── bert_uncased_L-12_H-256_A-4 │ │ └── README.md │ ├── bert_uncased_L-12_H-512_A-8 │ │ └── README.md │ ├── bert_uncased_L-12_H-768_A-12 │ │ └── README.md │ ├── bert_uncased_L-2_H-128_A-2 │ │ └── README.md │ ├── bert_uncased_L-2_H-256_A-4 │ │ └── README.md │ ├── bert_uncased_L-2_H-512_A-8 │ │ └── README.md │ ├── bert_uncased_L-2_H-768_A-12 │ │ └── README.md │ ├── bert_uncased_L-4_H-128_A-2 │ │ └── README.md │ ├── bert_uncased_L-4_H-256_A-4 │ │ └── README.md │ ├── bert_uncased_L-4_H-512_A-8 │ │ └── README.md │ ├── bert_uncased_L-4_H-768_A-12 │ │ └── README.md │ ├── bert_uncased_L-6_H-128_A-2 │ │ └── README.md │ ├── bert_uncased_L-6_H-256_A-4 │ │ └── README.md │ ├── bert_uncased_L-6_H-512_A-8 │ │ └── README.md │ ├── bert_uncased_L-6_H-768_A-12 │ │ └── README.md │ ├── bert_uncased_L-8_H-128_A-2 │ │ └── README.md │ ├── bert_uncased_L-8_H-256_A-4 │ │ └── README.md │ ├── bert_uncased_L-8_H-512_A-8 │ │ └── README.md │ ├── bert_uncased_L-8_H-768_A-12 │ │ └── README.md │ ├── electra-base-discriminator │ │ └── README.md │ ├── electra-base-generator │ │ └── README.md │ ├── electra-large-discriminator │ │ └── README.md │ ├── electra-large-generator │ │ └── README.md │ ├── electra-small-discriminator │ │ └── README.md │ ├── electra-small-generator │ │ └── README.md │ ├── mobilebert-uncased │ │ └── README.md │ ├── reformer-crime-and-punishment │ │ └── README.md │ └── reformer-enwik8 │ │ └── README.md ├── gpt2-README.md ├── gsarti │ ├── biobert-nli │ │ └── README.md │ ├── covidbert-nli │ │ └── README.md │ └── scibert-nli │ │ └── README.md ├── healx │ ├── gpt-2-pubmed-large │ │ └── README.md │ └── gpt-2-pubmed-medium │ │ └── README.md ├── henryk │ ├── bert-base-multilingual-cased-finetuned-dutch-squad2 │ │ └── README.md │ ├── bert-base-multilingual-cased-finetuned-polish-squad1 │ │ └── README.md │ └── bert-base-multilingual-cased-finetuned-polish-squad2 │ │ └── README.md ├── huggingface │ ├── CodeBERTa-language-id │ │ └── README.md │ └── CodeBERTa-small-v1 │ │ └── README.md ├── huseinzol05 │ ├── albert-base-bahasa-cased │ │ └── README.md │ ├── albert-tiny-bahasa-cased │ │ └── README.md │ ├── bert-base-bahasa-cased │ │ └── README.md │ ├── electra-base-discriminator-bahasa-cased │ │ └── README.md │ ├── electra-base-generator-bahasa-cased │ │ └── README.md │ ├── electra-small-discriminator-bahasa-cased │ │ └── README.md │ ├── electra-small-generator-bahasa-cased │ │ └── README.md │ ├── gpt2-117M-bahasa-cased │ │ └── README.md │ ├── gpt2-345M-bahasa-cased │ │ └── README.md │ ├── t5-base-bahasa-cased │ │ └── README.md │ ├── t5-small-bahasa-cased │ │ └── README.md │ ├── tiny-bert-bahasa-cased │ │ └── README.md │ └── xlnet-base-bahasa-cased │ │ └── README.md ├── illuin │ ├── camembert-base-fquad │ │ 
└── README.md │ └── camembert-large-fquad │ │ └── README.md ├── ipuneetrathore │ └── bert-base-cased-finetuned-finBERT │ │ └── README.md ├── iuliaturc │ └── bert_uncased_L-2_H-128_A-2 │ │ └── README.md ├── ixa-ehu │ └── berteus-base-cased │ │ └── README.md ├── jannesg │ └── bertsson │ │ └── README.md ├── jplu │ ├── tf-camembert-base │ │ └── README.md │ ├── tf-xlm-r-ner-40-lang │ │ └── README.md │ ├── tf-xlm-roberta-base │ │ └── README.md │ └── tf-xlm-roberta-large │ │ └── README.md ├── julien-c │ ├── EsperBERTo-small-pos │ │ └── README.md │ ├── EsperBERTo-small │ │ └── README.md │ ├── bert-xsmall-dummy │ │ └── README.md │ └── dummy-unknown │ │ └── README.md ├── krevas │ ├── finance-koelectra-base-discriminator │ │ └── README.md │ ├── finance-koelectra-base-generator │ │ └── README.md │ ├── finance-koelectra-small-discriminator │ │ └── README.md │ └── finance-koelectra-small-generator │ │ └── README.md ├── ktrapeznikov │ ├── albert-xlarge-v2-squad-v2 │ │ └── README.md │ ├── biobert_v1.1_pubmed_squad_v2 │ │ └── README.md │ └── scibert_scivocab_uncased_squad_v2 │ │ └── README.md ├── lserinol │ └── bert-turkish-question-answering │ │ └── README.md ├── lvwerra │ ├── bert-imdb │ │ └── README.md │ ├── gpt2-imdb-ctrl │ │ └── README.md │ ├── gpt2-imdb-pos │ │ └── README.md │ ├── gpt2-imdb │ │ └── README.md │ └── gpt2-medium-taboo │ │ └── README.md ├── lysandre │ ├── arxiv-nlp │ │ └── README.md │ └── arxiv │ │ └── README.md ├── microsoft │ ├── DialoGPT-large │ │ └── README.md │ ├── DialoGPT-medium │ │ └── README.md │ ├── DialoGPT-small │ │ └── README.md │ ├── MiniLM-L12-H384-uncased │ │ └── README.md │ └── Multilingual-MiniLM-L12-H384 │ │ └── README.md ├── monologg │ ├── koelectra-base-discriminator │ │ └── README.md │ ├── koelectra-base-generator │ │ └── README.md │ ├── koelectra-small-discriminator │ │ └── README.md │ └── koelectra-small-generator │ │ └── README.md ├── monsoon-nlp │ └── hindi-bert │ │ └── README.md ├── moumeneb1 │ └── flaubert-base-cased-ecology_crisis │ │ └── README.md ├── mrm8488 │ ├── CodeBERTaPy │ │ └── README.md │ ├── GPT-2-finetuned-CORD19 │ │ └── README.md │ ├── GPT-2-finetuned-covid-bio-medrxiv │ │ └── README.md │ ├── RuPERTa-base-finetuned-ner │ │ └── README.md │ ├── RuPERTa-base-finetuned-pos │ │ └── README.md │ ├── TinyBERT-spanish-uncased-finetuned-ner │ │ └── README.md │ ├── bert-base-spanish-wwm-cased-finetuned-spa-squad2-es │ │ └── README.md │ ├── bert-italian-finedtuned-squadv1-it-alfa │ │ └── README.md │ ├── bert-medium-finetuned-squadv2 │ │ └── README.md │ ├── bert-mini-finetuned-squadv2 │ │ └── README.md │ ├── bert-multi-cased-finedtuned-xquad-tydiqa-goldp │ │ └── README.md │ ├── bert-multi-cased-finetuned-xquadv1 │ │ └── README.md │ ├── bert-multi-uncased-finetuned-xquadv1 │ │ └── README.md │ ├── bert-small-finetuned-squadv2 │ │ └── README.md │ ├── bert-small-finetuned-typo-detection │ │ └── README.md │ ├── bert-spanish-cased-finetuned-ner │ │ └── README.md │ ├── bert-spanish-cased-finetuned-pos-syntax │ │ └── README.md │ ├── bert-spanish-cased-finetuned-pos │ │ └── README.md │ ├── bert-tiny-finetuned-squadv2 │ │ └── README.md │ ├── bert-uncased-finetuned-qnli │ │ └── README.md │ ├── chEMBL_smiles_v1 │ │ └── README.md │ ├── codeBERTaJS │ │ └── README.md │ ├── distilbert-base-multi-cased-finetuned-typo-detection │ │ └── README.md │ ├── distilbert-multi-finetuned-for-xqua-on-tydiqa │ │ └── README.md │ ├── distill-bert-base-spanish-wwm-cased-finetuned-spa-squad2-es │ │ └── README.md │ ├── distilroberta-base-finetuned-sentiment │ │ └── README.md │ ├── 
electra-base-finetuned-squadv1 │ │ └── README.md │ ├── electra-small-finetuned-squadv2 │ │ └── README.md │ ├── electricidad-small-discriminator │ │ └── README.md │ ├── electricidad-small-finetuned-squadv1-es │ │ └── README.md │ ├── gpt2-imdb-neg │ │ └── README.md │ ├── gpt2-imdb-neutral │ │ └── README.md │ ├── longformer-base-4096-finetuned-squadv2 │ │ └── README.md │ ├── roberta-large-finetuned-wsc │ │ └── README.md │ ├── spanbert-base-finetuned-squadv1 │ │ └── README.md │ ├── spanbert-base-finetuned-squadv2 │ │ └── README.md │ ├── spanbert-base-finetuned-tacred │ │ └── README.md │ ├── spanbert-finetuned-squadv1 │ │ └── README.md │ ├── spanbert-finetuned-squadv2 │ │ └── README.md │ ├── spanbert-large-finetuned-squadv1 │ │ └── README.md │ ├── spanbert-large-finetuned-squadv2 │ │ └── README.md │ ├── spanbert-large-finetuned-tacred │ │ └── README.md │ ├── t5-base-finetuned-emotion │ │ └── README.md │ ├── t5-base-finetuned-imdb-sentiment │ │ └── README.md │ ├── t5-base-finetuned-sarcasm-twitter │ │ └── README.md │ ├── t5-base-finetuned-span-sentiment-extraction │ │ └── README.md │ ├── t5-base-finetuned-squadv2 │ │ └── README.md │ ├── t5-base-finetuned-summarize-news │ │ └── README.md │ └── xlm-multi-finetuned-xquadv1 │ │ └── README.md ├── nlpaueb │ └── bert-base-greek-uncased-v1 │ │ └── README.md ├── nlptown │ └── bert-base-multilingual-uncased-sentiment │ │ └── README.md ├── nyu-mll │ ├── roberta-base-100M-1 │ │ └── README.md │ ├── roberta-base-100M-2 │ │ └── README.md │ ├── roberta-base-100M-3 │ │ └── README.md │ ├── roberta-base-10M-1 │ │ └── README.md │ ├── roberta-base-10M-2 │ │ └── README.md │ ├── roberta-base-10M-3 │ │ └── README.md │ ├── roberta-base-1B-1 │ │ └── README.md │ ├── roberta-base-1B-2 │ │ └── README.md │ ├── roberta-base-1B-3 │ │ └── README.md │ ├── roberta-med-small-1M-1 │ │ └── README.md │ ├── roberta-med-small-1M-2 │ │ └── README.md │ ├── roberta-med-small-1M-3 │ │ └── README.md │ └── roberta_1M_to_1B │ │ └── README.md ├── oliverguhr │ └── german-sentiment-bert │ │ └── README.md ├── pradhyra │ └── AWSBlogBert │ │ └── README.md ├── redewiedergabe │ └── bert-base-historical-german-rw-cased │ │ └── README.md ├── roberta-base-README.md ├── roberta-large-README.md ├── roberta-large-mnli-README.md ├── savasy │ ├── bert-base-turkish-ner-cased │ │ └── README.md │ ├── bert-base-turkish-sentiment-cased │ │ └── README.md │ └── bert-base-turkish-squad │ │ └── README.md ├── schmidek │ └── electra-small-cased │ │ └── README.md ├── seiya │ └── oubiobert-base-uncased │ │ └── README.md ├── severinsimmler │ └── literary-german-bert │ │ ├── README.md │ │ ├── kfold.png │ │ └── prosa-jahre.png ├── seyonec │ └── ChemBERTa-zinc-base-v1 │ │ └── README.md ├── shoarora │ ├── alectra-small-owt │ │ └── README.md │ └── electra-small-owt │ │ └── README.md ├── spentaur │ └── yelp │ │ └── README.md ├── surajp │ ├── SanBERTa │ │ └── README.md │ └── albert-base-sanskrit │ │ └── README.md ├── t5-11b-README.md ├── t5-3b-README.md ├── t5-base-README.md ├── t5-large-README.md ├── t5-small-README.md ├── tblard │ └── tf-allocine │ │ └── README.md ├── twmkn9 │ ├── albert-base-v2-squad2 │ │ └── README.md │ ├── bert-base-uncased-squad2 │ │ └── README.md │ ├── distilbert-base-uncased-squad2 │ │ └── README.md │ └── distilroberta-base-squad2 │ │ └── README.md ├── valhalla │ ├── bart-large-finetuned-squadv1 │ │ └── README.md │ ├── electra-base-discriminator-finetuned_squadv1 │ │ └── README.md │ ├── longformer-base-4096-finetuned-squadv1 │ │ └── README.md │ └── t5-base-squad │ │ └── README.md ├── voidful │ ├── 
albert_chinese_base │ │ └── README.md │ ├── albert_chinese_large │ │ └── README.md │ ├── albert_chinese_small │ │ └── README.md │ ├── albert_chinese_tiny │ │ └── README.md │ ├── albert_chinese_xlarge │ │ └── README.md │ └── albert_chinese_xxlarge │ │ └── README.md ├── wptoux │ └── albert-chinese-large-qa │ │ └── README.md ├── xlm-mlm-en-2048-README.md └── xlm-roberta-base-README.md ├── notebooks ├── 01-training-tokenizers.ipynb ├── 02-transformers.ipynb ├── 03-pipelines.ipynb ├── 04-onnx-export.ipynb ├── 05-benchmark.ipynb └── README.md ├── randomize_snippets.py ├── remove_labels.py ├── requirements.txt ├── run_all_single_models.sh ├── run_commands.txt ├── setup.cfg ├── setup.py ├── src └── transformers │ ├── __init__.py │ ├── activations.py │ ├── additions │ ├── __init__.py │ └── additional_utils.py │ ├── benchmark │ ├── __init__.py │ ├── benchmark.py │ ├── benchmark_args.py │ ├── benchmark_args_tf.py │ ├── benchmark_args_utils.py │ ├── benchmark_tf.py │ └── benchmark_utils.py │ ├── commands │ ├── __init__.py │ ├── convert.py │ ├── download.py │ ├── env.py │ ├── run.py │ ├── serving.py │ ├── train.py │ ├── transformers_cli.py │ └── user.py │ ├── configuration_albert.py │ ├── configuration_auto.py │ ├── configuration_bart.py │ ├── configuration_bert.py │ ├── configuration_camembert.py │ ├── configuration_ctrl.py │ ├── configuration_distilbert.py │ ├── configuration_electra.py │ ├── configuration_encoder_decoder.py │ ├── configuration_flaubert.py │ ├── configuration_gpt2.py │ ├── configuration_longformer.py │ ├── configuration_marian.py │ ├── configuration_mmbt.py │ ├── configuration_mobilebert.py │ ├── configuration_openai.py │ ├── configuration_reformer.py │ ├── configuration_retribert.py │ ├── configuration_roberta.py │ ├── configuration_t5.py │ ├── configuration_transfo_xl.py │ ├── configuration_utils.py │ ├── configuration_xlm.py │ ├── configuration_xlm_roberta.py │ ├── configuration_xlnet.py │ ├── convert_albert_original_tf_checkpoint_to_pytorch.py │ ├── convert_bart_original_pytorch_checkpoint_to_pytorch.py │ ├── convert_bert_original_tf_checkpoint_to_pytorch.py │ ├── convert_bert_pytorch_checkpoint_to_original_tf.py │ ├── convert_dialogpt_original_pytorch_checkpoint_to_pytorch.py │ ├── convert_electra_original_tf_checkpoint_to_pytorch.py │ ├── convert_gpt2_original_tf_checkpoint_to_pytorch.py │ ├── convert_graph_to_onnx.py │ ├── convert_longformer_original_pytorch_lightning_to_pytorch.py │ ├── convert_marian_to_pytorch.py │ ├── convert_mobilebert_original_tf_checkpoint_to_pytorch.py │ ├── convert_openai_original_tf_checkpoint_to_pytorch.py │ ├── convert_pytorch_checkpoint_to_tf2.py │ ├── convert_reformer_trax_checkpoint_to_pytorch.py │ ├── convert_roberta_original_pytorch_checkpoint_to_pytorch.py │ ├── convert_t5_original_tf_checkpoint_to_pytorch.py │ ├── convert_transfo_xl_original_tf_checkpoint_to_pytorch.py │ ├── convert_xlm_original_pytorch_checkpoint_to_pytorch.py │ ├── convert_xlnet_original_tf_checkpoint_to_pytorch.py │ ├── criterions │ ├── __init__.py │ ├── entropic_regularizer.py │ ├── focal_loss.py │ ├── input_gradients_norm.py │ ├── label_smoothed_cross_entropy.py │ └── temperature_scaling.py │ ├── data │ ├── __init__.py │ ├── data_collator.py │ ├── datasets │ │ ├── __init__.py │ │ ├── glue.py │ │ └── language_modeling.py │ ├── metrics │ │ ├── __init__.py │ │ └── squad_metrics.py │ └── processors │ │ ├── __init__.py │ │ ├── glue.py │ │ ├── squad.py │ │ ├── utils.py │ │ ├── xfact.py │ │ ├── xfact_evidence.py │ │ └── xnli.py │ ├── file_utils.py │ ├── generation_tf_utils.py 
│ ├── generation_utils.py │ ├── hf_api.py │ ├── hf_argparser.py │ ├── modelcard.py │ ├── modeling_albert.py │ ├── modeling_auto.py │ ├── modeling_bart.py │ ├── modeling_bert.py │ ├── modeling_camembert.py │ ├── modeling_ctrl.py │ ├── modeling_distilbert.py │ ├── modeling_electra.py │ ├── modeling_encoder_decoder.py │ ├── modeling_flaubert.py │ ├── modeling_gpt2.py │ ├── modeling_longformer.py │ ├── modeling_marian.py │ ├── modeling_mmbt.py │ ├── modeling_mobilebert.py │ ├── modeling_openai.py │ ├── modeling_reformer.py │ ├── modeling_retribert.py │ ├── modeling_roberta.py │ ├── modeling_t5.py │ ├── modeling_tf_albert.py │ ├── modeling_tf_auto.py │ ├── modeling_tf_bert.py │ ├── modeling_tf_camembert.py │ ├── modeling_tf_ctrl.py │ ├── modeling_tf_distilbert.py │ ├── modeling_tf_electra.py │ ├── modeling_tf_flaubert.py │ ├── modeling_tf_gpt2.py │ ├── modeling_tf_mobilebert.py │ ├── modeling_tf_openai.py │ ├── modeling_tf_pytorch_utils.py │ ├── modeling_tf_roberta.py │ ├── modeling_tf_t5.py │ ├── modeling_tf_transfo_xl.py │ ├── modeling_tf_transfo_xl_utilities.py │ ├── modeling_tf_utils.py │ ├── modeling_tf_xlm.py │ ├── modeling_tf_xlm_roberta.py │ ├── modeling_tf_xlnet.py │ ├── modeling_transfo_xl.py │ ├── modeling_transfo_xl_utilities.py │ ├── modeling_utils.py │ ├── modeling_xlm.py │ ├── modeling_xlm_roberta.py │ ├── modeling_xlnet.py │ ├── multitask_utils.py │ ├── optimization.py │ ├── optimization_tf.py │ ├── pipelines.py │ ├── testing_utils.py │ ├── tokenization_albert.py │ ├── tokenization_auto.py │ ├── tokenization_bart.py │ ├── tokenization_bert.py │ ├── tokenization_bert_japanese.py │ ├── tokenization_camembert.py │ ├── tokenization_ctrl.py │ ├── tokenization_distilbert.py │ ├── tokenization_electra.py │ ├── tokenization_flaubert.py │ ├── tokenization_gpt2.py │ ├── tokenization_longformer.py │ ├── tokenization_marian.py │ ├── tokenization_mobilebert.py │ ├── tokenization_openai.py │ ├── tokenization_reformer.py │ ├── tokenization_retribert.py │ ├── tokenization_roberta.py │ ├── tokenization_t5.py │ ├── tokenization_transfo_xl.py │ ├── tokenization_utils.py │ ├── tokenization_utils_base.py │ ├── tokenization_utils_fast.py │ ├── tokenization_xlm.py │ ├── tokenization_xlm_roberta.py │ ├── tokenization_xlnet.py │ ├── trainer.py │ ├── trainer_tf.py │ ├── trainer_utils.py │ ├── training_args.py │ └── training_args_tf.py ├── templates ├── adding_a_new_example_script │ ├── README.md │ ├── run_xxx.py │ └── utils_xxx.py └── adding_a_new_model │ ├── README.md │ ├── configuration_xxx.py │ ├── convert_xxx_original_tf_checkpoint_to_pytorch.py │ ├── modeling_tf_xxx.py │ ├── modeling_xxx.py │ ├── tests │ ├── test_modeling_tf_xxx.py │ ├── test_modeling_xxx.py │ └── test_tokenization_xxx.py │ └── tokenization_xxx.py ├── tests ├── __init__.py ├── test_activations.py ├── test_benchmark.py ├── test_benchmark_tf.py ├── test_configuration_auto.py ├── test_configuration_common.py ├── test_doc_samples.py ├── test_hf_api.py ├── test_hf_argparser.py ├── test_model_card.py ├── test_modeling_albert.py ├── test_modeling_auto.py ├── test_modeling_bart.py ├── test_modeling_bert.py ├── test_modeling_camembert.py ├── test_modeling_common.py ├── test_modeling_ctrl.py ├── test_modeling_distilbert.py ├── test_modeling_electra.py ├── test_modeling_encoder_decoder.py ├── test_modeling_flaubert.py ├── test_modeling_gpt2.py ├── test_modeling_longformer.py ├── test_modeling_marian.py ├── test_modeling_mobilebert.py ├── test_modeling_openai.py ├── test_modeling_reformer.py ├── test_modeling_roberta.py ├── test_modeling_t5.py 
├── test_modeling_tf_albert.py ├── test_modeling_tf_auto.py ├── test_modeling_tf_bert.py ├── test_modeling_tf_camembert.py ├── test_modeling_tf_common.py ├── test_modeling_tf_ctrl.py ├── test_modeling_tf_distilbert.py ├── test_modeling_tf_electra.py ├── test_modeling_tf_flaubert.py ├── test_modeling_tf_gpt2.py ├── test_modeling_tf_mobilebert.py ├── test_modeling_tf_openai_gpt.py ├── test_modeling_tf_roberta.py ├── test_modeling_tf_t5.py ├── test_modeling_tf_transfo_xl.py ├── test_modeling_tf_xlm.py ├── test_modeling_tf_xlm_roberta.py ├── test_modeling_tf_xlnet.py ├── test_modeling_transfo_xl.py ├── test_modeling_xlm.py ├── test_modeling_xlm_roberta.py ├── test_modeling_xlnet.py ├── test_onnx.py ├── test_optimization.py ├── test_optimization_tf.py ├── test_pipelines.py ├── test_tokenization_albert.py ├── test_tokenization_auto.py ├── test_tokenization_bert.py ├── test_tokenization_bert_japanese.py ├── test_tokenization_common.py ├── test_tokenization_ctrl.py ├── test_tokenization_distilbert.py ├── test_tokenization_fast.py ├── test_tokenization_gpt2.py ├── test_tokenization_marian.py ├── test_tokenization_openai.py ├── test_tokenization_roberta.py ├── test_tokenization_t5.py ├── test_tokenization_transfo_xl.py ├── test_tokenization_utils.py ├── test_tokenization_xlm.py ├── test_tokenization_xlm_roberta.py ├── test_tokenization_xlnet.py ├── test_trainer.py └── test_trainer_distributed.py ├── utils ├── download_glue_data.py └── link_tester.py └── valohai.yaml

The per-file listing that follows the tree links to raw file contents rather than embedding them: every path shown above is served from https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/<path>. The only contents reproduced inline in the listing are trivial: transformers/MANIFEST.in contains the single line "include LICENSE", and the documentation stubs transformers/docs/source/contributing.md and transformers/docs/source/examples.md point to ../../CONTRIBUTING.md and ../../examples/README.md respectively.
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/imgs/transformers_logo_name.png -------------------------------------------------------------------------------- /transformers/docs/source/imgs/warmup_constant_schedule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/imgs/warmup_constant_schedule.png -------------------------------------------------------------------------------- /transformers/docs/source/imgs/warmup_cosine_hard_restarts_schedule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/imgs/warmup_cosine_hard_restarts_schedule.png -------------------------------------------------------------------------------- /transformers/docs/source/imgs/warmup_cosine_schedule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/imgs/warmup_cosine_schedule.png -------------------------------------------------------------------------------- /transformers/docs/source/imgs/warmup_cosine_warm_restarts_schedule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/imgs/warmup_cosine_warm_restarts_schedule.png -------------------------------------------------------------------------------- /transformers/docs/source/imgs/warmup_linear_schedule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/imgs/warmup_linear_schedule.png -------------------------------------------------------------------------------- /transformers/docs/source/index.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/index.rst -------------------------------------------------------------------------------- /transformers/docs/source/installation.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/installation.md -------------------------------------------------------------------------------- /transformers/docs/source/main_classes/configuration.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/main_classes/configuration.rst -------------------------------------------------------------------------------- /transformers/docs/source/main_classes/model.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/main_classes/model.rst -------------------------------------------------------------------------------- /transformers/docs/source/main_classes/optimizer_schedules.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/main_classes/optimizer_schedules.rst 
-------------------------------------------------------------------------------- /transformers/docs/source/main_classes/pipelines.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/main_classes/pipelines.rst -------------------------------------------------------------------------------- /transformers/docs/source/main_classes/processors.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/main_classes/processors.rst -------------------------------------------------------------------------------- /transformers/docs/source/main_classes/tokenizer.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/main_classes/tokenizer.rst -------------------------------------------------------------------------------- /transformers/docs/source/main_classes/trainer.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/main_classes/trainer.rst -------------------------------------------------------------------------------- /transformers/docs/source/migration.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/migration.md -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/albert.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/albert.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/auto.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/auto.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/bart.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/bart.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/bert.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/bert.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/camembert.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/camembert.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/ctrl.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/ctrl.rst -------------------------------------------------------------------------------- 
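
Every file indexed here is mirrored verbatim at its raw.githubusercontent.com URL, so individual files can be pulled without cloning the repository. A minimal sketch of loading one of the dataset splits listed above (/transformers/dataset/train.tsv) directly from its raw URL, assuming only that pandas is installed and that the file is tab-separated as its extension suggests:

```python
import pandas as pd

# Base prefix used throughout this index; each entry's URL is BASE + path.
BASE = "https://raw.githubusercontent.com/utahnlp/x-fact/HEAD"

# Raw URL for the train split listed under /transformers/dataset/ above.
train_url = f"{BASE}/transformers/dataset/train.tsv"

# The column layout is not reproduced in this index, so just load and inspect.
train = pd.read_csv(train_url, sep="\t")
print(train.shape)
print(train.columns.tolist())
```

The same BASE + path pattern applies to every entry that follows.
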
/transformers/docs/source/model_doc/dialogpt.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/dialogpt.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/distilbert.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/distilbert.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/electra.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/electra.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/encoderdecoder.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/encoderdecoder.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/flaubert.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/flaubert.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/gpt.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/gpt.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/gpt2.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/gpt2.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/longformer.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/longformer.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/marian.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/marian.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/mobilebert.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/mobilebert.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/reformer.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/reformer.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/retribert.rst: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/retribert.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/roberta.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/roberta.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/t5.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/t5.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/transformerxl.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/transformerxl.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/xlm.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/xlm.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/xlmroberta.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/xlmroberta.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_doc/xlnet.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_doc/xlnet.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_sharing.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_sharing.rst -------------------------------------------------------------------------------- /transformers/docs/source/model_summary.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/model_summary.rst -------------------------------------------------------------------------------- /transformers/docs/source/multilingual.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/multilingual.rst -------------------------------------------------------------------------------- /transformers/docs/source/notebooks.md: -------------------------------------------------------------------------------- 1 | ../../notebooks/README.md -------------------------------------------------------------------------------- /transformers/docs/source/philosophy.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/philosophy.rst 
-------------------------------------------------------------------------------- /transformers/docs/source/preprocessing.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/preprocessing.rst -------------------------------------------------------------------------------- /transformers/docs/source/pretrained_models.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/pretrained_models.rst -------------------------------------------------------------------------------- /transformers/docs/source/quicktour.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/quicktour.rst -------------------------------------------------------------------------------- /transformers/docs/source/task_summary.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/task_summary.rst -------------------------------------------------------------------------------- /transformers/docs/source/tokenizer_summary.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/tokenizer_summary.rst -------------------------------------------------------------------------------- /transformers/docs/source/torchscript.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/torchscript.rst -------------------------------------------------------------------------------- /transformers/docs/source/training.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/docs/source/training.rst -------------------------------------------------------------------------------- /transformers/examples/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/README.md -------------------------------------------------------------------------------- /transformers/examples/adversarial/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/adversarial/README.md -------------------------------------------------------------------------------- /transformers/examples/adversarial/run_hans.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/adversarial/run_hans.py -------------------------------------------------------------------------------- /transformers/examples/adversarial/utils_hans.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/adversarial/utils_hans.py -------------------------------------------------------------------------------- /transformers/examples/benchmarking/plot_csv_file.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/benchmarking/plot_csv_file.py -------------------------------------------------------------------------------- /transformers/examples/benchmarking/run_benchmark.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/benchmarking/run_benchmark.py -------------------------------------------------------------------------------- /transformers/examples/benchmarking/run_benchmark_tf.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/benchmarking/run_benchmark_tf.py -------------------------------------------------------------------------------- /transformers/examples/bert-loses-patience/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/bert-loses-patience/README.md -------------------------------------------------------------------------------- /transformers/examples/bert-loses-patience/pabee/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /transformers/examples/bert-loses-patience/pabee/modeling_pabee_bert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/bert-loses-patience/pabee/modeling_pabee_bert.py -------------------------------------------------------------------------------- /transformers/examples/bert-loses-patience/run_glue_with_pabee.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/bert-loses-patience/run_glue_with_pabee.py -------------------------------------------------------------------------------- /transformers/examples/bert-loses-patience/test_run_glue_with_pabee.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/bert-loses-patience/test_run_glue_with_pabee.py -------------------------------------------------------------------------------- /transformers/examples/bertology/run_bertology.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/bertology/run_bertology.py -------------------------------------------------------------------------------- /transformers/examples/contrib/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/contrib/README.md -------------------------------------------------------------------------------- /transformers/examples/contrib/mm-imdb/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/contrib/mm-imdb/README.md -------------------------------------------------------------------------------- /transformers/examples/contrib/mm-imdb/run_mmimdb.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/contrib/mm-imdb/run_mmimdb.py -------------------------------------------------------------------------------- /transformers/examples/contrib/mm-imdb/utils_mmimdb.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/contrib/mm-imdb/utils_mmimdb.py -------------------------------------------------------------------------------- /transformers/examples/contrib/run_camembert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/contrib/run_camembert.py -------------------------------------------------------------------------------- /transformers/examples/contrib/run_openai_gpt.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/contrib/run_openai_gpt.py -------------------------------------------------------------------------------- /transformers/examples/contrib/run_swag.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/contrib/run_swag.py -------------------------------------------------------------------------------- /transformers/examples/contrib/run_transfo_xl.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/contrib/run_transfo_xl.py -------------------------------------------------------------------------------- /transformers/examples/distillation/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/distillation/README.md -------------------------------------------------------------------------------- /transformers/examples/distillation/distiller.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/distillation/distiller.py -------------------------------------------------------------------------------- /transformers/examples/distillation/grouped_batch_sampler.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/distillation/grouped_batch_sampler.py -------------------------------------------------------------------------------- /transformers/examples/distillation/lm_seqs_dataset.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/distillation/lm_seqs_dataset.py -------------------------------------------------------------------------------- /transformers/examples/distillation/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/distillation/requirements.txt -------------------------------------------------------------------------------- /transformers/examples/distillation/run_squad_w_distillation.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/distillation/run_squad_w_distillation.py -------------------------------------------------------------------------------- /transformers/examples/distillation/scripts/binarized_data.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/distillation/scripts/binarized_data.py -------------------------------------------------------------------------------- /transformers/examples/distillation/scripts/extract.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/distillation/scripts/extract.py -------------------------------------------------------------------------------- /transformers/examples/distillation/scripts/extract_distilbert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/distillation/scripts/extract_distilbert.py -------------------------------------------------------------------------------- /transformers/examples/distillation/scripts/token_counts.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/distillation/scripts/token_counts.py -------------------------------------------------------------------------------- /transformers/examples/distillation/train.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/distillation/train.py -------------------------------------------------------------------------------- /transformers/examples/distillation/training_configs/distilgpt2.json: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/distillation/training_configs/distilgpt2.json -------------------------------------------------------------------------------- /transformers/examples/distillation/utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/distillation/utils.py -------------------------------------------------------------------------------- /transformers/examples/language-modeling/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/language-modeling/README.md -------------------------------------------------------------------------------- /transformers/examples/language-modeling/run_language_modeling.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/language-modeling/run_language_modeling.py -------------------------------------------------------------------------------- /transformers/examples/lightning_base.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/lightning_base.py -------------------------------------------------------------------------------- /transformers/examples/longform-qa/README.md: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/longform-qa/README.md -------------------------------------------------------------------------------- /transformers/examples/longform-qa/eli5_app.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/longform-qa/eli5_app.py -------------------------------------------------------------------------------- /transformers/examples/longform-qa/eli5_utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/longform-qa/eli5_utils.py -------------------------------------------------------------------------------- /transformers/examples/movement-pruning/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/movement-pruning/README.md -------------------------------------------------------------------------------- /transformers/examples/movement-pruning/Saving_PruneBERT.ipynb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/movement-pruning/Saving_PruneBERT.ipynb -------------------------------------------------------------------------------- /transformers/examples/movement-pruning/bertarize.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/movement-pruning/bertarize.py -------------------------------------------------------------------------------- /transformers/examples/movement-pruning/counts_parameters.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/movement-pruning/counts_parameters.py -------------------------------------------------------------------------------- /transformers/examples/movement-pruning/emmental/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/movement-pruning/emmental/__init__.py -------------------------------------------------------------------------------- /transformers/examples/movement-pruning/emmental/modeling_bert_masked.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/movement-pruning/emmental/modeling_bert_masked.py -------------------------------------------------------------------------------- /transformers/examples/movement-pruning/emmental/modules/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/movement-pruning/emmental/modules/__init__.py -------------------------------------------------------------------------------- /transformers/examples/movement-pruning/emmental/modules/binarizer.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/movement-pruning/emmental/modules/binarizer.py 
-------------------------------------------------------------------------------- /transformers/examples/movement-pruning/emmental/modules/masked_nn.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/movement-pruning/emmental/modules/masked_nn.py -------------------------------------------------------------------------------- /transformers/examples/movement-pruning/masked_run_glue.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/movement-pruning/masked_run_glue.py -------------------------------------------------------------------------------- /transformers/examples/movement-pruning/masked_run_squad.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/movement-pruning/masked_run_squad.py -------------------------------------------------------------------------------- /transformers/examples/movement-pruning/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/movement-pruning/requirements.txt -------------------------------------------------------------------------------- /transformers/examples/multiple-choice/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/multiple-choice/README.md -------------------------------------------------------------------------------- /transformers/examples/multiple-choice/run_multiple_choice.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/multiple-choice/run_multiple_choice.py -------------------------------------------------------------------------------- /transformers/examples/multiple-choice/run_tf_multiple_choice.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/multiple-choice/run_tf_multiple_choice.py -------------------------------------------------------------------------------- /transformers/examples/multiple-choice/utils_multiple_choice.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/multiple-choice/utils_multiple_choice.py -------------------------------------------------------------------------------- /transformers/examples/question-answering/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/question-answering/README.md -------------------------------------------------------------------------------- /transformers/examples/question-answering/run_squad.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/question-answering/run_squad.py -------------------------------------------------------------------------------- /transformers/examples/question-answering/run_tf_squad.py: 
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/question-answering/run_tf_squad.py
--------------------------------------------------------------------------------
/transformers/examples/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/requirements.txt
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/README.md
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/bertabs/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/bertabs/README.md
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/bertabs/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/bertabs/configuration_bertabs.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/bertabs/configuration_bertabs.py
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/bertabs/modeling_bertabs.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/bertabs/modeling_bertabs.py
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/bertabs/requirements.txt:
--------------------------------------------------------------------------------
1 | transformers
2 | 
3 | # For ROUGE
4 | nltk
5 | py-rouge
6 | 
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/bertabs/run_summarization.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/bertabs/run_summarization.py
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/bertabs/test_utils_summarization.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/bertabs/test_utils_summarization.py
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/bertabs/utils_summarization.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/bertabs/utils_summarization.py
-------------------------------------------------------------------------------- /transformers/examples/seq2seq/callbacks.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/callbacks.py -------------------------------------------------------------------------------- /transformers/examples/seq2seq/distillation.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/distillation.py -------------------------------------------------------------------------------- /transformers/examples/seq2seq/finetune.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/finetune.py -------------------------------------------------------------------------------- /transformers/examples/seq2seq/finetune.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/finetune.sh -------------------------------------------------------------------------------- /transformers/examples/seq2seq/finetune_bart_tiny.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/finetune_bart_tiny.sh -------------------------------------------------------------------------------- /transformers/examples/seq2seq/finetune_t5.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/finetune_t5.sh -------------------------------------------------------------------------------- /transformers/examples/seq2seq/initialization_utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/initialization_utils.py -------------------------------------------------------------------------------- /transformers/examples/seq2seq/run_distiller.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/run_distiller.sh -------------------------------------------------------------------------------- /transformers/examples/seq2seq/run_eval.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/run_eval.py -------------------------------------------------------------------------------- /transformers/examples/seq2seq/test_seq2seq_examples.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/test_seq2seq_examples.py -------------------------------------------------------------------------------- /transformers/examples/seq2seq/train_distilbart_cnn.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/train_distilbart_cnn.sh -------------------------------------------------------------------------------- 
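
The bertabs requirements inlined above pull in nltk and py-rouge for ROUGE scoring (per that file's own comment). A rough sketch of how the py-rouge dependency is typically invoked, following that package's documented interface rather than code from run_summarization.py itself:

```python
import rouge  # provided by the py-rouge package from bertabs/requirements.txt

# Toy hypothesis/reference pair; real inputs would come from the summarizer.
hypothesis = "the cat sat on the mat"
reference = "a cat was sitting on the mat"

# Average ROUGE-1, ROUGE-2, and ROUGE-L F/P/R over the (single) pair.
evaluator = rouge.Rouge(metrics=["rouge-n", "rouge-l"], max_n=2, apply_avg=True)
scores = evaluator.get_scores([hypothesis], [reference])
print(scores["rouge-1"]["f"], scores["rouge-l"]["f"])
```
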
/transformers/examples/seq2seq/train_distilbart_xsum.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/train_distilbart_xsum.sh -------------------------------------------------------------------------------- /transformers/examples/seq2seq/utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/seq2seq/utils.py -------------------------------------------------------------------------------- /transformers/examples/test_examples.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/test_examples.py -------------------------------------------------------------------------------- /transformers/examples/text-classification/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-classification/README.md -------------------------------------------------------------------------------- /transformers/examples/text-classification/run_glue.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-classification/run_glue.py -------------------------------------------------------------------------------- /transformers/examples/text-classification/run_pl.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-classification/run_pl.sh -------------------------------------------------------------------------------- /transformers/examples/text-classification/run_pl_glue.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-classification/run_pl_glue.py -------------------------------------------------------------------------------- /transformers/examples/text-classification/run_tf_glue.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-classification/run_tf_glue.py -------------------------------------------------------------------------------- /transformers/examples/text-classification/run_xfact.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-classification/run_xfact.py -------------------------------------------------------------------------------- /transformers/examples/text-classification/run_xfact_evidence.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-classification/run_xfact_evidence.py -------------------------------------------------------------------------------- /transformers/examples/text-classification/run_xnli.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-classification/run_xnli.py -------------------------------------------------------------------------------- 
/transformers/examples/text-classification/xfact_evidence.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-classification/xfact_evidence.py
--------------------------------------------------------------------------------
/transformers/examples/text-generation/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-generation/README.md
--------------------------------------------------------------------------------
/transformers/examples/text-generation/pplm/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-generation/pplm/README.md
--------------------------------------------------------------------------------
/transformers/examples/text-generation/pplm/imgs/headfigure.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-generation/pplm/imgs/headfigure.png
--------------------------------------------------------------------------------
/transformers/examples/text-generation/pplm/imgs/wooly.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-generation/pplm/imgs/wooly.png
--------------------------------------------------------------------------------
/transformers/examples/text-generation/pplm/pplm_classification_head.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-generation/pplm/pplm_classification_head.py
--------------------------------------------------------------------------------
/transformers/examples/text-generation/pplm/run_pplm.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-generation/pplm/run_pplm.py
--------------------------------------------------------------------------------
/transformers/examples/text-generation/pplm/run_pplm_discrim_train.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-generation/pplm/run_pplm_discrim_train.py
--------------------------------------------------------------------------------
/transformers/examples/text-generation/run_generation.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/text-generation/run_generation.py
--------------------------------------------------------------------------------
/transformers/examples/token-classification/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/token-classification/README.md
--------------------------------------------------------------------------------
/transformers/examples/token-classification/run.sh:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/token-classification/run.sh
--------------------------------------------------------------------------------
/transformers/examples/token-classification/run_ner.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/token-classification/run_ner.py
--------------------------------------------------------------------------------
/transformers/examples/token-classification/run_pl.sh:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/token-classification/run_pl.sh
--------------------------------------------------------------------------------
/transformers/examples/token-classification/run_pl_ner.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/token-classification/run_pl_ner.py
--------------------------------------------------------------------------------
/transformers/examples/token-classification/run_tf_ner.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/token-classification/run_tf_ner.py
--------------------------------------------------------------------------------
/transformers/examples/token-classification/scripts/preprocess.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/token-classification/scripts/preprocess.py
--------------------------------------------------------------------------------
/transformers/examples/token-classification/test_ner_examples.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/token-classification/test_ner_examples.py
--------------------------------------------------------------------------------
/transformers/examples/token-classification/utils_ner.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/token-classification/utils_ner.py
--------------------------------------------------------------------------------
/transformers/examples/xla_spawn.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/examples/xla_spawn.py
--------------------------------------------------------------------------------
/transformers/format_xfact_to_sst2.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/format_xfact_to_sst2.py
--------------------------------------------------------------------------------
/transformers/hubconf.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/hubconf.py
--------------------------------------------------------------------------------
/transformers/majority_class.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/majority_class.py
--------------------------------------------------------------------------------
/transformers/model_cards/DeepPavlov/rubert-base-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/DeepPavlov/rubert-base-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/KB/albert-base-swedish-cased-alpha/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/KB/albert-base-swedish-cased-alpha/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/KB/bert-base-swedish-cased-ner/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/KB/bert-base-swedish-cased-ner/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/KB/bert-base-swedish-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/KB/bert-base-swedish-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/LorenzoDeMattei/GePpeTto/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/LorenzoDeMattei/GePpeTto/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/MoseliMotsoehli/TswanaBert/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/MoseliMotsoehli/TswanaBert/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/NLP4H/ms_bert/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/NLP4H/ms_bert/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/NeuML/bert-small-cord19-squad2/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/NeuML/bert-small-cord19-squad2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/NeuML/bert-small-cord19/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/NeuML/bert-small-cord19/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/NeuML/bert-small-cord19qa/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/NeuML/bert-small-cord19qa/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/SparkBeyond/roberta-large-sts-b/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/SparkBeyond/roberta-large-sts-b/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/Tereveni-AI/gpt2-124M-uk-fiction/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/Tereveni-AI/gpt2-124M-uk-fiction/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/TurkuNLP/bert-base-finnish-cased-v1/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/TurkuNLP/bert-base-finnish-cased-v1/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/a-ware/bart-squadv2/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/a-ware/bart-squadv2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/a-ware/xlmroberta-squadv2/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/a-ware/xlmroberta-squadv2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/activebus/BERT-DK_laptop/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/activebus/BERT-DK_laptop/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/activebus/BERT-DK_rest/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/activebus/BERT-DK_rest/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/activebus/BERT-PT_laptop/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/activebus/BERT-PT_laptop/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/activebus/BERT-PT_rest/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/activebus/BERT-PT_rest/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/activebus/BERT-XD_Review/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/activebus/BERT-XD_Review/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/activebus/BERT_Review/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/activebus/BERT_Review/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/ahotrod/albert_xxlargev1_squad2_512/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/ahotrod/albert_xxlargev1_squad2_512/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/ahotrod/roberta_large_squad2/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/ahotrod/roberta_large_squad2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/ahotrod/xlnet_large_squad2_512/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/ahotrod/xlnet_large_squad2_512/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/albert-base-v1-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/albert-base-v1-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/albert-xxlarge-v2-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/albert-xxlarge-v2-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/allegro/herbert-klej-cased-v1/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/allegro/herbert-klej-cased-v1/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/allenai/biomed_roberta_base/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/allenai/biomed_roberta_base/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/allenai/longformer-base-4096/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/allenai/longformer-base-4096/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/allenai/scibert_scivocab_cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/allenai/scibert_scivocab_cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/allenai/scibert_scivocab_uncased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/allenai/scibert_scivocab_uncased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/asafaya/bert-base-arabic/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/asafaya/bert-base-arabic/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/asafaya/bert-large-arabic/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/asafaya/bert-large-arabic/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/asafaya/bert-medium-arabic/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/asafaya/bert-medium-arabic/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/asafaya/bert-mini-arabic/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/asafaya/bert-mini-arabic/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/aubmindlab/bert-base-arabert/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/aubmindlab/bert-base-arabert/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/aubmindlab/bert-base-arabertv01/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/aubmindlab/bert-base-arabertv01/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/bart-large-cnn/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - summarization
4 | ---
5 | 
6 | 
--------------------------------------------------------------------------------
/transformers/model_cards/bart-large-xsum/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - summarization
4 | ---
5 | 
6 | 
--------------------------------------------------------------------------------
/transformers/model_cards/bayartsogt/albert-mongolian/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/bayartsogt/albert-mongolian/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/bert-base-cased-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/bert-base-cased-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/bert-base-chinese-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: chinese
3 | ---
4 | 
--------------------------------------------------------------------------------
/transformers/model_cards/bert-base-german-cased-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/bert-base-german-cased-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/bert-base-german-dbmdz-cased-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/bert-base-german-dbmdz-cased-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/bert-base-german-dbmdz-uncased-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/bert-base-german-dbmdz-uncased-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/bert-base-multilingual-cased-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/bert-base-multilingual-cased-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/bert-base-multilingual-uncased-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/bert-base-multilingual-uncased-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/bert-base-uncased-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/bert-base-uncased-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/bert-large-cased-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | license: apache-2.0
3 | ---
4 | 
--------------------------------------------------------------------------------
/transformers/model_cards/binwang/xlnet-base-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/binwang/xlnet-base-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/camembert-base-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/camembert-base-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/camembert/camembert-base-ccnet-4gb/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/camembert/camembert-base-ccnet-4gb/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/camembert/camembert-base-ccnet/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/camembert/camembert-base-ccnet/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/camembert/camembert-base-oscar-4gb/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/camembert/camembert-base-oscar-4gb/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/camembert/camembert-large/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/camembert/camembert-large/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/canwenxu/BERT-of-Theseus-MNLI/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/canwenxu/BERT-of-Theseus-MNLI/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/chrisliu298/arxiv_ai_gpt2/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/chrisliu298/arxiv_ai_gpt2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/clue/albert_chinese_small/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/clue/albert_chinese_small/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/clue/albert_chinese_tiny/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/clue/albert_chinese_tiny/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/clue/roberta_chinese_3L312_clue_tiny/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/clue/roberta_chinese_3L312_clue_tiny/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/clue/roberta_chinese_base/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/clue/roberta_chinese_base/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/clue/roberta_chinese_large/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/clue/roberta_chinese_large/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/clue/xlnet_chinese_large/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/clue/xlnet_chinese_large/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/codegram/calbert-base-uncased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/codegram/calbert-base-uncased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/daigo/bert-base-japanese-sentiment/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/daigo/bert-base-japanese-sentiment/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/dbmdz/bert-base-german-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/dbmdz/bert-base-german-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/dbmdz/bert-base-german-uncased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/dbmdz/bert-base-german-uncased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/dbmdz/bert-base-italian-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/dbmdz/bert-base-italian-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/dbmdz/bert-base-italian-uncased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/dbmdz/bert-base-italian-uncased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/dbmdz/bert-base-italian-xxl-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/dbmdz/bert-base-italian-xxl-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/dbmdz/bert-base-italian-xxl-uncased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/dbmdz/bert-base-italian-xxl-uncased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/dbmdz/bert-base-turkish-128k-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/dbmdz/bert-base-turkish-128k-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/dbmdz/bert-base-turkish-128k-uncased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/dbmdz/bert-base-turkish-128k-uncased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/dbmdz/bert-base-turkish-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/dbmdz/bert-base-turkish-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/dbmdz/bert-base-turkish-uncased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/dbmdz/bert-base-turkish-uncased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/dbmdz/distilbert-base-turkish-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/dbmdz/distilbert-base-turkish-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/deepset/quora_dedup_bert_base/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/deepset/quora_dedup_bert_base/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/deepset/roberta-base-squad2-covid/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/deepset/roberta-base-squad2-covid/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/deepset/roberta-base-squad2/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/deepset/roberta-base-squad2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/deepset/sentence_bert/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/deepset/sentence_bert/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/distilbert-base-multilingual-cased-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/distilbert-base-multilingual-cased-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/distilbert-base-uncased-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/distilbert-base-uncased-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/distilgpt2-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/distilgpt2-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/distilroberta-base-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/distilroberta-base-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/dkleczek/bert-base-polish-cased-v1/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/dkleczek/bert-base-polish-cased-v1/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/dkleczek/bert-base-polish-uncased-v1/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/dkleczek/bert-base-polish-uncased-v1/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/elgeish/cs224n-squad2.0-roberta-base/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/elgeish/cs224n-squad2.0-roberta-base/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/emilyalsentzer/Bio_ClinicalBERT/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/emilyalsentzer/Bio_ClinicalBERT/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/facebook/bart-large-cnn/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - summarization
4 | 
5 | license: mit
6 | ---
7 | 
--------------------------------------------------------------------------------
/transformers/model_cards/facebook/bart-large/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/facebook/bart-large/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/fmikaelian/camembert-base-fquad/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/fmikaelian/camembert-base-fquad/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/fmikaelian/camembert-base-squad/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/fmikaelian/camembert-base-squad/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/gaochangkuan/model_dir/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/gaochangkuan/model_dir/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-10_H-128_A-2/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-10_H-256_A-4/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-10_H-512_A-8/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-10_H-768_A-12/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-12_H-128_A-2/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-12_H-256_A-4/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-12_H-512_A-8/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-12_H-768_A-12/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-2_H-128_A-2/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-2_H-256_A-4/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-2_H-512_A-8/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-2_H-768_A-12/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-4_H-128_A-2/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-4_H-256_A-4/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-4_H-512_A-8/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-4_H-768_A-12/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-6_H-128_A-2/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-6_H-256_A-4/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-6_H-512_A-8/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-6_H-768_A-12/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-8_H-128_A-2/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-8_H-256_A-4/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-8_H-512_A-8/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/bert_uncased_L-8_H-768_A-12/README.md:
--------------------------------------------------------------------------------
1 | ../../iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/electra-base-discriminator/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/google/electra-base-discriminator/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/electra-base-generator/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/google/electra-base-generator/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/electra-large-discriminator/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/google/electra-large-discriminator/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/electra-large-generator/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/google/electra-large-generator/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/electra-small-discriminator/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/google/electra-small-discriminator/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/electra-small-generator/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/google/electra-small-generator/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/mobilebert-uncased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/google/mobilebert-uncased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/reformer-crime-and-punishment/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/google/reformer-crime-and-punishment/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/google/reformer-enwik8/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/google/reformer-enwik8/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/gpt2-README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/gpt2-README.md
--------------------------------------------------------------------------------
/transformers/model_cards/gsarti/biobert-nli/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/gsarti/biobert-nli/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/gsarti/covidbert-nli/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/gsarti/covidbert-nli/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/gsarti/scibert-nli/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/gsarti/scibert-nli/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/healx/gpt-2-pubmed-large/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/healx/gpt-2-pubmed-large/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/healx/gpt-2-pubmed-medium/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/healx/gpt-2-pubmed-medium/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/huggingface/CodeBERTa-language-id/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/huggingface/CodeBERTa-language-id/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/huggingface/CodeBERTa-small-v1/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/huggingface/CodeBERTa-small-v1/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/huseinzol05/albert-base-bahasa-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/huseinzol05/albert-base-bahasa-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/huseinzol05/albert-tiny-bahasa-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/huseinzol05/albert-tiny-bahasa-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/huseinzol05/bert-base-bahasa-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/huseinzol05/bert-base-bahasa-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/huseinzol05/gpt2-117M-bahasa-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/huseinzol05/gpt2-117M-bahasa-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/huseinzol05/gpt2-345M-bahasa-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/huseinzol05/gpt2-345M-bahasa-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/huseinzol05/t5-base-bahasa-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/huseinzol05/t5-base-bahasa-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/huseinzol05/t5-small-bahasa-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/huseinzol05/t5-small-bahasa-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/huseinzol05/tiny-bert-bahasa-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/huseinzol05/tiny-bert-bahasa-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/huseinzol05/xlnet-base-bahasa-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/huseinzol05/xlnet-base-bahasa-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/illuin/camembert-base-fquad/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/illuin/camembert-base-fquad/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/illuin/camembert-large-fquad/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/illuin/camembert-large-fquad/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/iuliaturc/bert_uncased_L-2_H-128_A-2/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/iuliaturc/bert_uncased_L-2_H-128_A-2/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/ixa-ehu/berteus-base-cased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/ixa-ehu/berteus-base-cased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/jannesg/bertsson/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/jannesg/bertsson/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/jplu/tf-camembert-base/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/jplu/tf-camembert-base/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/jplu/tf-xlm-r-ner-40-lang/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/jplu/tf-xlm-r-ner-40-lang/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/jplu/tf-xlm-roberta-base/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/jplu/tf-xlm-roberta-base/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/jplu/tf-xlm-roberta-large/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/jplu/tf-xlm-roberta-large/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/julien-c/EsperBERTo-small-pos/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/julien-c/EsperBERTo-small-pos/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/julien-c/EsperBERTo-small/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/julien-c/EsperBERTo-small/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/julien-c/bert-xsmall-dummy/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/julien-c/bert-xsmall-dummy/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/julien-c/dummy-unknown/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/julien-c/dummy-unknown/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/lvwerra/bert-imdb/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/lvwerra/bert-imdb/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/lvwerra/gpt2-imdb-ctrl/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/lvwerra/gpt2-imdb-ctrl/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/lvwerra/gpt2-imdb-pos/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/lvwerra/gpt2-imdb-pos/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/lvwerra/gpt2-imdb/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/lvwerra/gpt2-imdb/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/lvwerra/gpt2-medium-taboo/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/lvwerra/gpt2-medium-taboo/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/lysandre/arxiv-nlp/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/lysandre/arxiv-nlp/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/lysandre/arxiv/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/lysandre/arxiv/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/microsoft/DialoGPT-large/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/microsoft/DialoGPT-large/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/microsoft/DialoGPT-medium/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/microsoft/DialoGPT-medium/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/microsoft/DialoGPT-small/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/microsoft/DialoGPT-small/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/microsoft/MiniLM-L12-H384-uncased/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/microsoft/MiniLM-L12-H384-uncased/README.md
--------------------------------------------------------------------------------
/transformers/model_cards/monologg/koelectra-base-generator/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/monologg/koelectra-base-generator/README.md
--------------------------------------------------------------------------------
-------------------------------------------------------------------------------- /transformers/model_cards/monologg/koelectra-small-generator/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/monologg/koelectra-small-generator/README.md -------------------------------------------------------------------------------- /transformers/model_cards/monsoon-nlp/hindi-bert/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/monsoon-nlp/hindi-bert/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/CodeBERTaPy/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/CodeBERTaPy/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/GPT-2-finetuned-CORD19/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/GPT-2-finetuned-CORD19/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/RuPERTa-base-finetuned-ner/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/RuPERTa-base-finetuned-ner/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/RuPERTa-base-finetuned-pos/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/RuPERTa-base-finetuned-pos/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/bert-mini-finetuned-squadv2/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/bert-mini-finetuned-squadv2/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/bert-small-finetuned-squadv2/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/bert-small-finetuned-squadv2/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/bert-tiny-finetuned-squadv2/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/bert-tiny-finetuned-squadv2/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/bert-uncased-finetuned-qnli/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/bert-uncased-finetuned-qnli/README.md 
-------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/chEMBL_smiles_v1/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/chEMBL_smiles_v1/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/codeBERTaJS/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/codeBERTaJS/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/gpt2-imdb-neg/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/gpt2-imdb-neg/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/gpt2-imdb-neutral/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/gpt2-imdb-neutral/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/roberta-large-finetuned-wsc/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/roberta-large-finetuned-wsc/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/spanbert-finetuned-squadv1/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/spanbert-finetuned-squadv1/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/spanbert-finetuned-squadv2/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/spanbert-finetuned-squadv2/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/t5-base-finetuned-emotion/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/t5-base-finetuned-emotion/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/t5-base-finetuned-squadv2/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/t5-base-finetuned-squadv2/README.md -------------------------------------------------------------------------------- /transformers/model_cards/mrm8488/xlm-multi-finetuned-xquadv1/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/mrm8488/xlm-multi-finetuned-xquadv1/README.md 
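Several cards in this block describe extractive-QA checkpoints (the mrm8488 SQuAD v1/v2 fine-tunes). Purely as a sketch of how such a checkpoint is usually queried, unrelated to the X-FACT training scripts:

```python
# Illustrative only: querying a SQuAD-finetuned checkpoint through the QA pipeline.
from transformers import pipeline

qa = pipeline("question-answering", model="mrm8488/bert-tiny-finetuned-squadv2")
result = qa(question="Who maintains the X-FACT benchmark?",
            context="The X-FACT dataset and code are maintained by the UtahNLP group.")
print(result["answer"], result["score"])
```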
-------------------------------------------------------------------------------- /transformers/model_cards/nlpaueb/bert-base-greek-uncased-v1/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/nlpaueb/bert-base-greek-uncased-v1/README.md -------------------------------------------------------------------------------- /transformers/model_cards/nyu-mll/roberta-base-100M-1/README.md: -------------------------------------------------------------------------------- 1 | ../roberta_1M_to_1B/README.md -------------------------------------------------------------------------------- /transformers/model_cards/nyu-mll/roberta-base-100M-2/README.md: -------------------------------------------------------------------------------- 1 | ../roberta_1M_to_1B/README.md -------------------------------------------------------------------------------- /transformers/model_cards/nyu-mll/roberta-base-100M-3/README.md: -------------------------------------------------------------------------------- 1 | ../roberta_1M_to_1B/README.md -------------------------------------------------------------------------------- /transformers/model_cards/nyu-mll/roberta-base-10M-1/README.md: -------------------------------------------------------------------------------- 1 | ../roberta_1M_to_1B/README.md -------------------------------------------------------------------------------- /transformers/model_cards/nyu-mll/roberta-base-10M-2/README.md: -------------------------------------------------------------------------------- 1 | ../roberta_1M_to_1B/README.md -------------------------------------------------------------------------------- /transformers/model_cards/nyu-mll/roberta-base-10M-3/README.md: -------------------------------------------------------------------------------- 1 | ../roberta_1M_to_1B/README.md -------------------------------------------------------------------------------- /transformers/model_cards/nyu-mll/roberta-base-1B-1/README.md: -------------------------------------------------------------------------------- 1 | ../roberta_1M_to_1B/README.md -------------------------------------------------------------------------------- /transformers/model_cards/nyu-mll/roberta-base-1B-2/README.md: -------------------------------------------------------------------------------- 1 | ../roberta_1M_to_1B/README.md -------------------------------------------------------------------------------- /transformers/model_cards/nyu-mll/roberta-base-1B-3/README.md: -------------------------------------------------------------------------------- 1 | ../roberta_1M_to_1B/README.md -------------------------------------------------------------------------------- /transformers/model_cards/nyu-mll/roberta-med-small-1M-1/README.md: -------------------------------------------------------------------------------- 1 | ../roberta_1M_to_1B/README.md -------------------------------------------------------------------------------- /transformers/model_cards/nyu-mll/roberta-med-small-1M-2/README.md: -------------------------------------------------------------------------------- 1 | ../roberta_1M_to_1B/README.md -------------------------------------------------------------------------------- /transformers/model_cards/nyu-mll/roberta-med-small-1M-3/README.md: -------------------------------------------------------------------------------- 1 | ../roberta_1M_to_1B/README.md 
-------------------------------------------------------------------------------- /transformers/model_cards/nyu-mll/roberta_1M_to_1B/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/nyu-mll/roberta_1M_to_1B/README.md -------------------------------------------------------------------------------- /transformers/model_cards/oliverguhr/german-sentiment-bert/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/oliverguhr/german-sentiment-bert/README.md -------------------------------------------------------------------------------- /transformers/model_cards/pradhyra/AWSBlogBert/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/pradhyra/AWSBlogBert/README.md -------------------------------------------------------------------------------- /transformers/model_cards/roberta-base-README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/roberta-base-README.md -------------------------------------------------------------------------------- /transformers/model_cards/roberta-large-README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/roberta-large-README.md -------------------------------------------------------------------------------- /transformers/model_cards/roberta-large-mnli-README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/roberta-large-mnli-README.md -------------------------------------------------------------------------------- /transformers/model_cards/savasy/bert-base-turkish-ner-cased/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/savasy/bert-base-turkish-ner-cased/README.md -------------------------------------------------------------------------------- /transformers/model_cards/savasy/bert-base-turkish-squad/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/savasy/bert-base-turkish-squad/README.md -------------------------------------------------------------------------------- /transformers/model_cards/schmidek/electra-small-cased/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/schmidek/electra-small-cased/README.md -------------------------------------------------------------------------------- /transformers/model_cards/seiya/oubiobert-base-uncased/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/seiya/oubiobert-base-uncased/README.md -------------------------------------------------------------------------------- /transformers/model_cards/severinsimmler/literary-german-bert/README.md: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/severinsimmler/literary-german-bert/README.md -------------------------------------------------------------------------------- /transformers/model_cards/severinsimmler/literary-german-bert/kfold.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/severinsimmler/literary-german-bert/kfold.png -------------------------------------------------------------------------------- /transformers/model_cards/seyonec/ChemBERTa-zinc-base-v1/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/seyonec/ChemBERTa-zinc-base-v1/README.md -------------------------------------------------------------------------------- /transformers/model_cards/shoarora/alectra-small-owt/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/shoarora/alectra-small-owt/README.md -------------------------------------------------------------------------------- /transformers/model_cards/shoarora/electra-small-owt/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/shoarora/electra-small-owt/README.md -------------------------------------------------------------------------------- /transformers/model_cards/spentaur/yelp/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/spentaur/yelp/README.md -------------------------------------------------------------------------------- /transformers/model_cards/surajp/SanBERTa/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/surajp/SanBERTa/README.md -------------------------------------------------------------------------------- /transformers/model_cards/surajp/albert-base-sanskrit/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/surajp/albert-base-sanskrit/README.md -------------------------------------------------------------------------------- /transformers/model_cards/t5-11b-README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/t5-11b-README.md -------------------------------------------------------------------------------- /transformers/model_cards/t5-3b-README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/t5-3b-README.md -------------------------------------------------------------------------------- /transformers/model_cards/t5-base-README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/t5-base-README.md 
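The t5-* cards above document the released T5 checkpoints (small through 11B). A minimal usage sketch for the smallest one, again not taken from this repository:

```python
# Minimal sketch: summarization with the stock t5-small checkpoint.
from transformers import T5ForConditionalGeneration, T5Tokenizer

tokenizer = T5Tokenizer.from_pretrained("t5-small")
model = T5ForConditionalGeneration.from_pretrained("t5-small")

text = ("summarize: Fact-checking sites publish verdicts for claims made by "
        "public figures in many languages.")
inputs = tokenizer.encode(text, return_tensors="pt")
summary_ids = model.generate(inputs, max_length=32, num_beams=4, early_stopping=True)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```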
-------------------------------------------------------------------------------- /transformers/model_cards/t5-large-README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/t5-large-README.md -------------------------------------------------------------------------------- /transformers/model_cards/t5-small-README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/t5-small-README.md -------------------------------------------------------------------------------- /transformers/model_cards/tblard/tf-allocine/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/tblard/tf-allocine/README.md -------------------------------------------------------------------------------- /transformers/model_cards/twmkn9/albert-base-v2-squad2/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/twmkn9/albert-base-v2-squad2/README.md -------------------------------------------------------------------------------- /transformers/model_cards/twmkn9/bert-base-uncased-squad2/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/twmkn9/bert-base-uncased-squad2/README.md -------------------------------------------------------------------------------- /transformers/model_cards/twmkn9/distilroberta-base-squad2/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/twmkn9/distilroberta-base-squad2/README.md -------------------------------------------------------------------------------- /transformers/model_cards/valhalla/t5-base-squad/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/valhalla/t5-base-squad/README.md -------------------------------------------------------------------------------- /transformers/model_cards/voidful/albert_chinese_base/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/voidful/albert_chinese_base/README.md -------------------------------------------------------------------------------- /transformers/model_cards/voidful/albert_chinese_large/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/voidful/albert_chinese_large/README.md -------------------------------------------------------------------------------- /transformers/model_cards/voidful/albert_chinese_small/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/voidful/albert_chinese_small/README.md -------------------------------------------------------------------------------- /transformers/model_cards/voidful/albert_chinese_tiny/README.md: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/voidful/albert_chinese_tiny/README.md -------------------------------------------------------------------------------- /transformers/model_cards/voidful/albert_chinese_xlarge/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/voidful/albert_chinese_xlarge/README.md -------------------------------------------------------------------------------- /transformers/model_cards/voidful/albert_chinese_xxlarge/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/voidful/albert_chinese_xxlarge/README.md -------------------------------------------------------------------------------- /transformers/model_cards/wptoux/albert-chinese-large-qa/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/wptoux/albert-chinese-large-qa/README.md -------------------------------------------------------------------------------- /transformers/model_cards/xlm-mlm-en-2048-README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/xlm-mlm-en-2048-README.md -------------------------------------------------------------------------------- /transformers/model_cards/xlm-roberta-base-README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/model_cards/xlm-roberta-base-README.md -------------------------------------------------------------------------------- /transformers/notebooks/01-training-tokenizers.ipynb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/notebooks/01-training-tokenizers.ipynb -------------------------------------------------------------------------------- /transformers/notebooks/02-transformers.ipynb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/notebooks/02-transformers.ipynb -------------------------------------------------------------------------------- /transformers/notebooks/03-pipelines.ipynb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/notebooks/03-pipelines.ipynb -------------------------------------------------------------------------------- /transformers/notebooks/04-onnx-export.ipynb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/notebooks/04-onnx-export.ipynb -------------------------------------------------------------------------------- /transformers/notebooks/05-benchmark.ipynb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/notebooks/05-benchmark.ipynb -------------------------------------------------------------------------------- /transformers/notebooks/README.md: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/notebooks/README.md -------------------------------------------------------------------------------- /transformers/randomize_snippets.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/randomize_snippets.py -------------------------------------------------------------------------------- /transformers/remove_labels.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/remove_labels.py -------------------------------------------------------------------------------- /transformers/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/requirements.txt -------------------------------------------------------------------------------- /transformers/run_all_single_models.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/run_all_single_models.sh -------------------------------------------------------------------------------- /transformers/run_commands.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/run_commands.txt -------------------------------------------------------------------------------- /transformers/setup.cfg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/setup.cfg -------------------------------------------------------------------------------- /transformers/setup.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/setup.py -------------------------------------------------------------------------------- /transformers/src/transformers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/__init__.py -------------------------------------------------------------------------------- /transformers/src/transformers/activations.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/activations.py -------------------------------------------------------------------------------- /transformers/src/transformers/additions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/additions/__init__.py -------------------------------------------------------------------------------- /transformers/src/transformers/additions/additional_utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/additions/additional_utils.py -------------------------------------------------------------------------------- /transformers/src/transformers/benchmark/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /transformers/src/transformers/benchmark/benchmark.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/benchmark/benchmark.py -------------------------------------------------------------------------------- /transformers/src/transformers/benchmark/benchmark_args.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/benchmark/benchmark_args.py -------------------------------------------------------------------------------- /transformers/src/transformers/benchmark/benchmark_args_tf.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/benchmark/benchmark_args_tf.py -------------------------------------------------------------------------------- /transformers/src/transformers/benchmark/benchmark_args_utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/benchmark/benchmark_args_utils.py -------------------------------------------------------------------------------- /transformers/src/transformers/benchmark/benchmark_tf.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/benchmark/benchmark_tf.py -------------------------------------------------------------------------------- /transformers/src/transformers/benchmark/benchmark_utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/benchmark/benchmark_utils.py -------------------------------------------------------------------------------- /transformers/src/transformers/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/commands/__init__.py -------------------------------------------------------------------------------- /transformers/src/transformers/commands/convert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/commands/convert.py -------------------------------------------------------------------------------- /transformers/src/transformers/commands/download.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/commands/download.py -------------------------------------------------------------------------------- /transformers/src/transformers/commands/env.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/commands/env.py -------------------------------------------------------------------------------- /transformers/src/transformers/commands/run.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/commands/run.py -------------------------------------------------------------------------------- /transformers/src/transformers/commands/serving.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/commands/serving.py -------------------------------------------------------------------------------- /transformers/src/transformers/commands/train.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/commands/train.py -------------------------------------------------------------------------------- /transformers/src/transformers/commands/transformers_cli.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/commands/transformers_cli.py -------------------------------------------------------------------------------- /transformers/src/transformers/commands/user.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/commands/user.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_albert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_albert.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_auto.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_auto.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_bart.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_bart.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_bert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_bert.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_camembert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_camembert.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_ctrl.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_ctrl.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_distilbert.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_distilbert.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_electra.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_electra.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_encoder_decoder.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_encoder_decoder.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_flaubert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_flaubert.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_gpt2.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_gpt2.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_longformer.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_longformer.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_marian.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_marian.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_mmbt.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_mmbt.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_mobilebert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_mobilebert.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_openai.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_openai.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_reformer.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_reformer.py 
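The configuration_* modules above define per-architecture config classes; in practice they are reached through the Auto* entry points (configuration_auto.py here, with modeling_auto.py and tokenization_auto.py further down). A hedged sketch of that pattern for a claim-plus-evidence classifier; the multilingual checkpoint name and the 7-way label count are assumptions, not values read from this listing:

```python
# Sketch of the AutoConfig/AutoTokenizer/AutoModel pattern; the checkpoint name
# and num_labels=7 are assumptions for illustration.
from transformers import AutoConfig, AutoTokenizer, AutoModelForSequenceClassification

model_name = "bert-base-multilingual-cased"
config = AutoConfig.from_pretrained(model_name, num_labels=7)
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name, config=config)

inputs = tokenizer("Example claim text", "Example evidence snippet", return_tensors="pt")
outputs = model(**inputs)
print(outputs[0].shape)  # classification logits, shape (1, 7)
```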
-------------------------------------------------------------------------------- /transformers/src/transformers/configuration_retribert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_retribert.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_roberta.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_roberta.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_t5.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_t5.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_transfo_xl.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_transfo_xl.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_utils.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_xlm.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_xlm.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_xlm_roberta.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_xlm_roberta.py -------------------------------------------------------------------------------- /transformers/src/transformers/configuration_xlnet.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/configuration_xlnet.py -------------------------------------------------------------------------------- /transformers/src/transformers/convert_graph_to_onnx.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/convert_graph_to_onnx.py -------------------------------------------------------------------------------- /transformers/src/transformers/convert_marian_to_pytorch.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/convert_marian_to_pytorch.py -------------------------------------------------------------------------------- /transformers/src/transformers/convert_pytorch_checkpoint_to_tf2.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/convert_pytorch_checkpoint_to_tf2.py -------------------------------------------------------------------------------- /transformers/src/transformers/criterions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/criterions/__init__.py -------------------------------------------------------------------------------- /transformers/src/transformers/criterions/entropic_regularizer.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/criterions/entropic_regularizer.py -------------------------------------------------------------------------------- /transformers/src/transformers/criterions/focal_loss.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/criterions/focal_loss.py -------------------------------------------------------------------------------- /transformers/src/transformers/criterions/input_gradients_norm.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/criterions/input_gradients_norm.py -------------------------------------------------------------------------------- /transformers/src/transformers/criterions/temperature_scaling.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/criterions/temperature_scaling.py -------------------------------------------------------------------------------- /transformers/src/transformers/data/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/__init__.py -------------------------------------------------------------------------------- /transformers/src/transformers/data/data_collator.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/data_collator.py -------------------------------------------------------------------------------- /transformers/src/transformers/data/datasets/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/datasets/__init__.py -------------------------------------------------------------------------------- /transformers/src/transformers/data/datasets/glue.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/datasets/glue.py -------------------------------------------------------------------------------- /transformers/src/transformers/data/datasets/language_modeling.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/datasets/language_modeling.py -------------------------------------------------------------------------------- 
/transformers/src/transformers/data/metrics/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/metrics/__init__.py -------------------------------------------------------------------------------- /transformers/src/transformers/data/metrics/squad_metrics.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/metrics/squad_metrics.py -------------------------------------------------------------------------------- /transformers/src/transformers/data/processors/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/processors/__init__.py -------------------------------------------------------------------------------- /transformers/src/transformers/data/processors/glue.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/processors/glue.py -------------------------------------------------------------------------------- /transformers/src/transformers/data/processors/squad.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/processors/squad.py -------------------------------------------------------------------------------- /transformers/src/transformers/data/processors/utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/processors/utils.py -------------------------------------------------------------------------------- /transformers/src/transformers/data/processors/xfact.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/processors/xfact.py -------------------------------------------------------------------------------- /transformers/src/transformers/data/processors/xfact_evidence.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/processors/xfact_evidence.py -------------------------------------------------------------------------------- /transformers/src/transformers/data/processors/xnli.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/data/processors/xnli.py -------------------------------------------------------------------------------- /transformers/src/transformers/file_utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/file_utils.py -------------------------------------------------------------------------------- /transformers/src/transformers/generation_tf_utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/generation_tf_utils.py 
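This block indexes the dataset plumbing, including the X-FACT-specific processors (data/processors/xfact.py and xfact_evidence.py) added next to the stock GLUE/SQuAD/XNLI ones; the real implementations are behind the links above. A hedged sketch of the usual GLUE-style processor pattern over a claim TSV, where the column names are assumptions rather than the actual X-FACT schema:

```python
# Hedged sketch of a GLUE-style reader for a claim/label TSV; the "claim" and
# "label" column names are assumptions, not the real X-FACT schema.
import csv

from transformers.data.processors.utils import InputExample

def read_claim_tsv(path):
    examples = []
    with open(path, encoding="utf-8") as f:
        for i, row in enumerate(csv.DictReader(f, delimiter="\t")):
            examples.append(InputExample(guid=str(i), text_a=row["claim"],
                                         text_b=None, label=row["label"]))
    return examples
```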
-------------------------------------------------------------------------------- /transformers/src/transformers/generation_utils.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/generation_utils.py -------------------------------------------------------------------------------- /transformers/src/transformers/hf_api.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/hf_api.py -------------------------------------------------------------------------------- /transformers/src/transformers/hf_argparser.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/hf_argparser.py -------------------------------------------------------------------------------- /transformers/src/transformers/modelcard.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modelcard.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_albert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_albert.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_auto.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_auto.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_bart.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_bart.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_bert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_bert.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_camembert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_camembert.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_ctrl.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_ctrl.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_distilbert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_distilbert.py -------------------------------------------------------------------------------- 
/transformers/src/transformers/modeling_electra.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_electra.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_encoder_decoder.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_encoder_decoder.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_flaubert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_flaubert.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_gpt2.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_gpt2.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_longformer.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_longformer.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_marian.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_marian.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_mmbt.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_mmbt.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_mobilebert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_mobilebert.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_openai.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_openai.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_reformer.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_reformer.py -------------------------------------------------------------------------------- /transformers/src/transformers/modeling_retribert.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/transformers/src/transformers/modeling_retribert.py -------------------------------------------------------------------------------- 
The remaining repository files are likewise not reproduced inline. Each of them can be retrieved from https://raw.githubusercontent.com/utahnlp/x-fact/HEAD/<path>, where <path> is the repository path listed below:

/transformers/src/transformers/modeling_roberta.py
/transformers/src/transformers/modeling_t5.py
/transformers/src/transformers/modeling_tf_albert.py
/transformers/src/transformers/modeling_tf_auto.py
/transformers/src/transformers/modeling_tf_bert.py
/transformers/src/transformers/modeling_tf_camembert.py
/transformers/src/transformers/modeling_tf_ctrl.py
/transformers/src/transformers/modeling_tf_distilbert.py
/transformers/src/transformers/modeling_tf_electra.py
/transformers/src/transformers/modeling_tf_flaubert.py
/transformers/src/transformers/modeling_tf_gpt2.py
/transformers/src/transformers/modeling_tf_mobilebert.py
/transformers/src/transformers/modeling_tf_openai.py
/transformers/src/transformers/modeling_tf_pytorch_utils.py
/transformers/src/transformers/modeling_tf_roberta.py
/transformers/src/transformers/modeling_tf_t5.py
/transformers/src/transformers/modeling_tf_transfo_xl.py
/transformers/src/transformers/modeling_tf_transfo_xl_utilities.py
/transformers/src/transformers/modeling_tf_utils.py
/transformers/src/transformers/modeling_tf_xlm.py
/transformers/src/transformers/modeling_tf_xlm_roberta.py
/transformers/src/transformers/modeling_tf_xlnet.py
/transformers/src/transformers/modeling_transfo_xl.py
/transformers/src/transformers/modeling_transfo_xl_utilities.py
/transformers/src/transformers/modeling_utils.py
/transformers/src/transformers/modeling_xlm.py
/transformers/src/transformers/modeling_xlm_roberta.py
/transformers/src/transformers/modeling_xlnet.py
/transformers/src/transformers/multitask_utils.py
/transformers/src/transformers/optimization.py
/transformers/src/transformers/optimization_tf.py
/transformers/src/transformers/pipelines.py
/transformers/src/transformers/testing_utils.py
/transformers/src/transformers/tokenization_albert.py
/transformers/src/transformers/tokenization_auto.py
/transformers/src/transformers/tokenization_bart.py
/transformers/src/transformers/tokenization_bert.py
/transformers/src/transformers/tokenization_bert_japanese.py
/transformers/src/transformers/tokenization_camembert.py
/transformers/src/transformers/tokenization_ctrl.py
/transformers/src/transformers/tokenization_distilbert.py
/transformers/src/transformers/tokenization_electra.py
/transformers/src/transformers/tokenization_flaubert.py
/transformers/src/transformers/tokenization_gpt2.py
/transformers/src/transformers/tokenization_longformer.py
/transformers/src/transformers/tokenization_marian.py
/transformers/src/transformers/tokenization_mobilebert.py
/transformers/src/transformers/tokenization_openai.py
/transformers/src/transformers/tokenization_reformer.py
/transformers/src/transformers/tokenization_retribert.py
/transformers/src/transformers/tokenization_roberta.py
/transformers/src/transformers/tokenization_t5.py
/transformers/src/transformers/tokenization_transfo_xl.py
/transformers/src/transformers/tokenization_utils.py
/transformers/src/transformers/tokenization_utils_base.py
/transformers/src/transformers/tokenization_utils_fast.py
/transformers/src/transformers/tokenization_xlm.py
/transformers/src/transformers/tokenization_xlm_roberta.py
/transformers/src/transformers/tokenization_xlnet.py
/transformers/src/transformers/trainer.py
/transformers/src/transformers/trainer_tf.py
/transformers/src/transformers/trainer_utils.py
/transformers/src/transformers/training_args.py
/transformers/src/transformers/training_args_tf.py
/transformers/templates/adding_a_new_example_script/README.md
/transformers/templates/adding_a_new_example_script/run_xxx.py
/transformers/templates/adding_a_new_example_script/utils_xxx.py
/transformers/templates/adding_a_new_model/README.md
/transformers/templates/adding_a_new_model/configuration_xxx.py
/transformers/templates/adding_a_new_model/modeling_tf_xxx.py
/transformers/templates/adding_a_new_model/modeling_xxx.py
/transformers/templates/adding_a_new_model/tests/test_modeling_tf_xxx.py
/transformers/templates/adding_a_new_model/tests/test_modeling_xxx.py
/transformers/templates/adding_a_new_model/tokenization_xxx.py
/transformers/tests/__init__.py (empty file)
/transformers/tests/test_activations.py
/transformers/tests/test_benchmark.py
/transformers/tests/test_benchmark_tf.py
/transformers/tests/test_configuration_auto.py
/transformers/tests/test_configuration_common.py
/transformers/tests/test_doc_samples.py
/transformers/tests/test_hf_api.py
/transformers/tests/test_hf_argparser.py
/transformers/tests/test_model_card.py
/transformers/tests/test_modeling_albert.py
/transformers/tests/test_modeling_auto.py
/transformers/tests/test_modeling_bart.py
/transformers/tests/test_modeling_bert.py
/transformers/tests/test_modeling_camembert.py
/transformers/tests/test_modeling_common.py
/transformers/tests/test_modeling_ctrl.py
/transformers/tests/test_modeling_distilbert.py
/transformers/tests/test_modeling_electra.py
/transformers/tests/test_modeling_encoder_decoder.py
/transformers/tests/test_modeling_flaubert.py
/transformers/tests/test_modeling_gpt2.py
/transformers/tests/test_modeling_longformer.py
/transformers/tests/test_modeling_marian.py
/transformers/tests/test_modeling_mobilebert.py
/transformers/tests/test_modeling_openai.py
/transformers/tests/test_modeling_reformer.py
/transformers/tests/test_modeling_roberta.py
/transformers/tests/test_modeling_t5.py
/transformers/tests/test_modeling_tf_albert.py
/transformers/tests/test_modeling_tf_auto.py
/transformers/tests/test_modeling_tf_bert.py
/transformers/tests/test_modeling_tf_camembert.py
/transformers/tests/test_modeling_tf_common.py
/transformers/tests/test_modeling_tf_ctrl.py
/transformers/tests/test_modeling_tf_distilbert.py
/transformers/tests/test_modeling_tf_electra.py
/transformers/tests/test_modeling_tf_flaubert.py
/transformers/tests/test_modeling_tf_gpt2.py
/transformers/tests/test_modeling_tf_mobilebert.py
/transformers/tests/test_modeling_tf_openai_gpt.py
/transformers/tests/test_modeling_tf_roberta.py
/transformers/tests/test_modeling_tf_t5.py
/transformers/tests/test_modeling_tf_transfo_xl.py
/transformers/tests/test_modeling_tf_xlm.py
/transformers/tests/test_modeling_tf_xlm_roberta.py
/transformers/tests/test_modeling_tf_xlnet.py
/transformers/tests/test_modeling_transfo_xl.py
/transformers/tests/test_modeling_xlm.py
/transformers/tests/test_modeling_xlm_roberta.py
/transformers/tests/test_modeling_xlnet.py
/transformers/tests/test_onnx.py
/transformers/tests/test_optimization.py
/transformers/tests/test_optimization_tf.py
/transformers/tests/test_pipelines.py
/transformers/tests/test_tokenization_albert.py
/transformers/tests/test_tokenization_auto.py
/transformers/tests/test_tokenization_bert.py
/transformers/tests/test_tokenization_bert_japanese.py
/transformers/tests/test_tokenization_common.py
/transformers/tests/test_tokenization_ctrl.py
/transformers/tests/test_tokenization_distilbert.py
/transformers/tests/test_tokenization_fast.py
/transformers/tests/test_tokenization_gpt2.py
/transformers/tests/test_tokenization_marian.py
/transformers/tests/test_tokenization_openai.py
/transformers/tests/test_tokenization_roberta.py
/transformers/tests/test_tokenization_t5.py
/transformers/tests/test_tokenization_transfo_xl.py
/transformers/tests/test_tokenization_utils.py
/transformers/tests/test_tokenization_xlm.py
/transformers/tests/test_tokenization_xlm_roberta.py
/transformers/tests/test_tokenization_xlnet.py
/transformers/tests/test_trainer.py
/transformers/tests/test_trainer_distributed.py
/transformers/utils/download_glue_data.py
/transformers/utils/link_tester.py
/transformers/valohai.yaml
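Any file in this index can be fetched directly from its raw URL. The snippet below is a minimal sketch of that, assuming Python's standard library only; the function name fetch and the local output directory "downloaded" are illustrative choices, not part of the repository.

# Minimal sketch: download one of the listed files from its raw GitHub URL.
# RAW_BASE comes from the URL pattern in this index; everything else is illustrative.
import urllib.request
from pathlib import Path

RAW_BASE = "https://raw.githubusercontent.com/utahnlp/x-fact/HEAD"

def fetch(repo_path: str, out_dir: str = "downloaded") -> Path:
    """Download a repository file, e.g. '/transformers/src/transformers/multitask_utils.py'."""
    url = RAW_BASE + repo_path
    target = Path(out_dir) / repo_path.lstrip("/")
    target.parent.mkdir(parents=True, exist_ok=True)
    with urllib.request.urlopen(url) as response:
        target.write_bytes(response.read())
    return target

if __name__ == "__main__":
    # Example: fetch the x-fact-specific multitask utilities module.
    print(fetch("/transformers/src/transformers/multitask_utils.py"))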