├── LICENSE
├── README.md
├── src
├── Weights & Biases.pdf
├── bart_config_10_7.json
├── baseline
│ ├── .ipynb_checkpoints
│ │ └── test_generations-checkpoint.txt
│ └── test_generations.txt
├── callbacks.py
├── composit
│ ├── .ipynb_checkpoints
│ │ ├── test_generations-checkpoint.txt
│ │ └── test_results-checkpoint.txt
│ ├── hparams.pkl
│ ├── metrics.json
│ ├── test_generations.txt
│ └── test_results.txt
├── data
│ ├── extract_actions.ipynb
│ └── pre_process.ipynb
├── eval.ipynb
├── lightning_base.py
├── save_len_file.py
├── train.py
├── train_base.sh
├── train_multi_graph.sh
├── utils.py
└── transformers
├── .coveragerc
├── MANIFEST.in
├── Makefile
├── README.md
├── docker
├── transformers-cpu
│ └── Dockerfile
├── transformers-gpu
│ └── Dockerfile
├── transformers-pytorch-cpu
│ └── Dockerfile
├── transformers-pytorch-gpu
│ └── Dockerfile
├── transformers-pytorch-tpu
│ ├── Dockerfile
│ ├── bert-base-cased.jsonnet
│ ├── dataset.yaml
│ └── docker-entrypoint.sh
├── transformers-tensorflow-cpu
│ └── Dockerfile
└── transformers-tensorflow-gpu
│ └── Dockerfile
├── docs
├── Makefile
├── README.md
└── source
│ ├── _static
│ │ ├── css
│ │ │ ├── Calibre-Light.ttf
│ │ │ ├── Calibre-Medium.otf
│ │ │ ├── Calibre-Regular.otf
│ │ │ ├── Calibre-Thin.otf
│ │ │ ├── code-snippets.css
│ │ │ └── huggingface.css
│ │ └── js
│ │ │ ├── custom.js
│ │ │ └── huggingface_logo.svg
│ ├── benchmarks.rst
│ ├── bertology.rst
│ ├── conf.py
│ ├── contributing.md
│ ├── converting_tensorflow_models.rst
│ ├── custom_datasets.rst
│ ├── examples.md
│ ├── favicon.ico
│ ├── glossary.rst
│ ├── imgs
│ │ ├── local_attention_mask.png
│ │ ├── ppl_chunked.gif
│ │ ├── ppl_full.gif
│ │ ├── ppl_sliding.gif
│ │ ├── transformers_logo_name.png
│ │ ├── warmup_constant_schedule.png
│ │ ├── warmup_cosine_hard_restarts_schedule.png
│ │ ├── warmup_cosine_schedule.png
│ │ ├── warmup_cosine_warm_restarts_schedule.png
│ │ └── warmup_linear_schedule.png
│ ├── index.rst
│ ├── installation.md
│ ├── internal
│ │ ├── modeling_utils.rst
│ │ ├── pipelines_utils.rst
│ │ └── tokenization_utils.rst
│ ├── main_classes
│ │ ├── configuration.rst
│ │ ├── logging.rst
│ │ ├── model.rst
│ │ ├── optimizer_schedules.rst
│ │ ├── output.rst
│ │ ├── pipelines.rst
│ │ ├── processors.rst
│ │ ├── tokenizer.rst
│ │ └── trainer.rst
│ ├── migration.md
│ ├── model_doc
│ │ ├── albert.rst
│ │ ├── auto.rst
│ │ ├── bart.rst
│ │ ├── bert.rst
│ │ ├── bertgeneration.rst
│ │ ├── camembert.rst
│ │ ├── ctrl.rst
│ │ ├── dialogpt.rst
│ │ ├── distilbert.rst
│ │ ├── dpr.rst
│ │ ├── electra.rst
│ │ ├── encoderdecoder.rst
│ │ ├── flaubert.rst
│ │ ├── fsmt.rst
│ │ ├── funnel.rst
│ │ ├── gpt.rst
│ │ ├── gpt2.rst
│ │ ├── layoutlm.rst
│ │ ├── longformer.rst
│ │ ├── lxmert.rst
│ │ ├── marian.rst
│ │ ├── mbart.rst
│ │ ├── mobilebert.rst
│ │ ├── pegasus.rst
│ │ ├── reformer.rst
│ │ ├── retribert.rst
│ │ ├── roberta.rst
│ │ ├── t5.rst
│ │ ├── transformerxl.rst
│ │ ├── xlm.rst
│ │ ├── xlmroberta.rst
│ │ └── xlnet.rst
│ ├── model_sharing.rst
│ ├── model_summary.rst
│ ├── multilingual.rst
│ ├── notebooks.md
│ ├── perplexity.rst
│ ├── philosophy.rst
│ ├── preprocessing.rst
│ ├── pretrained_models.rst
│ ├── quicktour.rst
│ ├── serialization.rst
│ ├── task_summary.rst
│ ├── testing.rst
│ ├── tokenizer_summary.rst
│ └── training.rst
├── examples
├── README.md
├── adversarial
│ ├── README.md
│ ├── run_hans.py
│ └── utils_hans.py
├── benchmarking
│ ├── README.md
│ ├── plot_csv_file.py
│ ├── run_benchmark.py
│ └── run_benchmark_tf.py
├── bert-loses-patience
│ ├── README.md
│ ├── pabee
│ │ ├── __init__.py
│ │ ├── modeling_pabee_albert.py
│ │ └── modeling_pabee_bert.py
│ ├── run_glue_with_pabee.py
│ └── test_run_glue_with_pabee.py
├── bertology
│ └── run_bertology.py
├── conftest.py
├── contrib
│ ├── README.md
│ ├── mm-imdb
│ │ ├── README.md
│ │ ├── run_mmimdb.py
│ │ └── utils_mmimdb.py
│ ├── run_camembert.py
│ ├── run_openai_gpt.py
│ ├── run_swag.py
│ └── run_transfo_xl.py
├── deebert
│ ├── README.md
│ ├── entropy_eval.sh
│ ├── eval_deebert.sh
│ ├── run_glue_deebert.py
│ ├── src
│ │ ├── __init__.py
│ │ ├── modeling_highway_bert.py
│ │ └── modeling_highway_roberta.py
│ ├── test_glue_deebert.py
│ └── train_deebert.sh
├── distillation
│ ├── README.md
│ ├── distiller.py
│ ├── grouped_batch_sampler.py
│ ├── lm_seqs_dataset.py
│ ├── requirements.txt
│ ├── run_squad_w_distillation.py
│ ├── scripts
│ │ ├── binarized_data.py
│ │ ├── extract.py
│ │ ├── extract_distilbert.py
│ │ └── token_counts.py
│ ├── train.py
│ ├── training_configs
│ │ ├── distilbert-base-cased.json
│ │ ├── distilbert-base-multilingual-cased.json
│ │ ├── distilbert-base-uncased.json
│ │ ├── distilgpt2.json
│ │ └── distilroberta-base.json
│ └── utils.py
├── language-modeling
│ ├── README.md
│ └── run_language_modeling.py
├── lightning_base.py
├── longform-qa
│ ├── README.md
│ ├── eli5_app.py
│ └── eli5_utils.py
├── lxmert
│ ├── README.md
│ ├── demo.ipynb
│ ├── extracting_data.py
│ ├── modeling_frcnn.py
│ ├── processing_image.py
│ ├── requirements.txt
│ ├── utils.py
│ └── visualizing_image.py
├── movement-pruning
│ ├── README.md
│ ├── Saving_PruneBERT.ipynb
│ ├── bertarize.py
│ ├── counts_parameters.py
│ ├── emmental
│ │ ├── __init__.py
│ │ ├── configuration_bert_masked.py
│ │ ├── modeling_bert_masked.py
│ │ └── modules
│ │ │ ├── __init__.py
│ │ │ ├── binarizer.py
│ │ │ └── masked_nn.py
│ ├── masked_run_glue.py
│ ├── masked_run_squad.py
│ └── requirements.txt
├── multiple-choice
│ ├── README.md
│ ├── run_multiple_choice.py
│ ├── run_tf_multiple_choice.py
│ └── utils_multiple_choice.py
├── question-answering
│ ├── README.md
│ ├── run_squad.py
│ ├── run_squad_trainer.py
│ └── run_tf_squad.py
├── rag
│ ├── README.md
│ ├── __init__.py
│ ├── callbacks.py
│ ├── distributed_retriever.py
│ ├── eval_rag.py
│ ├── finetune.py
│ ├── finetune.sh
│ ├── parse_dpr_relevance_data.py
│ ├── requirements.txt
│ ├── test_distributed_retriever.py
│ └── utils.py
├── requirements.txt
├── seq2seq
│ ├── README.md
│ ├── __init__.py
│ ├── bertabs
│ │ ├── README.md
│ │ ├── __init__.py
│ │ ├── configuration_bertabs.py
│ │ ├── convert_bertabs_original_pytorch_checkpoint.py
│ │ ├── modeling_bertabs.py
│ │ ├── requirements.txt
│ │ ├── run_summarization.py
│ │ ├── test_utils_summarization.py
│ │ └── utils_summarization.py
│ ├── builtin_trainer
│ │ ├── finetune.sh
│ │ ├── finetune_tpu.sh
│ │ ├── train_distil_marian_enro.sh
│ │ ├── train_distil_marian_enro_tpu.sh
│ │ ├── train_distilbart_cnn.sh
│ │ └── train_mbart_cc25_enro.sh
│ ├── callbacks.py
│ ├── convert_model_to_fp16.py
│ ├── convert_pl_checkpoint_to_hf.py
│ ├── distil_marian_enro_teacher.sh
│ ├── distil_marian_no_teacher.sh
│ ├── distillation.py
│ ├── download_wmt.py
│ ├── dynamic_bs_example.sh
│ ├── finetune.py
│ ├── finetune.sh
│ ├── finetune_bart_tiny.sh
│ ├── finetune_pegasus_xsum.sh
│ ├── finetune_t5.sh
│ ├── finetune_trainer.py
│ ├── initialization_utils.py
│ ├── minify_dataset.py
│ ├── pack_dataset.py
│ ├── romanian_postprocessing.md
│ ├── run_distiller.sh
│ ├── run_distributed_eval.py
│ ├── run_eval.py
│ ├── run_eval_search.py
│ ├── save_len_file.py
│ ├── seq2seq_trainer.py
│ ├── test_bash_script.py
│ ├── test_data
│ │ ├── fsmt
│ │ │ ├── build-eval-data.py
│ │ │ └── fsmt_val_data.json
│ │ └── wmt_en_ro
│ │ │ ├── test.source
│ │ │ ├── test.target
│ │ │ ├── train.len
│ │ │ ├── train.source
│ │ │ ├── train.target
│ │ │ ├── val.len
│ │ │ ├── val.source
│ │ │ └── val.target
│ ├── test_datasets.py
│ ├── test_finetune_trainer.py
│ ├── test_fsmt_bleu_score.py
│ ├── test_seq2seq_examples.py
│ ├── train_distilbart_cnn.sh
│ ├── train_distilbart_xsum.sh
│ ├── train_mbart_cc25_enro.sh
│ ├── utils.py
│ └── xla_spawn.py
├── test_examples.py
├── test_xla_examples.py
├── text-classification
│ ├── README.md
│ ├── run_glue.py
│ ├── run_pl.sh
│ ├── run_pl_glue.py
│ ├── run_tf_glue.py
│ ├── run_tf_text_classification.py
│ └── run_xnli.py
├── text-generation
│ ├── README.md
│ ├── pplm
│ │ ├── README.md
│ │ ├── imgs
│ │ │ ├── headfigure.png
│ │ │ └── wooly.png
│ │ ├── pplm_classification_head.py
│ │ ├── run_pplm.py
│ │ └── run_pplm_discrim_train.py
│ └── run_generation.py
├── token-classification
│ ├── README.md
│ ├── run.sh
│ ├── run_chunk.sh
│ ├── run_ner.py
│ ├── run_pl.sh
│ ├── run_pl_ner.py
│ ├── run_pos.sh
│ ├── run_pos_pl.sh
│ ├── run_tf_ner.py
│ ├── scripts
│ │ └── preprocess.py
│ ├── tasks.py
│ ├── test_ner_examples.py
│ └── utils_ner.py
└── xla_spawn.py
├── hubconf.py
├── model_cards
├── DeepPavlov
│ ├── bert-base-bg-cs-pl-ru-cased
│ │ └── README.md
│ ├── bert-base-cased-conversational
│ │ └── README.md
│ ├── bert-base-multilingual-cased-sentence
│ │ └── README.md
│ ├── rubert-base-cased-conversational
│ │ └── README.md
│ ├── rubert-base-cased-sentence
│ │ └── README.md
│ └── rubert-base-cased
│ │ └── README.md
├── Hate-speech-CNERG
│ ├── dehatebert-mono-arabic
│ │ └── README.md
│ ├── dehatebert-mono-english
│ │ └── README.md
│ ├── dehatebert-mono-french
│ │ └── README.md
│ ├── dehatebert-mono-german
│ │ └── README.md
│ ├── dehatebert-mono-indonesian
│ │ └── README.md
│ ├── dehatebert-mono-italian
│ │ └── README.md
│ ├── dehatebert-mono-polish
│ │ └── README.md
│ ├── dehatebert-mono-portugese
│ │ └── README.md
│ └── dehatebert-mono-spanish
│ │ └── README.md
├── HooshvareLab
│ ├── bert-base-parsbert-armanner-uncased
│ │ └── README.md
│ ├── bert-base-parsbert-ner-uncased
│ │ └── README.md
│ ├── bert-base-parsbert-peymaner-uncased
│ │ └── README.md
│ ├── bert-base-parsbert-uncased
│ │ └── README.md
│ └── bert-fa-base-uncased
│ │ └── README.md
├── KB
│ ├── albert-base-swedish-cased-alpha
│ │ └── README.md
│ ├── bert-base-swedish-cased-ner
│ │ └── README.md
│ └── bert-base-swedish-cased
│ │ └── README.md
├── LorenzoDeMattei
│ └── GePpeTto
│ │ └── README.md
├── MoseliMotsoehli
│ ├── TswanaBert
│ │ └── README.md
│ └── zuBERTa
│ │ └── README.md
├── Musixmatch
│ ├── umberto-commoncrawl-cased-v1
│ │ └── README.md
│ └── umberto-wikipedia-uncased-v1
│ │ └── README.md
├── NLP4H
│ └── ms_bert
│ │ └── README.md
├── Naveen-k
│ └── KanBERTo
│ │ └── README.md
├── NeuML
│ ├── bert-small-cord19-squad2
│ │ └── README.md
│ ├── bert-small-cord19
│ │ └── README.md
│ └── bert-small-cord19qa
│ │ └── README.md
├── Norod78
│ └── hewiki-articles-distilGPT2py-il
│ │ └── README.md
├── Primer
│ └── bart-squad2
│ │ └── README.md
├── Rostlab
│ ├── prot_bert
│ │ └── README.md
│ └── prot_bert_bfd
│ │ └── README.md
├── SZTAKI-HLT
│ └── hubert-base-cc
│ │ └── README.md
├── SparkBeyond
│ └── roberta-large-sts-b
│ │ └── README.md
├── T-Systems-onsite
│ └── bert-german-dbmdz-uncased-sentence-stsb
│ │ └── README.md
├── Tereveni-AI
│ └── gpt2-124M-uk-fiction
│ │ └── README.md
├── TurkuNLP
│ ├── bert-base-finnish-cased-v1
│ │ └── README.md
│ └── bert-base-finnish-uncased-v1
│ │ └── README.md
├── Vamsi
│ └── T5_Paraphrase_Paws
│ │ └── README.md
├── VictorSanh
│ └── roberta-base-finetuned-yelp-polarity
│ │ └── README.md
├── ViktorAlm
│ └── electra-base-norwegian-uncased-discriminator
│ │ └── README.md
├── a-ware
│ ├── bart-squadv2
│ │ └── README.md
│ ├── roberta-large-squad-classification
│ │ └── README.md
│ └── xlmroberta-squadv2
│ │ └── README.md
├── activebus
│ ├── BERT-DK_laptop
│ │ └── README.md
│ ├── BERT-DK_rest
│ │ └── README.md
│ ├── BERT-PT_laptop
│ │ └── README.md
│ ├── BERT-PT_rest
│ │ └── README.md
│ ├── BERT-XD_Review
│ │ └── README.md
│ └── BERT_Review
│ │ └── README.md
├── ahotrod
│ ├── albert_xxlargev1_squad2_512
│ │ └── README.md
│ ├── electra_large_discriminator_squad2_512
│ │ └── README.md
│ ├── roberta_large_squad2
│ │ └── README.md
│ └── xlnet_large_squad2_512
│ │ └── README.md
├── akhooli
│ ├── gpt2-small-arabic-poetry
│ │ └── README.md
│ ├── gpt2-small-arabic
│ │ └── README.md
│ ├── mbart-large-cc25-ar-en
│ │ └── README.md
│ ├── mbart-large-cc25-en-ar
│ │ └── README.md
│ └── xlm-r-large-arabic-sent
│ │ └── README.md
├── albert-base-v1-README.md
├── albert-xxlarge-v2-README.md
├── aliosm
│ ├── ComVE-distilgpt2
│ │ └── README.md
│ ├── ComVE-gpt2-large
│ │ └── README.md
│ ├── ComVE-gpt2-medium
│ │ └── README.md
│ └── ComVE-gpt2
│ │ └── README.md
├── allegro
│ ├── herbert-klej-cased-tokenizer-v1
│ │ └── README.md
│ └── herbert-klej-cased-v1
│ │ └── README.md
├── allenai
│ ├── biomed_roberta_base
│ │ └── README.md
│ ├── longformer-base-4096-extra.pos.embd.only
│ │ └── README.md
│ ├── longformer-base-4096
│ │ └── README.md
│ ├── scibert_scivocab_cased
│ │ └── README.md
│ ├── scibert_scivocab_uncased
│ │ └── README.md
│ ├── wmt16-en-de-12-1
│ │ └── README.md
│ ├── wmt16-en-de-dist-12-1
│ │ └── README.md
│ ├── wmt16-en-de-dist-6-1
│ │ └── README.md
│ ├── wmt19-de-en-6-6-base
│ │ └── README.md
│ └── wmt19-de-en-6-6-big
│ │ └── README.md
├── amberoad
│ └── bert-multilingual-passage-reranking-msmarco
│ │ └── README.md
├── antoiloui
│ └── belgpt2
│ │ └── README.md
├── aodiniz
│ ├── bert_uncased_L-10_H-512_A-8_cord19-200616
│ │ └── README.md
│ ├── bert_uncased_L-10_H-512_A-8_cord19-200616_squad2
│ │ └── README.md
│ ├── bert_uncased_L-2_H-512_A-8_cord19-200616
│ │ └── README.md
│ └── bert_uncased_L-4_H-256_A-4_cord19-200616
│ │ └── README.md
├── asafaya
│ ├── bert-base-arabic
│ │ └── README.md
│ ├── bert-large-arabic
│ │ └── README.md
│ ├── bert-medium-arabic
│ │ └── README.md
│ └── bert-mini-arabic
│ │ └── README.md
├── aubmindlab
│ ├── bert-base-arabert
│ │ └── README.md
│ └── bert-base-arabertv01
│ │ └── README.md
├── bart-large-cnn
│ └── README.md
├── bart-large-xsum
│ └── README.md
├── bashar-talafha
│ └── multi-dialect-bert-base-arabic
│ │ └── README.md
├── bayartsogt
│ └── albert-mongolian
│ │ └── README.md
├── bert-base-cased-README.md
├── bert-base-chinese-README.md
├── bert-base-german-cased-README.md
├── bert-base-german-dbmdz-cased-README.md
├── bert-base-german-dbmdz-uncased-README.md
├── bert-base-multilingual-cased-README.md
├── bert-base-multilingual-uncased-README.md
├── bert-base-uncased-README.md
├── bert-large-cased-README.md
├── binwang
│ └── xlnet-base-cased
│ │ └── README.md
├── blinoff
│ └── roberta-base-russian-v0
│ │ └── README.md
├── cahya
│ ├── bert-base-indonesian-522M
│ │ └── README.md
│ ├── gpt2-small-indonesian-522M
│ │ └── README.md
│ └── roberta-base-indonesian-522M
│ │ └── README.md
├── camembert-base-README.md
├── camembert
│ ├── camembert-base-ccnet-4gb
│ │ └── README.md
│ ├── camembert-base-ccnet
│ │ └── README.md
│ ├── camembert-base-oscar-4gb
│ │ └── README.md
│ ├── camembert-base-wikipedia-4gb
│ │ └── README.md
│ └── camembert-large
│ │ └── README.md
├── canwenxu
│ └── BERT-of-Theseus-MNLI
│ │ └── README.md
├── cedpsam
│ └── chatbot_fr
│ │ └── README.md
├── chrisliu298
│ └── arxiv_ai_gpt2
│ │ └── README.md
├── cimm-kzn
│ └── rudr-bert
│ │ └── README.md
├── clue
│ ├── albert_chinese_small
│ │ └── README.md
│ ├── albert_chinese_tiny
│ │ └── README.md
│ ├── roberta_chinese_3L312_clue_tiny
│ │ └── README.md
│ ├── roberta_chinese_base
│ │ └── README.md
│ ├── roberta_chinese_large
│ │ └── README.md
│ └── xlnet_chinese_large
│ │ └── README.md
├── codegram
│ ├── calbert-base-uncased
│ │ └── README.md
│ └── calbert-tiny-uncased
│ │ └── README.md
├── csarron
│ ├── bert-base-uncased-squad-v1
│ │ └── README.md
│ ├── mobilebert-uncased-squad-v1
│ │ └── README.md
│ ├── mobilebert-uncased-squad-v2
│ │ └── README.md
│ └── roberta-base-squad-v1
│ │ └── README.md
├── daigo
│ └── bert-base-japanese-sentiment
│ │ └── README.md
├── dbmdz
│ ├── bert-base-german-cased
│ │ └── README.md
│ ├── bert-base-german-europeana-cased
│ │ └── README.md
│ ├── bert-base-german-europeana-uncased
│ │ └── README.md
│ ├── bert-base-german-uncased
│ │ └── README.md
│ ├── bert-base-italian-cased
│ │ └── README.md
│ ├── bert-base-italian-uncased
│ │ └── README.md
│ ├── bert-base-italian-xxl-cased
│ │ └── README.md
│ ├── bert-base-italian-xxl-uncased
│ │ └── README.md
│ ├── bert-base-turkish-128k-cased
│ │ └── README.md
│ ├── bert-base-turkish-128k-uncased
│ │ └── README.md
│ ├── bert-base-turkish-cased
│ │ └── README.md
│ ├── bert-base-turkish-uncased
│ │ └── README.md
│ ├── distilbert-base-turkish-cased
│ │ └── README.md
│ ├── electra-base-turkish-cased-discriminator
│ │ └── README.md
│ └── electra-small-turkish-cased-discriminator
│ │ └── README.md
├── dccuchile
│ ├── bert-base-spanish-wwm-cased
│ │ └── README.md
│ └── bert-base-spanish-wwm-uncased
│ │ └── README.md
├── deepset
│ ├── bert-base-german-cased-oldvocab
│ │ └── README.md
│ ├── electra-base-squad2
│ │ └── README.md
│ ├── minilm-uncased-squad2
│ │ └── README.md
│ ├── quora_dedup_bert_base
│ │ └── README.md
│ ├── roberta-base-squad2-covid
│ │ └── README.md
│ ├── roberta-base-squad2
│ │ └── README.md
│ ├── sentence_bert
│ │ └── README.md
│ └── xlm-roberta-large-squad2
│ │ └── README.md
├── digitalepidemiologylab
│ └── covid-twitter-bert
│ │ └── README.md
├── distilbert-base-cased-distilled-squad-README.md
├── distilbert-base-multilingual-cased-README.md
├── distilbert-base-uncased-README.md
├── distilbert-base-uncased-distilled-squad-README.md
├── distilgpt2-README.md
├── distilroberta-base-README.md
├── djstrong
│ └── bg_cs_pl_ru_cased_L-12_H-768_A-12
│ │ └── README.md
├── dkleczek
│ ├── bert-base-polish-cased-v1
│ │ └── README.md
│ └── bert-base-polish-uncased-v1
│ │ └── README.md
├── dumitrescustefan
│ ├── bert-base-romanian-cased-v1
│ │ └── README.md
│ └── bert-base-romanian-uncased-v1
│ │ └── README.md
├── elgeish
│ ├── cs224n-squad2.0-albert-base-v2
│ │ └── README.md
│ ├── cs224n-squad2.0-albert-large-v2
│ │ └── README.md
│ ├── cs224n-squad2.0-albert-xxlarge-v1
│ │ └── README.md
│ ├── cs224n-squad2.0-distilbert-base-uncased
│ │ └── README.md
│ └── cs224n-squad2.0-roberta-base
│ │ └── README.md
├── emilyalsentzer
│ ├── Bio_ClinicalBERT
│ │ └── README.md
│ └── Bio_Discharge_Summary_BERT
│ │ └── README.md
├── etalab-ia
│ └── camembert-base-squadFR-fquad-piaf
│ │ └── README.md
├── facebook
│ ├── bart-large-cnn
│ │ └── README.md
│ ├── bart-large
│ │ └── README.md
│ ├── rag-sequence-base
│ │ └── README.md
│ ├── rag-sequence-nq
│ │ └── README.md
│ ├── rag-token-base
│ │ └── README.md
│ ├── rag-token-nq
│ │ └── README.md
│ ├── rag-token-nq_new
│ │ └── README.md
│ ├── wmt19-de-en
│ │ └── README.md
│ ├── wmt19-en-de
│ │ └── README.md
│ ├── wmt19-en-ru
│ │ └── README.md
│ └── wmt19-ru-en
│ │ └── README.md
├── flexudy
│ └── t5-base-multi-sentence-doctor
│ │ ├── README.md
│ │ └── sent-banner.png
├── fmikaelian
│ ├── camembert-base-fquad
│ │ └── README.md
│ ├── camembert-base-squad
│ │ └── README.md
│ └── flaubert-base-uncased-squad
│ │ └── README.md
├── fran-martinez
│ └── scibert_scivocab_cased_ner_jnlpba
│ │ └── README.md
├── funnel-transformer
│ ├── intermediate-base
│ │ └── README.md
│ ├── intermediate
│ │ └── README.md
│ ├── large-base
│ │ └── README.md
│ ├── large
│ │ └── README.md
│ ├── medium-base
│ │ └── README.md
│ ├── medium
│ │ └── README.md
│ ├── small-base
│ │ └── README.md
│ ├── small
│ │ └── README.md
│ ├── xlarge-base
│ │ └── README.md
│ └── xlarge
│ │ └── README.md
├── gaochangkuan
│ └── model_dir
│ │ └── README.md
├── german-nlp-group
│ └── electra-base-german-uncased
│ │ └── README.md
├── giganticode
│ └── StackOBERTflow-comments-small-v1
│ │ └── README.md
├── gilf
│ ├── french-camembert-postag-model
│ │ └── README.md
│ └── french-postag-model
│ │ └── README.md
├── google
│ ├── bert2bert_L-24_wmt_de_en
│ │ └── README.md
│ ├── bert2bert_L-24_wmt_en_de
│ │ └── README.md
│ ├── bert_uncased_L-10_H-128_A-2
│ │ └── README.md
│ ├── bert_uncased_L-10_H-256_A-4
│ │ └── README.md
│ ├── bert_uncased_L-10_H-512_A-8
│ │ └── README.md
│ ├── bert_uncased_L-10_H-768_A-12
│ │ └── README.md
│ ├── bert_uncased_L-12_H-128_A-2
│ │ └── README.md
│ ├── bert_uncased_L-12_H-256_A-4
│ │ └── README.md
│ ├── bert_uncased_L-12_H-512_A-8
│ │ └── README.md
│ ├── bert_uncased_L-12_H-768_A-12
│ │ └── README.md
│ ├── bert_uncased_L-2_H-128_A-2
│ │ └── README.md
│ ├── bert_uncased_L-2_H-256_A-4
│ │ └── README.md
│ ├── bert_uncased_L-2_H-512_A-8
│ │ └── README.md
│ ├── bert_uncased_L-2_H-768_A-12
│ │ └── README.md
│ ├── bert_uncased_L-4_H-128_A-2
│ │ └── README.md
│ ├── bert_uncased_L-4_H-256_A-4
│ │ └── README.md
│ ├── bert_uncased_L-4_H-512_A-8
│ │ └── README.md
│ ├── bert_uncased_L-4_H-768_A-12
│ │ └── README.md
│ ├── bert_uncased_L-6_H-128_A-2
│ │ └── README.md
│ ├── bert_uncased_L-6_H-256_A-4
│ │ └── README.md
│ ├── bert_uncased_L-6_H-512_A-8
│ │ └── README.md
│ ├── bert_uncased_L-6_H-768_A-12
│ │ └── README.md
│ ├── bert_uncased_L-8_H-128_A-2
│ │ └── README.md
│ ├── bert_uncased_L-8_H-256_A-4
│ │ └── README.md
│ ├── bert_uncased_L-8_H-512_A-8
│ │ └── README.md
│ ├── bert_uncased_L-8_H-768_A-12
│ │ └── README.md
│ ├── electra-base-discriminator
│ │ └── README.md
│ ├── electra-base-generator
│ │ └── README.md
│ ├── electra-large-discriminator
│ │ └── README.md
│ ├── electra-large-generator
│ │ └── README.md
│ ├── electra-small-discriminator
│ │ └── README.md
│ ├── electra-small-generator
│ │ └── README.md
│ ├── mobilebert-uncased
│ │ └── README.md
│ ├── reformer-crime-and-punishment
│ │ └── README.md
│ ├── reformer-enwik8
│ │ └── README.md
│ ├── roberta2roberta_L-24_bbc
│ │ └── README.md
│ ├── roberta2roberta_L-24_cnn_daily_mail
│ │ └── README.md
│ ├── roberta2roberta_L-24_discofuse
│ │ └── README.md
│ ├── roberta2roberta_L-24_gigaword
│ │ └── README.md
│ └── roberta2roberta_L-24_wikisplit
│ │ └── README.md
├── gpt2-README.md
├── gpt2-large-README.md
├── gpt2-medium-README.md
├── gpt2-xl-README.md
├── gsarti
│ ├── biobert-nli
│ │ └── README.md
│ ├── covidbert-nli
│ │ └── README.md
│ └── scibert-nli
│ │ └── README.md
├── healx
│ ├── gpt-2-pubmed-large
│ │ └── README.md
│ └── gpt-2-pubmed-medium
│ │ └── README.md
├── henryk
│ ├── bert-base-multilingual-cased-finetuned-dutch-squad2
│ │ └── README.md
│ ├── bert-base-multilingual-cased-finetuned-polish-squad1
│ │ └── README.md
│ └── bert-base-multilingual-cased-finetuned-polish-squad2
│ │ └── README.md
├── huggingface
│ ├── CodeBERTa-language-id
│ │ └── README.md
│ └── CodeBERTa-small-v1
│ │ └── README.md
├── huseinzol05
│ ├── albert-base-bahasa-cased
│ │ └── README.md
│ ├── albert-tiny-bahasa-cased
│ │ └── README.md
│ ├── bert-base-bahasa-cased
│ │ └── README.md
│ ├── electra-base-discriminator-bahasa-cased
│ │ └── README.md
│ ├── electra-base-generator-bahasa-cased
│ │ └── README.md
│ ├── electra-small-discriminator-bahasa-cased
│ │ └── README.md
│ ├── electra-small-generator-bahasa-cased
│ │ └── README.md
│ ├── gpt2-117M-bahasa-cased
│ │ └── README.md
│ ├── gpt2-345M-bahasa-cased
│ │ └── README.md
│ ├── t5-base-bahasa-cased
│ │ └── README.md
│ ├── t5-base-bahasa-summarization-cased
│ │ └── README.md
│ ├── t5-small-bahasa-cased
│ │ └── README.md
│ ├── t5-small-bahasa-summarization-cased
│ │ └── README.md
│ ├── tiny-bert-bahasa-cased
│ │ └── README.md
│ └── xlnet-base-bahasa-cased
│ │ └── README.md
├── iarfmoose
│ ├── bert-base-cased-qa-evaluator
│ │ └── README.md
│ ├── roberta-base-bulgarian-pos
│ │ └── README.md
│ ├── roberta-base-bulgarian
│ │ └── README.md
│ ├── roberta-small-bulgarian-pos
│ │ └── README.md
│ ├── roberta-small-bulgarian
│ │ └── README.md
│ └── t5-base-question-generator
│ │ └── README.md
├── illuin
│ ├── camembert-base-fquad
│ │ └── README.md
│ ├── camembert-large-fquad
│ │ └── README.md
│ └── lepetit
│ │ └── README.md
├── indobenchmark
│ ├── indobert-base-p1
│ │ └── README.md
│ ├── indobert-base-p2
│ │ └── README.md
│ ├── indobert-large-p1
│ │ └── README.md
│ ├── indobert-large-p2
│ │ └── README.md
│ ├── indobert-lite-base-p1
│ │ └── README.md
│ ├── indobert-lite-base-p2
│ │ └── README.md
│ ├── indobert-lite-large-p1
│ │ └── README.md
│ └── indobert-lite-large-p2
│ │ └── README.md
├── ipuneetrathore
│ └── bert-base-cased-finetuned-finBERT
│ │ └── README.md
├── iuliaturc
│ └── bert_uncased_L-2_H-128_A-2
│ │ └── README.md
├── ixa-ehu
│ ├── berteus-base-cased
│ │ └── README.md
│ └── ixambert-base-cased
│ │ └── README.md
├── jannesg
│ ├── bertsson
│ │ └── README.md
│ ├── takalane_afr_roberta
│ │ └── README.md
│ ├── takalane_nbl_roberta
│ │ └── README.md
│ ├── takalane_nso_roberta
│ │ └── README.md
│ ├── takalane_sot_roberta
│ │ └── README.md
│ ├── takalane_ssw_roberta
│ │ └── README.md
│ ├── takalane_tsn_roberta
│ │ └── README.md
│ ├── takalane_tso_roberta
│ │ └── README.md
│ ├── takalane_ven_roberta
│ │ └── README.md
│ ├── takalane_xho_roberta
│ │ └── README.md
│ └── takalane_zul_roberta
│ │ └── README.md
├── jimregan
│ └── BERTreach
│ │ └── README.md
├── jme-p
│ └── shrugging-grace-tweet-classifier
│ │ └── README.md
├── joeddav
│ ├── bart-large-mnli-yahoo-answers
│ │ └── README.md
│ └── xlm-roberta-large-xnli
│ │ └── README.md
├── jplu
│ ├── tf-camembert-base
│ │ └── README.md
│ ├── tf-xlm-r-ner-40-lang
│ │ └── README.md
│ ├── tf-xlm-roberta-base
│ │ └── README.md
│ └── tf-xlm-roberta-large
│ │ └── README.md
├── julien-c
│ ├── EsperBERTo-small-pos
│ │ └── README.md
│ ├── EsperBERTo-small
│ │ └── README.md
│ ├── bert-xsmall-dummy
│ │ └── README.md
│ └── dummy-unknown
│ │ └── README.md
├── krevas
│ ├── finance-koelectra-base-discriminator
│ │ └── README.md
│ ├── finance-koelectra-base-generator
│ │ └── README.md
│ ├── finance-koelectra-small-discriminator
│ │ └── README.md
│ └── finance-koelectra-small-generator
│ │ └── README.md
├── ktrapeznikov
│ ├── albert-xlarge-v2-squad-v2
│ │ └── README.md
│ ├── biobert_v1.1_pubmed_squad_v2
│ │ └── README.md
│ └── scibert_scivocab_uncased_squad_v2
│ │ └── README.md
├── kuisailab
│ ├── albert-base-arabic
│ │ └── README.md
│ ├── albert-large-arabic
│ │ └── README.md
│ └── albert-xlarge-arabic
│ │ └── README.md
├── loodos
│ ├── albert-base-turkish-uncased
│ │ └── README.md
│ ├── bert-base-turkish-uncased
│ │ └── README.md
│ ├── electra-base-turkish-64k-uncased-discriminator
│ │ └── README.md
│ ├── electra-base-turkish-uncased-discriminator
│ │ └── README.md
│ ├── electra-small-turkish-cased-discriminator
│ │ └── README.md
│ └── electra-small-turkish-uncased-discriminator
│ │ └── README.md
├── lordtt13
│ ├── COVID-SciBERT
│ │ └── README.md
│ └── emo-mobilebert
│ │ └── README.md
├── lserinol
│ └── bert-turkish-question-answering
│ │ └── README.md
├── lvwerra
│ ├── bert-imdb
│ │ └── README.md
│ ├── gpt2-imdb-ctrl
│ │ └── README.md
│ ├── gpt2-imdb-pos
│ │ └── README.md
│ ├── gpt2-imdb
│ │ └── README.md
│ └── gpt2-medium-taboo
│ │ └── README.md
├── lysandre
│ ├── arxiv-nlp
│ │ └── README.md
│ └── arxiv
│ │ └── README.md
├── m3hrdadfi
│ └── albert-fa-base-v2
│ │ └── README.md
├── microsoft
│ ├── DialoGPT-large
│ │ └── README.md
│ ├── DialoGPT-medium
│ │ └── README.md
│ ├── DialoGPT-small
│ │ └── README.md
│ ├── MiniLM-L12-H384-uncased
│ │ └── README.md
│ ├── Multilingual-MiniLM-L12-H384
│ │ └── README.md
│ ├── codebert-base-mlm
│ │ └── README.md
│ ├── codebert-base
│ │ └── README.md
│ ├── layoutlm-base-uncased
│ │ └── README.md
│ └── layoutlm-large-uncased
│ │ └── README.md
├── monologg
│ ├── koelectra-base-discriminator
│ │ └── README.md
│ ├── koelectra-base-generator
│ │ └── README.md
│ ├── koelectra-small-discriminator
│ │ └── README.md
│ └── koelectra-small-generator
│ │ └── README.md
├── monsoon-nlp
│ └── dv-wave
│ │ └── README.md
├── moumeneb1
│ └── flaubert-base-cased-ecology_crisis
│ │ └── README.md
├── mrm8488
│ ├── CodeBERTaPy
│ │ └── README.md
│ ├── GPT-2-finetuned-CORD19
│ │ └── README.md
│ ├── GPT-2-finetuned-covid-bio-medrxiv
│ │ └── README.md
│ ├── GuaPeTe-2-tiny
│ │ └── README.md
│ ├── RoBERTinha
│ │ └── README.md
│ ├── RoBasquERTa
│ │ └── README.md
│ ├── RuPERTa-base-finetuned-ner
│ │ └── README.md
│ ├── RuPERTa-base-finetuned-pawsx-es
│ │ └── README.md
│ ├── RuPERTa-base-finetuned-pos
│ │ └── README.md
│ ├── RuPERTa-base-finetuned-squadv1
│ │ └── README.md
│ ├── RuPERTa-base-finetuned-squadv2
│ │ └── README.md
│ ├── RuPERTa-base
│ │ └── README.md
│ ├── TinyBERT-spanish-uncased-finetuned-ner
│ │ └── README.md
│ ├── bert-base-german-dbmdz-cased-finetuned-pawsx-de
│ │ └── README.md
│ ├── bert-base-spanish-wwm-cased-finetuned-spa-squad2-es
│ │ └── README.md
│ ├── bert-italian-finedtuned-squadv1-it-alfa
│ │ └── README.md
│ ├── bert-medium-finetuned-squadv2
│ │ └── README.md
│ ├── bert-mini-finetuned-squadv2
│ │ └── README.md
│ ├── bert-multi-cased-finedtuned-xquad-tydiqa-goldp
│ │ └── README.md
│ ├── bert-multi-cased-finetuned-xquadv1
│ │ └── README.md
│ ├── bert-multi-uncased-finetuned-xquadv1
│ │ └── README.md
│ ├── bert-small-finetuned-squadv2
│ │ └── README.md
│ ├── bert-small-finetuned-typo-detection
│ │ └── README.md
│ ├── bert-spanish-cased-finetuned-ner
│ │ └── README.md
│ ├── bert-spanish-cased-finetuned-pos-syntax
│ │ └── README.md
│ ├── bert-spanish-cased-finetuned-pos
│ │ └── README.md
│ ├── bert-tiny-finetuned-squadv2
│ │ └── README.md
│ ├── bert-uncased-finetuned-qnli
│ │ └── README.md
│ ├── camembert-base-finetuned-pawsx-fr
│ │ └── README.md
│ ├── chEMBL_smiles_v1
│ │ └── README.md
│ ├── codeBERTaJS
│ │ └── README.md
│ ├── distilbert-base-multi-cased-finetuned-typo-detection
│ │ └── README.md
│ ├── distilbert-multi-finetuned-for-xqua-on-tydiqa
│ │ └── README.md
│ ├── distill-bert-base-spanish-wwm-cased-finetuned-spa-squad2-es
│ │ └── README.md
│ ├── distilroberta-base-finetuned-sentiment
│ │ └── README.md
│ ├── electra-base-finetuned-squadv1
│ │ └── README.md
│ ├── electra-small-finetuned-squadv1
│ │ └── README.md
│ ├── electra-small-finetuned-squadv2
│ │ └── README.md
│ ├── electricidad-base-discriminator
│ │ └── README.md
│ ├── electricidad-base-finetuned-pawsx-es
│ │ └── README.md
│ ├── electricidad-base-generator
│ │ └── README.md
│ ├── electricidad-small-discriminator
│ │ └── README.md
│ ├── electricidad-small-finetuned-squadv1-es
│ │ └── README.md
│ ├── gpt2-finetuned-recipes-cooking
│ │ └── README.md
│ ├── gpt2-finetuned-recipes-cooking_v2
│ │ └── README.md
│ ├── gpt2-imdb-neg
│ │ └── README.md
│ ├── gpt2-imdb-neutral
│ │ └── README.md
│ ├── longformer-base-4096-finetuned-squadv2
│ │ └── README.md
│ ├── mobilebert-uncased-finetuned-squadv1
│ │ └── README.md
│ ├── mobilebert-uncased-finetuned-squadv2
│ │ └── README.md
│ ├── roberta-base-1B-1-finetuned-squadv1
│ │ └── README.md
│ ├── roberta-base-1B-1-finetuned-squadv2
│ │ └── README.md
│ ├── roberta-large-finetuned-wsc
│ │ └── README.md
│ ├── spanbert-base-finetuned-squadv1
│ │ └── README.md
│ ├── spanbert-base-finetuned-squadv2
│ │ └── README.md
│ ├── spanbert-base-finetuned-tacred
│ │ └── README.md
│ ├── spanbert-finetuned-squadv1
│ │ └── README.md
│ ├── spanbert-finetuned-squadv2
│ │ └── README.md
│ ├── spanbert-large-finetuned-squadv1
│ │ └── README.md
│ ├── spanbert-large-finetuned-squadv2
│ │ └── README.md
│ ├── spanbert-large-finetuned-tacred
│ │ └── README.md
│ ├── t5-base-finetuned-break_data-question-retrieval
│ │ └── README.md
│ ├── t5-base-finetuned-break_data
│ │ └── README.md
│ ├── t5-base-finetuned-e2m-intent
│ │ └── README.md
│ ├── t5-base-finetuned-emotion
│ │ └── README.md
│ ├── t5-base-finetuned-imdb-sentiment
│ │ └── README.md
│ ├── t5-base-finetuned-question-generation-ap
│ │ └── README.md
│ ├── t5-base-finetuned-sarcasm-twitter
│ │ └── README.md
│ ├── t5-base-finetuned-span-sentiment-extraction
│ │ └── README.md
│ ├── t5-base-finetuned-squadv2
│ │ └── README.md
│ ├── t5-base-finetuned-summarize-news
│ │ └── README.md
│ ├── t5-base-finetuned-wikiSQL-sql-to-en
│ │ └── README.md
│ ├── t5-base-finetuned-wikiSQL
│ │ └── README.md
│ ├── t5-small-finetuned-emotion
│ │ └── README.md
│ ├── t5-small-finetuned-imdb-sentiment
│ │ └── README.md
│ ├── t5-small-finetuned-quora-for-paraphrasing
│ │ └── README.md
│ ├── t5-small-finetuned-squadv1
│ │ └── README.md
│ ├── t5-small-finetuned-squadv2
│ │ └── README.md
│ ├── t5-small-finetuned-wikiSQL
│ │ └── README.md
│ ├── umberto-wikipedia-uncased-v1-finetuned-squadv1-it
│ │ └── README.md
│ └── xlm-multi-finetuned-xquadv1
│ │ └── README.md
├── mys
│ └── electra-base-turkish-cased-ner
│ │ └── README.md
├── neuralmind
│ ├── bert-base-portuguese-cased
│ │ └── README.md
│ └── bert-large-portuguese-cased
│ │ └── README.md
├── neuraly
│ └── bert-base-italian-cased-sentiment
│ │ └── README.md
├── nghuyong
│ ├── ernie-1.0
│ │ └── README.md
│ ├── ernie-2.0-en
│ │ └── README.md
│ ├── ernie-2.0-large-en
│ │ └── README.md
│ └── ernie-tiny
│ │ └── README.md
├── nlpaueb
│ └── bert-base-greek-uncased-v1
│ │ └── README.md
├── nlptown
│ └── bert-base-multilingual-uncased-sentiment
│ │ └── README.md
├── nyu-mll
│ ├── roberta-base-100M-1
│ │ └── README.md
│ ├── roberta-base-100M-2
│ │ └── README.md
│ ├── roberta-base-100M-3
│ │ └── README.md
│ ├── roberta-base-10M-1
│ │ └── README.md
│ ├── roberta-base-10M-2
│ │ └── README.md
│ ├── roberta-base-10M-3
│ │ └── README.md
│ ├── roberta-base-1B-1
│ │ └── README.md
│ ├── roberta-base-1B-2
│ │ └── README.md
│ ├── roberta-base-1B-3
│ │ └── README.md
│ ├── roberta-med-small-1M-1
│ │ └── README.md
│ ├── roberta-med-small-1M-2
│ │ └── README.md
│ ├── roberta-med-small-1M-3
│ │ └── README.md
│ └── roberta_1M_to_1B
│ │ └── README.md
├── oliverguhr
│ └── german-sentiment-bert
│ │ └── README.md
├── patrickvonplaten
│ ├── bert2bert-cnn_dailymail-fp16
│ │ └── README.md
│ ├── bert2gpt2-cnn_dailymail-fp16
│ │ └── README.md
│ ├── longformer2roberta-cnn_dailymail-fp16
│ │ └── README.md
│ ├── roberta2roberta-cnn_dailymail-fp16
│ │ └── README.md
│ └── roberta2roberta-share-cnn_dailymail-fp16
│ │ └── README.md
├── pdelobelle
│ └── robbert-v2-dutch-base
│ │ └── README.md
├── pierreguillou
│ └── gpt2-small-portuguese
│ │ └── README.md
├── pradhyra
│ └── AWSBlogBert
│ │ └── README.md
├── pranavpsv
│ └── gpt2-genre-story-generator
│ │ └── README.md
├── pvl
│ └── labse_bert
│ │ └── README.md
├── ramsrigouthamg
│ └── t5_paraphraser
│ │ └── README.md
├── rdenadai
│ └── BR_BERTo
│ │ └── README.md
├── redewiedergabe
│ └── bert-base-historical-german-rw-cased
│ │ └── README.md
├── rjbownes
│ └── Magic-The-Generating
│ │ └── README.md
├── roberta-base-README.md
├── roberta-large-README.md
├── roberta-large-mnli-README.md
├── rohanrajpal
│ ├── bert-base-codemixed-uncased-sentiment
│ │ └── README.md
│ ├── bert-base-en-es-codemix-cased
│ │ └── README.md
│ ├── bert-base-en-hi-codemix-cased
│ │ └── README.md
│ └── bert-base-multilingual-codemixed-cased-sentiment
│ │ └── README.md
├── sagorsarker
│ ├── bangla-bert-base
│ │ └── README.md
│ ├── codeswitch-hineng-lid-lince
│ │ └── README.md
│ ├── codeswitch-hineng-ner-lince
│ │ └── README.md
│ ├── codeswitch-hineng-pos-lince
│ │ └── README.md
│ ├── codeswitch-nepeng-lid-lince
│ │ └── README.md
│ ├── codeswitch-spaeng-lid-lince
│ │ └── README.md
│ ├── codeswitch-spaeng-ner-lince
│ │ └── README.md
│ ├── codeswitch-spaeng-pos-lince
│ │ └── README.md
│ └── codeswitch-spaeng-sentiment-analysis-lince
│ │ └── README.md
├── savasy
│ ├── bert-base-turkish-ner-cased
│ │ └── README.md
│ ├── bert-base-turkish-sentiment-cased
│ │ └── README.md
│ ├── bert-base-turkish-squad
│ │ └── README.md
│ └── bert-turkish-text-classification
│ │ └── README.md
├── schmidek
│ └── electra-small-cased
│ │ └── README.md
├── seiya
│ └── oubiobert-base-uncased
│ │ └── README.md
├── sentence-transformers
│ ├── bert-base-nli-cls-token
│ │ └── README.md
│ ├── bert-base-nli-max-tokens
│ │ └── README.md
│ └── bert-base-nli-mean-tokens
│ │ └── README.md
├── severinsimmler
│ └── literary-german-bert
│ │ ├── README.md
│ │ ├── kfold.png
│ │ └── prosa-jahre.png
├── seyonec
│ └── ChemBERTa-zinc-base-v1
│ │ └── README.md
├── shoarora
│ ├── alectra-small-owt
│ │ └── README.md
│ └── electra-small-owt
│ │ └── README.md
├── shrugging-grace
│ └── tweetclassifier
│ │ └── README.md
├── spentaur
│ └── yelp
│ │ └── README.md
├── stas
│ └── tiny-wmt19-en-de
│ │ └── README.md
├── stevhliu
│ └── astroGPT
│ │ └── README.md
├── surajp
│ ├── RoBERTa-hindi-guj-san
│ │ └── README.md
│ ├── SanBERTa
│ │ └── README.md
│ └── albert-base-sanskrit
│ │ └── README.md
├── t5-11b-README.md
├── t5-3b-README.md
├── t5-base-README.md
├── t5-large-README.md
├── t5-small-README.md
├── tblard
│ └── tf-allocine
│ │ └── README.md
├── tuner007
│ ├── pegasus_paraphrase
│ │ └── README.md
│ ├── pegasus_qa
│ │ └── README.md
│ └── t5_abs_qa
│ │ └── README.md
├── twmkn9
│ ├── albert-base-v2-squad2
│ │ └── README.md
│ ├── bert-base-uncased-squad2
│ │ └── README.md
│ ├── distilbert-base-uncased-squad2
│ │ └── README.md
│ └── distilroberta-base-squad2
│ │ └── README.md
├── uncnlp
│ └── lxmert-base-uncased
│ │ ├── LICENSE
│ │ ├── README.md
│ │ └── lxmert_model-1.jpg
├── unideeplearning
│ └── polibert_sa
│ │ └── README.md
├── urduhack
│ └── roberta-urdu-small
│ │ └── README.md
├── valhalla
│ ├── bart-large-finetuned-squadv1
│ │ └── README.md
│ ├── distilbart-mnli-12-1
│ │ └── README.md
│ ├── distilbart-mnli-12-3
│ │ └── README.md
│ ├── distilbart-mnli-12-6
│ │ └── README.md
│ ├── distilbart-mnli-12-9
│ │ └── README.md
│ ├── electra-base-discriminator-finetuned_squadv1
│ │ └── README.md
│ ├── longformer-base-4096-finetuned-squadv1
│ │ └── README.md
│ ├── t5-base-e2e-qg
│ │ └── README.md
│ ├── t5-base-qa-qg-hl
│ │ └── README.md
│ ├── t5-base-qg-hl
│ │ └── README.md
│ ├── t5-base-squad
│ │ └── README.md
│ ├── t5-samll-qg-prepend
│ │ └── README.md
│ ├── t5-small-e2e-qg
│ │ └── README.md
│ ├── t5-small-qa-qg-hl
│ │ └── README.md
│ └── t5-small-qg-hl
│ │ └── README.md
├── vinai
│ ├── bertweet-base
│ │ └── README.md
│ ├── bertweet-covid19-base-cased
│ │ └── README.md
│ ├── bertweet-covid19-base-uncased
│ │ └── README.md
│ ├── phobert-base
│ │ └── README.md
│ └── phobert-large
│ │ └── README.md
├── voidful
│ ├── albert_chinese_base
│ │ └── README.md
│ ├── albert_chinese_large
│ │ └── README.md
│ ├── albert_chinese_small
│ │ └── README.md
│ ├── albert_chinese_tiny
│ │ └── README.md
│ ├── albert_chinese_xlarge
│ │ └── README.md
│ └── albert_chinese_xxlarge
│ │ └── README.md
├── wptoux
│ └── albert-chinese-large-qa
│ │ └── README.md
├── xlm-mlm-en-2048-README.md
├── xlm-roberta-base-README.md
├── xlm-roberta-large-finetuned-conll03-german-README.md
├── yjernite
│ └── bart_eli5
│ │ └── README.md
├── youscan
│ └── ukr-roberta-base
│ │ └── README.md
├── yuvraj
│ ├── summarizer-cnndm
│ │ └── README.md
│ └── xSumm
│ │ └── README.md
└── zanelim
│ ├── singbert-large-sg
│ │ └── README.md
│ ├── singbert-lite-sg
│ │ └── README.md
│ └── singbert
│ │ └── README.md
├── notebooks
├── 01-training-tokenizers.ipynb
├── 02-transformers.ipynb
├── 03-pipelines.ipynb
├── 04-onnx-export.ipynb
├── 05-benchmark.ipynb
└── README.md
├── pyproject.toml
├── scripts
└── fsmt
│ ├── convert-allenai-wmt16.sh
│ ├── convert-allenai-wmt19.sh
│ ├── convert-facebook-wmt19.sh
│ ├── eval-allenai-wmt16.sh
│ ├── eval-allenai-wmt19.sh
│ ├── eval-facebook-wmt19.sh
│ ├── fsmt-make-tiny-model.py
│ ├── gen-card-allenai-wmt16.py
│ ├── gen-card-allenai-wmt19.py
│ ├── gen-card-facebook-wmt19.py
│ ├── s3-move.sh
│ └── tests-to-run.sh
├── setup.cfg
├── setup.py
├── src
├── transformers.egg-info
│ ├── PKG-INFO
│ ├── SOURCES.txt
│ ├── dependency_links.txt
│ ├── entry_points.txt
│ ├── requires.txt
│ └── top_level.txt
└── transformers
│ ├── .ipynb_checkpoints
│ │ ├── modeling_bart-checkpoint.py
│ │ ├── modeling_utils-checkpoint.py
│ │ └── testing_utils-checkpoint.py
│ ├── __init__.py
│ ├── __pycache__
│ │ ├── __init__.cpython-37.pyc
│ │ ├── __init__.cpython-38.pyc
│ │ ├── activations.cpython-37.pyc
│ │ ├── activations.cpython-38.pyc
│ │ ├── activations_tf.cpython-37.pyc
│ │ ├── configuration_albert.cpython-37.pyc
│ │ ├── configuration_albert.cpython-38.pyc
│ │ ├── configuration_auto.cpython-37.pyc
│ │ ├── configuration_auto.cpython-38.pyc
│ │ ├── configuration_bart.cpython-37.pyc
│ │ ├── configuration_bart.cpython-38.pyc
│ │ ├── configuration_bert.cpython-37.pyc
│ │ ├── configuration_bert.cpython-38.pyc
│ │ ├── configuration_bert_generation.cpython-37.pyc
│ │ ├── configuration_bert_generation.cpython-38.pyc
│ │ ├── configuration_camembert.cpython-37.pyc
│ │ ├── configuration_camembert.cpython-38.pyc
│ │ ├── configuration_ctrl.cpython-37.pyc
│ │ ├── configuration_ctrl.cpython-38.pyc
│ │ ├── configuration_distilbert.cpython-37.pyc
│ │ ├── configuration_distilbert.cpython-38.pyc
│ │ ├── configuration_dpr.cpython-37.pyc
│ │ ├── configuration_dpr.cpython-38.pyc
│ │ ├── configuration_electra.cpython-37.pyc
│ │ ├── configuration_electra.cpython-38.pyc
│ │ ├── configuration_encoder_decoder.cpython-37.pyc
│ │ ├── configuration_encoder_decoder.cpython-38.pyc
│ │ ├── configuration_flaubert.cpython-37.pyc
│ │ ├── configuration_flaubert.cpython-38.pyc
│ │ ├── configuration_fsmt.cpython-37.pyc
│ │ ├── configuration_fsmt.cpython-38.pyc
│ │ ├── configuration_funnel.cpython-37.pyc
│ │ ├── configuration_funnel.cpython-38.pyc
│ │ ├── configuration_gpt2.cpython-37.pyc
│ │ ├── configuration_gpt2.cpython-38.pyc
│ │ ├── configuration_layoutlm.cpython-37.pyc
│ │ ├── configuration_layoutlm.cpython-38.pyc
│ │ ├── configuration_longformer.cpython-37.pyc
│ │ ├── configuration_longformer.cpython-38.pyc
│ │ ├── configuration_lxmert.cpython-37.pyc
│ │ ├── configuration_lxmert.cpython-38.pyc
│ │ ├── configuration_marian.cpython-37.pyc
│ │ ├── configuration_marian.cpython-38.pyc
│ │ ├── configuration_mbart.cpython-37.pyc
│ │ ├── configuration_mbart.cpython-38.pyc
│ │ ├── configuration_mmbt.cpython-37.pyc
│ │ ├── configuration_mmbt.cpython-38.pyc
│ │ ├── configuration_mobilebert.cpython-37.pyc
│ │ ├── configuration_mobilebert.cpython-38.pyc
│ │ ├── configuration_openai.cpython-37.pyc
│ │ ├── configuration_openai.cpython-38.pyc
│ │ ├── configuration_pegasus.cpython-37.pyc
│ │ ├── configuration_pegasus.cpython-38.pyc
│ │ ├── configuration_rag.cpython-37.pyc
│ │ ├── configuration_rag.cpython-38.pyc
│ │ ├── configuration_reformer.cpython-37.pyc
│ │ ├── configuration_reformer.cpython-38.pyc
│ │ ├── configuration_retribert.cpython-37.pyc
│ │ ├── configuration_retribert.cpython-38.pyc
│ │ ├── configuration_roberta.cpython-37.pyc
│ │ ├── configuration_roberta.cpython-38.pyc
│ │ ├── configuration_t5.cpython-37.pyc
│ │ ├── configuration_t5.cpython-38.pyc
│ │ ├── configuration_transfo_xl.cpython-37.pyc
│ │ ├── configuration_transfo_xl.cpython-38.pyc
│ │ ├── configuration_utils.cpython-37.pyc
│ │ ├── configuration_utils.cpython-38.pyc
│ │ ├── configuration_xlm.cpython-37.pyc
│ │ ├── configuration_xlm.cpython-38.pyc
│ │ ├── configuration_xlm_roberta.cpython-37.pyc
│ │ ├── configuration_xlm_roberta.cpython-38.pyc
│ │ ├── configuration_xlnet.cpython-37.pyc
│ │ ├── configuration_xlnet.cpython-38.pyc
│ │ ├── file_utils.cpython-37.pyc
│ │ ├── file_utils.cpython-38.pyc
│ │ ├── generation_tf_utils.cpython-37.pyc
│ │ ├── generation_utils.cpython-37.pyc
│ │ ├── generation_utils.cpython-38.pyc
│ │ ├── hf_argparser.cpython-37.pyc
│ │ ├── hf_argparser.cpython-38.pyc
│ │ ├── integrations.cpython-37.pyc
│ │ ├── integrations.cpython-38.pyc
│ │ ├── modelcard.cpython-37.pyc
│ │ ├── modelcard.cpython-38.pyc
│ │ ├── modeling_albert.cpython-37.pyc
│ │ ├── modeling_albert.cpython-38.pyc
│ │ ├── modeling_auto.cpython-37.pyc
│ │ ├── modeling_auto.cpython-38.pyc
│ │ ├── modeling_bart.cpython-37.pyc
│ │ ├── modeling_bart.cpython-38.pyc
│ │ ├── modeling_bert.cpython-37.pyc
│ │ ├── modeling_bert.cpython-38.pyc
│ │ ├── modeling_bert_generation.cpython-37.pyc
│ │ ├── modeling_bert_generation.cpython-38.pyc
│ │ ├── modeling_camembert.cpython-37.pyc
│ │ ├── modeling_camembert.cpython-38.pyc
│ │ ├── modeling_ctrl.cpython-37.pyc
│ │ ├── modeling_ctrl.cpython-38.pyc
│ │ ├── modeling_distilbert.cpython-37.pyc
│ │ ├── modeling_distilbert.cpython-38.pyc
│ │ ├── modeling_dpr.cpython-37.pyc
│ │ ├── modeling_dpr.cpython-38.pyc
│ │ ├── modeling_electra.cpython-37.pyc
│ │ ├── modeling_electra.cpython-38.pyc
│ │ ├── modeling_encoder_decoder.cpython-37.pyc
│ │ ├── modeling_encoder_decoder.cpython-38.pyc
│ │ ├── modeling_flaubert.cpython-37.pyc
│ │ ├── modeling_flaubert.cpython-38.pyc
│ │ ├── modeling_fsmt.cpython-37.pyc
│ │ ├── modeling_fsmt.cpython-38.pyc
│ │ ├── modeling_funnel.cpython-37.pyc
│ │ ├── modeling_funnel.cpython-38.pyc
│ │ ├── modeling_gpt2.cpython-37.pyc
│ │ ├── modeling_gpt2.cpython-38.pyc
│ │ ├── modeling_layoutlm.cpython-37.pyc
│ │ ├── modeling_layoutlm.cpython-38.pyc
│ │ ├── modeling_longformer.cpython-37.pyc
│ │ ├── modeling_longformer.cpython-38.pyc
│ │ ├── modeling_lxmert.cpython-37.pyc
│ │ ├── modeling_lxmert.cpython-38.pyc
│ │ ├── modeling_marian.cpython-37.pyc
│ │ ├── modeling_marian.cpython-38.pyc
│ │ ├── modeling_mbart.cpython-37.pyc
│ │ ├── modeling_mbart.cpython-38.pyc
│ │ ├── modeling_mmbt.cpython-37.pyc
│ │ ├── modeling_mmbt.cpython-38.pyc
│ │ ├── modeling_mobilebert.cpython-37.pyc
│ │ ├── modeling_mobilebert.cpython-38.pyc
│ │ ├── modeling_openai.cpython-37.pyc
│ │ ├── modeling_openai.cpython-38.pyc
│ │ ├── modeling_outputs.cpython-37.pyc
│ │ ├── modeling_outputs.cpython-38.pyc
│ │ ├── modeling_pegasus.cpython-37.pyc
│ │ ├── modeling_pegasus.cpython-38.pyc
│ │ ├── modeling_rag.cpython-37.pyc
│ │ ├── modeling_rag.cpython-38.pyc
│ │ ├── modeling_reformer.cpython-37.pyc
│ │ ├── modeling_reformer.cpython-38.pyc
│ │ ├── modeling_retribert.cpython-37.pyc
│ │ ├── modeling_retribert.cpython-38.pyc
│ │ ├── modeling_roberta.cpython-37.pyc
│ │ ├── modeling_roberta.cpython-38.pyc
│ │ ├── modeling_t5.cpython-37.pyc
│ │ ├── modeling_t5.cpython-38.pyc
│ │ ├── modeling_tf_albert.cpython-37.pyc
│ │ ├── modeling_tf_auto.cpython-37.pyc
│ │ ├── modeling_tf_bert.cpython-37.pyc
│ │ ├── modeling_tf_camembert.cpython-37.pyc
│ │ ├── modeling_tf_ctrl.cpython-37.pyc
│ │ ├── modeling_tf_distilbert.cpython-37.pyc
│ │ ├── modeling_tf_electra.cpython-37.pyc
│ │ ├── modeling_tf_flaubert.cpython-37.pyc
│ │ ├── modeling_tf_funnel.cpython-37.pyc
│ │ ├── modeling_tf_gpt2.cpython-37.pyc
│ │ ├── modeling_tf_longformer.cpython-37.pyc
│ │ ├── modeling_tf_lxmert.cpython-37.pyc
│ │ ├── modeling_tf_mobilebert.cpython-37.pyc
│ │ ├── modeling_tf_openai.cpython-37.pyc
│ │ ├── modeling_tf_outputs.cpython-37.pyc
│ │ ├── modeling_tf_pytorch_utils.cpython-37.pyc
│ │ ├── modeling_tf_pytorch_utils.cpython-38.pyc
│ │ ├── modeling_tf_roberta.cpython-37.pyc
│ │ ├── modeling_tf_t5.cpython-37.pyc
│ │ ├── modeling_tf_transfo_xl.cpython-37.pyc
│ │ ├── modeling_tf_transfo_xl_utilities.cpython-37.pyc
│ │ ├── modeling_tf_utils.cpython-37.pyc
│ │ ├── modeling_tf_xlm.cpython-37.pyc
│ │ ├── modeling_tf_xlm_roberta.cpython-37.pyc
│ │ ├── modeling_tf_xlnet.cpython-37.pyc
│ │ ├── modeling_transfo_xl.cpython-37.pyc
│ │ ├── modeling_transfo_xl.cpython-38.pyc
│ │ ├── modeling_transfo_xl_utilities.cpython-37.pyc
│ │ ├── modeling_transfo_xl_utilities.cpython-38.pyc
│ │ ├── modeling_utils.cpython-37.pyc
│ │ ├── modeling_utils.cpython-38.pyc
│ │ ├── modeling_xlm.cpython-37.pyc
│ │ ├── modeling_xlm.cpython-38.pyc
│ │ ├── modeling_xlm_roberta.cpython-37.pyc
│ │ ├── modeling_xlm_roberta.cpython-38.pyc
│ │ ├── modeling_xlnet.cpython-37.pyc
│ │ ├── modeling_xlnet.cpython-38.pyc
│ │ ├── optimization.cpython-37.pyc
│ │ ├── optimization.cpython-38.pyc
│ │ ├── optimization_tf.cpython-37.pyc
│ │ ├── pipelines.cpython-37.pyc
│ │ ├── pipelines.cpython-38.pyc
│ │ ├── retrieval_rag.cpython-37.pyc
│ │ ├── retrieval_rag.cpython-38.pyc
│ │ ├── tokenization_albert.cpython-37.pyc
│ │ ├── tokenization_albert.cpython-38.pyc
│ │ ├── tokenization_auto.cpython-37.pyc
│ │ ├── tokenization_auto.cpython-38.pyc
│ │ ├── tokenization_bart.cpython-37.pyc
│ │ ├── tokenization_bart.cpython-38.pyc
│ │ ├── tokenization_bert.cpython-37.pyc
│ │ ├── tokenization_bert.cpython-38.pyc
│ │ ├── tokenization_bert_generation.cpython-37.pyc
│ │ ├── tokenization_bert_generation.cpython-38.pyc
│ │ ├── tokenization_bert_japanese.cpython-37.pyc
│ │ ├── tokenization_bert_japanese.cpython-38.pyc
│ │ ├── tokenization_bertweet.cpython-37.pyc
│ │ ├── tokenization_bertweet.cpython-38.pyc
│ │ ├── tokenization_camembert.cpython-37.pyc
│ │ ├── tokenization_camembert.cpython-38.pyc
│ │ ├── tokenization_ctrl.cpython-37.pyc
│ │ ├── tokenization_ctrl.cpython-38.pyc
│ │ ├── tokenization_distilbert.cpython-37.pyc
│ │ ├── tokenization_distilbert.cpython-38.pyc
│ │ ├── tokenization_dpr.cpython-37.pyc
│ │ ├── tokenization_dpr.cpython-38.pyc
│ │ ├── tokenization_electra.cpython-37.pyc
│ │ ├── tokenization_electra.cpython-38.pyc
│ │ ├── tokenization_flaubert.cpython-37.pyc
│ │ ├── tokenization_flaubert.cpython-38.pyc
│ │ ├── tokenization_fsmt.cpython-37.pyc
│ │ ├── tokenization_fsmt.cpython-38.pyc
│ │ ├── tokenization_funnel.cpython-37.pyc
│ │ ├── tokenization_funnel.cpython-38.pyc
│ │ ├── tokenization_gpt2.cpython-37.pyc
│ │ ├── tokenization_gpt2.cpython-38.pyc
│ │ ├── tokenization_layoutlm.cpython-37.pyc
│ │ ├── tokenization_layoutlm.cpython-38.pyc
│ │ ├── tokenization_longformer.cpython-37.pyc
│ │ ├── tokenization_longformer.cpython-38.pyc
│ │ ├── tokenization_lxmert.cpython-37.pyc
│ │ ├── tokenization_lxmert.cpython-38.pyc
│ │ ├── tokenization_marian.cpython-37.pyc
│ │ ├── tokenization_marian.cpython-38.pyc
│ │ ├── tokenization_mbart.cpython-37.pyc
│ │ ├── tokenization_mbart.cpython-38.pyc
│ │ ├── tokenization_mobilebert.cpython-37.pyc
│ │ ├── tokenization_mobilebert.cpython-38.pyc
│ │ ├── tokenization_openai.cpython-37.pyc
│ │ ├── tokenization_openai.cpython-38.pyc
│ │ ├── tokenization_pegasus.cpython-37.pyc
│ │ ├── tokenization_pegasus.cpython-38.pyc
│ │ ├── tokenization_phobert.cpython-37.pyc
│ │ ├── tokenization_phobert.cpython-38.pyc
│ │ ├── tokenization_rag.cpython-37.pyc
│ │ ├── tokenization_rag.cpython-38.pyc
│ │ ├── tokenization_reformer.cpython-37.pyc
│ │ ├── tokenization_reformer.cpython-38.pyc
│ │ ├── tokenization_retribert.cpython-37.pyc
│ │ ├── tokenization_retribert.cpython-38.pyc
│ │ ├── tokenization_roberta.cpython-37.pyc
│ │ ├── tokenization_roberta.cpython-38.pyc
│ │ ├── tokenization_t5.cpython-37.pyc
│ │ ├── tokenization_t5.cpython-38.pyc
│ │ ├── tokenization_transfo_xl.cpython-37.pyc
│ │ ├── tokenization_transfo_xl.cpython-38.pyc
│ │ ├── tokenization_utils.cpython-37.pyc
│ │ ├── tokenization_utils.cpython-38.pyc
│ │ ├── tokenization_utils_base.cpython-37.pyc
│ │ ├── tokenization_utils_base.cpython-38.pyc
│ │ ├── tokenization_utils_fast.cpython-37.pyc
│ │ ├── tokenization_utils_fast.cpython-38.pyc
│ │ ├── tokenization_xlm.cpython-37.pyc
│ │ ├── tokenization_xlm.cpython-38.pyc
│ │ ├── tokenization_xlm_roberta.cpython-37.pyc
│ │ ├── tokenization_xlm_roberta.cpython-38.pyc
│ │ ├── tokenization_xlnet.cpython-37.pyc
│ │ ├── tokenization_xlnet.cpython-38.pyc
│ │ ├── trainer.cpython-37.pyc
│ │ ├── trainer.cpython-38.pyc
│ │ ├── trainer_tf.cpython-37.pyc
│ │ ├── trainer_utils.cpython-37.pyc
│ │ ├── trainer_utils.cpython-38.pyc
│ │ ├── training_args.cpython-37.pyc
│ │ ├── training_args.cpython-38.pyc
│ │ ├── training_args_tf.cpython-37.pyc
│ │ └── training_args_tf.cpython-38.pyc
│ ├── activations.py
│ ├── activations_tf.py
│ ├── benchmark
│ │ ├── __init__.py
│ │ ├── __pycache__
│ │ │ ├── __init__.cpython-37.pyc
│ │ │ ├── __init__.cpython-38.pyc
│ │ │ ├── benchmark.cpython-37.pyc
│ │ │ ├── benchmark.cpython-38.pyc
│ │ │ ├── benchmark_args.cpython-37.pyc
│ │ │ ├── benchmark_args.cpython-38.pyc
│ │ │ ├── benchmark_args_tf.cpython-37.pyc
│ │ │ ├── benchmark_args_utils.cpython-37.pyc
│ │ │ ├── benchmark_args_utils.cpython-38.pyc
│ │ │ ├── benchmark_tf.cpython-37.pyc
│ │ │ ├── benchmark_utils.cpython-37.pyc
│ │ │ └── benchmark_utils.cpython-38.pyc
│ │ ├── benchmark.py
│ │ ├── benchmark_args.py
│ │ ├── benchmark_args_tf.py
│ │ ├── benchmark_args_utils.py
│ │ ├── benchmark_tf.py
│ │ └── benchmark_utils.py
│ ├── commands
│ │ ├── __init__.py
│ │ ├── convert.py
│ │ ├── download.py
│ │ ├── env.py
│ │ ├── run.py
│ │ ├── serving.py
│ │ ├── train.py
│ │ ├── transformers_cli.py
│ │ └── user.py
│ ├── configuration_albert.py
│ ├── configuration_auto.py
│ ├── configuration_bart.py
│ ├── configuration_bert.py
│ ├── configuration_bert_generation.py
│ ├── configuration_camembert.py
│ ├── configuration_ctrl.py
│ ├── configuration_distilbert.py
│ ├── configuration_dpr.py
│ ├── configuration_electra.py
│ ├── configuration_encoder_decoder.py
│ ├── configuration_flaubert.py
│ ├── configuration_fsmt.py
│ ├── configuration_funnel.py
│ ├── configuration_gpt2.py
│ ├── configuration_layoutlm.py
│ ├── configuration_longformer.py
│ ├── configuration_lxmert.py
│ ├── configuration_marian.py
│ ├── configuration_mbart.py
│ ├── configuration_mmbt.py
│ ├── configuration_mobilebert.py
│ ├── configuration_openai.py
│ ├── configuration_pegasus.py
│ ├── configuration_rag.py
│ ├── configuration_reformer.py
│ ├── configuration_retribert.py
│ ├── configuration_roberta.py
│ ├── configuration_t5.py
│ ├── configuration_transfo_xl.py
│ ├── configuration_utils.py
│ ├── configuration_xlm.py
│ ├── configuration_xlm_roberta.py
│ ├── configuration_xlnet.py
│ ├── convert_albert_original_tf_checkpoint_to_pytorch.py
│ ├── convert_bart_original_pytorch_checkpoint_to_pytorch.py
│ ├── convert_bert_original_tf2_checkpoint_to_pytorch.py
│ ├── convert_bert_original_tf_checkpoint_to_pytorch.py
│ ├── convert_bert_pytorch_checkpoint_to_original_tf.py
│ ├── convert_dialogpt_original_pytorch_checkpoint_to_pytorch.py
│ ├── convert_dpr_original_checkpoint_to_pytorch.py
│ ├── convert_electra_original_tf_checkpoint_to_pytorch.py
│ ├── convert_fsmt_original_pytorch_checkpoint_to_pytorch.py
│ ├── convert_funnel_original_tf_checkpoint_to_pytorch.py
│ ├── convert_gpt2_original_tf_checkpoint_to_pytorch.py
│ ├── convert_graph_to_onnx.py
│ ├── convert_longformer_original_pytorch_lightning_to_pytorch.py
│ ├── convert_lxmert_original_tf_checkpoint_to_pytorch.py
│ ├── convert_marian_to_pytorch.py
│ ├── convert_mbart_original_checkpoint_to_pytorch.py
│ ├── convert_mobilebert_original_tf_checkpoint_to_pytorch.py
│ ├── convert_openai_original_tf_checkpoint_to_pytorch.py
│ ├── convert_pegasus_tf_to_pytorch.py
│ ├── convert_pytorch_checkpoint_to_tf2.py
│ ├── convert_reformer_trax_checkpoint_to_pytorch.py
│ ├── convert_roberta_original_pytorch_checkpoint_to_pytorch.py
│ ├── convert_t5_original_tf_checkpoint_to_pytorch.py
│ ├── convert_tf_hub_seq_to_seq_bert_to_pytorch.py
│ ├── convert_transfo_xl_original_tf_checkpoint_to_pytorch.py
│ ├── convert_xlm_original_pytorch_checkpoint_to_pytorch.py
│ ├── convert_xlnet_original_tf_checkpoint_to_pytorch.py
│ ├── data
│ │ ├── __init__.py
│ │ ├── __pycache__
│ │ │ ├── __init__.cpython-37.pyc
│ │ │ ├── __init__.cpython-38.pyc
│ │ │ ├── data_collator.cpython-37.pyc
│ │ │ └── data_collator.cpython-38.pyc
│ │ ├── data_collator.py
│ │ ├── datasets
│ │ │ ├── __init__.py
│ │ │ ├── __pycache__
│ │ │ │ ├── __init__.cpython-37.pyc
│ │ │ │ ├── __init__.cpython-38.pyc
│ │ │ │ ├── glue.cpython-37.pyc
│ │ │ │ ├── glue.cpython-38.pyc
│ │ │ │ ├── language_modeling.cpython-37.pyc
│ │ │ │ ├── language_modeling.cpython-38.pyc
│ │ │ │ ├── squad.cpython-37.pyc
│ │ │ │ └── squad.cpython-38.pyc
│ │ │ ├── glue.py
│ │ │ ├── language_modeling.py
│ │ │ └── squad.py
│ │ ├── metrics
│ │ │ ├── __init__.py
│ │ │ ├── __pycache__
│ │ │ │ ├── __init__.cpython-37.pyc
│ │ │ │ └── __init__.cpython-38.pyc
│ │ │ └── squad_metrics.py
│ │ ├── processors
│ │ │ ├── __init__.py
│ │ │ ├── __pycache__
│ │ │ │ ├── __init__.cpython-37.pyc
│ │ │ │ ├── __init__.cpython-38.pyc
│ │ │ │ ├── glue.cpython-37.pyc
│ │ │ │ ├── glue.cpython-38.pyc
│ │ │ │ ├── squad.cpython-37.pyc
│ │ │ │ ├── squad.cpython-38.pyc
│ │ │ │ ├── utils.cpython-37.pyc
│ │ │ │ ├── utils.cpython-38.pyc
│ │ │ │ ├── xnli.cpython-37.pyc
│ │ │ │ └── xnli.cpython-38.pyc
│ │ │ ├── glue.py
│ │ │ ├── squad.py
│ │ │ ├── utils.py
│ │ │ └── xnli.py
│ │ └── test_generation_utils.py
│ ├── file_utils.py
│ ├── generation_tf_utils.py
│ ├── generation_utils.py
│ ├── hf_api.py
│ ├── hf_argparser.py
│ ├── integrations.py
│ ├── modelcard.py
│ ├── modeling_albert.py
│ ├── modeling_auto.py
│ ├── modeling_bart.py
│ ├── modeling_bert.py
│ ├── modeling_bert_generation.py
│ ├── modeling_camembert.py
│ ├── modeling_ctrl.py
│ ├── modeling_distilbert.py
│ ├── modeling_dpr.py
│ ├── modeling_electra.py
│ ├── modeling_encoder_decoder.py
│ ├── modeling_flaubert.py
│ ├── modeling_fsmt.py
│ ├── modeling_funnel.py
│ ├── modeling_gpt2.py
│ ├── modeling_layoutlm.py
│ ├── modeling_longformer.py
│ ├── modeling_lxmert.py
│ ├── modeling_marian.py
│ ├── modeling_mbart.py
│ ├── modeling_mmbt.py
│ ├── modeling_mobilebert.py
│ ├── modeling_openai.py
│ ├── modeling_outputs.py
│ ├── modeling_pegasus.py
│ ├── modeling_rag.py
│ ├── modeling_reformer.py
│ ├── modeling_retribert.py
│ ├── modeling_roberta.py
│ ├── modeling_t5.py
│ ├── modeling_tf_albert.py
│ ├── modeling_tf_auto.py
│ ├── modeling_tf_bert.py
│ ├── modeling_tf_camembert.py
│ ├── modeling_tf_ctrl.py
│ ├── modeling_tf_distilbert.py
│ ├── modeling_tf_electra.py
│ ├── modeling_tf_flaubert.py
│ ├── modeling_tf_funnel.py
│ ├── modeling_tf_gpt2.py
│ ├── modeling_tf_longformer.py
│ ├── modeling_tf_lxmert.py
│ ├── modeling_tf_mobilebert.py
│ ├── modeling_tf_openai.py
│ ├── modeling_tf_outputs.py
│ ├── modeling_tf_pytorch_utils.py
│ ├── modeling_tf_roberta.py
│ ├── modeling_tf_t5.py
│ ├── modeling_tf_transfo_xl.py
│ ├── modeling_tf_transfo_xl_utilities.py
│ ├── modeling_tf_utils.py
│ ├── modeling_tf_xlm.py
│ ├── modeling_tf_xlm_roberta.py
│ ├── modeling_tf_xlnet.py
│ ├── modeling_transfo_xl.py
│ ├── modeling_transfo_xl_utilities.py
│ ├── modeling_utils.py
│ ├── modeling_xlm.py
│ ├── modeling_xlm_roberta.py
│ ├── modeling_xlnet.py
│ ├── optimization.py
│ ├── optimization_tf.py
│ ├── pipelines.py
│ ├── retrieval_rag.py
│ ├── testing_utils.py
│ ├── tokenization_albert.py
│ ├── tokenization_auto.py
│ ├── tokenization_bart.py
│ ├── tokenization_bert.py
│ ├── tokenization_bert_generation.py
│ ├── tokenization_bert_japanese.py
│ ├── tokenization_bertweet.py
│ ├── tokenization_camembert.py
│ ├── tokenization_ctrl.py
│ ├── tokenization_distilbert.py
│ ├── tokenization_dpr.py
│ ├── tokenization_electra.py
│ ├── tokenization_flaubert.py
│ ├── tokenization_fsmt.py
│ ├── tokenization_funnel.py
│ ├── tokenization_gpt2.py
│ ├── tokenization_layoutlm.py
│ ├── tokenization_longformer.py
│ ├── tokenization_lxmert.py
│ ├── tokenization_marian.py
│ ├── tokenization_mbart.py
│ ├── tokenization_mobilebert.py
│ ├── tokenization_openai.py
│ ├── tokenization_pegasus.py
│ ├── tokenization_phobert.py
│ ├── tokenization_rag.py
│ ├── tokenization_reformer.py
│ ├── tokenization_retribert.py
│ ├── tokenization_roberta.py
│ ├── tokenization_t5.py
│ ├── tokenization_transfo_xl.py
│ ├── tokenization_utils.py
│ ├── tokenization_utils_base.py
│ ├── tokenization_utils_fast.py
│ ├── tokenization_xlm.py
│ ├── tokenization_xlm_roberta.py
│ ├── tokenization_xlnet.py
│ ├── trainer.py
│ ├── trainer_tf.py
│ ├── trainer_utils.py
│ ├── training_args.py
│ ├── training_args_tf.py
│ └── utils
│ ├── __init__.py
│ ├── __pycache__
│ ├── __init__.cpython-37.pyc
│ ├── __init__.cpython-38.pyc
│ ├── logging.cpython-37.pyc
│ └── logging.cpython-38.pyc
│ └── logging.py
├── templates
├── adding_a_new_example_script
│ ├── README.md
│ ├── run_xxx.py
│ └── utils_xxx.py
└── adding_a_new_model
│ ├── README.md
│ ├── configuration_xxx.py
│ ├── convert_xxx_original_tf_checkpoint_to_pytorch.py
│ ├── modeling_tf_xxx.py
│ ├── modeling_xxx.py
│ ├── tests
│ ├── test_modeling_tf_xxx.py
│ ├── test_modeling_xxx.py
│ └── test_tokenization_xxx.py
│ └── tokenization_xxx.py
├── tests
├── __init__.py
├── conftest.py
├── fixtures
│ ├── dummy-config.json
│ ├── empty.txt
│ ├── input.txt
│ ├── sample_text.txt
│ ├── spiece.model
│ ├── test_sentencepiece.model
│ └── tests_samples
│ │ ├── .gitignore
│ │ ├── GermEval
│ │ ├── dev.txt
│ │ ├── labels.txt
│ │ └── train.txt
│ │ ├── MRPC
│ │ ├── dev.tsv
│ │ └── train.tsv
│ │ ├── SQUAD
│ │ ├── dev-v2.0.json
│ │ └── train-v2.0.json
│ │ ├── STS-B
│ │ ├── dev.tsv
│ │ └── train.tsv
│ │ └── wiki_text
│ │ └── wiki_00
├── test_activations.py
├── test_activations_tf.py
├── test_benchmark.py
├── test_benchmark_tf.py
├── test_cli.py
├── test_configuration_auto.py
├── test_configuration_common.py
├── test_data_collator.py
├── test_doc_samples.py
├── test_hf_api.py
├── test_hf_argparser.py
├── test_logging.py
├── test_model_card.py
├── test_model_output.py
├── test_modeling_albert.py
├── test_modeling_auto.py
├── test_modeling_bart.py
├── test_modeling_bert.py
├── test_modeling_bert_generation.py
├── test_modeling_camembert.py
├── test_modeling_common.py
├── test_modeling_ctrl.py
├── test_modeling_distilbert.py
├── test_modeling_dpr.py
├── test_modeling_electra.py
├── test_modeling_encoder_decoder.py
├── test_modeling_flaubert.py
├── test_modeling_fsmt.py
├── test_modeling_funnel.py
├── test_modeling_gpt2.py
├── test_modeling_layoutlm.py
├── test_modeling_longformer.py
├── test_modeling_lxmert.py
├── test_modeling_marian.py
├── test_modeling_mbart.py
├── test_modeling_mobilebert.py
├── test_modeling_openai.py
├── test_modeling_pegasus.py
├── test_modeling_rag.py
├── test_modeling_reformer.py
├── test_modeling_roberta.py
├── test_modeling_t5.py
├── test_modeling_tf_albert.py
├── test_modeling_tf_auto.py
├── test_modeling_tf_bert.py
├── test_modeling_tf_camembert.py
├── test_modeling_tf_common.py
├── test_modeling_tf_ctrl.py
├── test_modeling_tf_distilbert.py
├── test_modeling_tf_electra.py
├── test_modeling_tf_flaubert.py
├── test_modeling_tf_funnel.py
├── test_modeling_tf_gpt2.py
├── test_modeling_tf_longformer.py
├── test_modeling_tf_lxmert.py
├── test_modeling_tf_mobilebert.py
├── test_modeling_tf_openai.py
├── test_modeling_tf_roberta.py
├── test_modeling_tf_t5.py
├── test_modeling_tf_transfo_xl.py
├── test_modeling_tf_xlm.py
├── test_modeling_tf_xlm_roberta.py
├── test_modeling_tf_xlnet.py
├── test_modeling_transfo_xl.py
├── test_modeling_xlm.py
├── test_modeling_xlm_roberta.py
├── test_modeling_xlnet.py
├── test_onnx.py
├── test_optimization.py
├── test_optimization_tf.py
├── test_pipelines.py
├── test_retrieval_rag.py
├── test_skip_decorators.py
├── test_tokenization_albert.py
├── test_tokenization_auto.py
├── test_tokenization_bart.py
├── test_tokenization_bert.py
├── test_tokenization_bert_generation.py
├── test_tokenization_bert_japanese.py
├── test_tokenization_bertweet.py
├── test_tokenization_common.py
├── test_tokenization_ctrl.py
├── test_tokenization_distilbert.py
├── test_tokenization_dpr.py
├── test_tokenization_fast.py
├── test_tokenization_fsmt.py
├── test_tokenization_funnel.py
├── test_tokenization_gpt2.py
├── test_tokenization_layoutlm.py
├── test_tokenization_lxmert.py
├── test_tokenization_marian.py
├── test_tokenization_mbart.py
├── test_tokenization_openai.py
├── test_tokenization_pegasus.py
├── test_tokenization_phobert.py
├── test_tokenization_rag.py
├── test_tokenization_reformer.py
├── test_tokenization_roberta.py
├── test_tokenization_t5.py
├── test_tokenization_transfo_xl.py
├── test_tokenization_utils.py
├── test_tokenization_xlm.py
├── test_tokenization_xlm_roberta.py
├── test_tokenization_xlnet.py
├── test_trainer.py
├── test_trainer_distributed.py
└── test_utils_check_copies.py
├── utils
├── check_copies.py
├── check_repo.py
├── download_glue_data.py
└── link_tester.py
└── valohai.yaml
/src/Weights & Biases.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/src/Weights & Biases.pdf
--------------------------------------------------------------------------------
/src/composit/.ipynb_checkpoints/test_results-checkpoint.txt:
--------------------------------------------------------------------------------
1 | bs: 3.000000
2 | loss: 167.933685
3 | src_pad_frac: 0.025157
4 | src_pad_tok: 12.000000
5 | step_count: 13.000000
6 | test_avg_gen_len: 31.329268
7 | test_avg_gen_time: 0.312038
8 | test_avg_loss: 156.182098
9 | test_avg_rouge-1: 46.712563
10 | test_avg_rouge-2: 22.947288
11 | test_avg_rouge-l: 45.301813
12 | test_loss: 156.182098
13 | tpb: 545.000000
14 | val_avg_gen_len: 31.995110
15 | val_avg_gen_time: 0.287093
16 | val_avg_loss: 160.214417
17 | val_avg_rouge-1: 47.042550
18 | val_avg_rouge-2: 24.202754
19 | val_avg_rouge-l: 46.366539
20 | val_loss: 160.214417
21 |
--------------------------------------------------------------------------------
/src/composit/hparams.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/src/composit/hparams.pkl
--------------------------------------------------------------------------------
/src/composit/test_results.txt:
--------------------------------------------------------------------------------
1 | bs: 3.000000
2 | loss: 167.933685
3 | src_pad_frac: 0.025157
4 | src_pad_tok: 12.000000
5 | step_count: 13.000000
6 | test_avg_gen_len: 31.329268
7 | test_avg_gen_time: 0.312038
8 | test_avg_loss: 156.182098
9 | test_avg_rouge-1: 46.712563
10 | test_avg_rouge-2: 22.947288
11 | test_avg_rouge-l: 45.301813
12 | test_loss: 156.182098
13 | tpb: 545.000000
14 | val_avg_gen_len: 31.995110
15 | val_avg_gen_time: 0.287093
16 | val_avg_loss: 160.214417
17 | val_avg_rouge-1: 47.042550
18 | val_avg_rouge-2: 24.202754
19 | val_avg_rouge-l: 46.366539
20 | val_loss: 160.214417
21 |
--------------------------------------------------------------------------------
/src/train_base.sh:
--------------------------------------------------------------------------------
1 | CUDA_VISIBLE_DEVICES=0 python train.py \
2 | --data_dir ./data \
3 | --learning_rate=3e-5 \
4 | --gpus 1 \
5 | --do_train \
6 | --do_predict \
7 | --check_val_every_n_epoch 1 \
8 | --early_stopping_patience 5 \
9 | --max_source_length 800 \
10 | --task summarization \
11 | --label_smoothing 0.1 \
12 | --model_name_or_path facebook/bart-base \
13 | --config_name bart_config_10_7.json \
14 | --cache_dir ./cache \
15 | --output_dir ./baseline \
16 | --lr_scheduler polynomial \
17 | --weight_decay 0.01 --warmup_steps 120 --num_train_epochs 20 \
18 | --max_grad_norm 0.1 \
19 | --dropout 0.1 --attention_dropout 0.1 \
20 | --train_batch_size 1 \
21 | --eval_batch_size 2 \
22 | --gradient_accumulation_steps 32 \
23 | --sortish_sampler \
24 | --seed 42 \
25 | --val_metric loss \
26 | --logger_name wandb \
27 | "$@"
--------------------------------------------------------------------------------
/transformers/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | source=transformers
3 | omit =
4 | # skip conversion scripts from testing for now
5 | */convert_*
6 | */__main__.py
7 | [report]
8 | exclude_lines =
9 | pragma: no cover
10 | raise
11 | except
12 | register_parameter
--------------------------------------------------------------------------------
/transformers/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 |
--------------------------------------------------------------------------------
/transformers/docker/transformers-cpu/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:18.04
2 | LABEL maintainer="Hugging Face"
3 | LABEL repository="transformers"
4 |
5 | RUN apt update && \
6 | apt install -y bash \
7 | build-essential \
8 | git \
9 | curl \
10 | ca-certificates \
11 | python3 \
12 | python3-pip && \
13 | rm -rf /var/lib/apt/lists
14 |
15 | RUN python3 -m pip install --no-cache-dir --upgrade pip && \
16 | python3 -m pip install --no-cache-dir \
17 | jupyter \
18 | tensorflow-cpu \
19 | torch
20 |
21 | WORKDIR /workspace
22 | COPY . transformers/
23 | RUN cd transformers/ && \
24 | python3 -m pip install --no-cache-dir .
25 |
26 | CMD ["/bin/bash"]
--------------------------------------------------------------------------------
/transformers/docker/transformers-gpu/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04
2 | LABEL maintainer="Hugging Face"
3 | LABEL repository="transformers"
4 |
5 | RUN apt update && \
6 | apt install -y bash \
7 | build-essential \
8 | git \
9 | curl \
10 | ca-certificates \
11 | python3 \
12 | python3-pip && \
13 | rm -rf /var/lib/apt/lists
14 |
15 | RUN python3 -m pip install --no-cache-dir --upgrade pip && \
16 | python3 -m pip install --no-cache-dir \
17 | jupyter \
18 | tensorflow \
19 | torch
20 |
21 | WORKDIR /workspace
22 | COPY . transformers/
23 | RUN cd transformers/ && \
24 | python3 -m pip install --no-cache-dir .
25 |
26 | CMD ["/bin/bash"]
--------------------------------------------------------------------------------
/transformers/docker/transformers-pytorch-cpu/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:18.04
2 | LABEL maintainer="Hugging Face"
3 | LABEL repository="transformers"
4 |
5 | RUN apt update && \
6 | apt install -y bash \
7 | build-essential \
8 | git \
9 | curl \
10 | ca-certificates \
11 | python3 \
12 | python3-pip && \
13 | rm -rf /var/lib/apt/lists
14 |
15 | RUN python3 -m pip install --no-cache-dir --upgrade pip && \
16 | python3 -m pip install --no-cache-dir \
17 | jupyter \
18 | torch
19 |
20 | WORKDIR /workspace
21 | COPY . transformers/
22 | RUN cd transformers/ && \
23 | python3 -m pip install --no-cache-dir .
24 |
25 | CMD ["/bin/bash"]
--------------------------------------------------------------------------------
/transformers/docker/transformers-pytorch-gpu/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04
2 | LABEL maintainer="Hugging Face"
3 | LABEL repository="transformers"
4 |
5 | RUN apt update && \
6 | apt install -y bash \
7 | build-essential \
8 | git \
9 | curl \
10 | ca-certificates \
11 | python3 \
12 | python3-pip && \
13 | rm -rf /var/lib/apt/lists
14 |
15 | RUN python3 -m pip install --no-cache-dir --upgrade pip && \
16 | python3 -m pip install --no-cache-dir \
17 | mkl \
18 | torch
19 |
20 | WORKDIR /workspace
21 | COPY . transformers/
22 | RUN cd transformers/ && \
23 | python3 -m pip install --no-cache-dir .
24 |
25 | CMD ["/bin/bash"]
--------------------------------------------------------------------------------
/transformers/docker/transformers-pytorch-tpu/dataset.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: PersistentVolume
3 | metadata:
4 | name: huggingface-cluster-disk
5 | spec:
6 | storageClassName: ""
7 | capacity:
8 | storage: 500Gi
9 | accessModes:
10 | - ReadOnlyMany
11 | claimRef:
12 | namespace: default
13 | name: huggingface-cluster-disk-claim
14 | gcePersistentDisk:
15 | pdName: huggingface-cluster-disk
16 | fsType: ext4
17 | readOnly: true
18 | ---
19 | apiVersion: v1
20 | kind: PersistentVolumeClaim
21 | metadata:
22 | name: huggingface-cluster-disk-claim
23 | spec:
24 | # Specify "" as the storageClassName so it matches the PersistentVolume's StorageClass.
25 | # A nil storageClassName value uses the default StorageClass. For details, see
26 | # https://kubernetes.io/docs/concepts/storage/persistent-volumes/#class-1
27 | storageClassName: ""
28 | accessModes:
29 | - ReadOnlyMany
30 | resources:
31 | requests:
32 | storage: 1Ki
33 |
--------------------------------------------------------------------------------
/transformers/docker/transformers-pytorch-tpu/docker-entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | source ~/.bashrc
3 | echo "running docker-entrypoint.sh"
4 | conda activate container
5 | echo $KUBE_GOOGLE_CLOUD_TPU_ENDPOINTS
6 | echo "printed TPU info"
7 | export XRT_TPU_CONFIG="tpu_worker;0;${KUBE_GOOGLE_CLOUD_TPU_ENDPOINTS:7}"
8 | exec "$@"
9 |
--------------------------------------------------------------------------------
/transformers/docker/transformers-tensorflow-cpu/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:18.04
2 | LABEL maintainer="Hugging Face"
3 | LABEL repository="transformers"
4 |
5 | RUN apt update && \
6 | apt install -y bash \
7 | build-essential \
8 | git \
9 | curl \
10 | ca-certificates \
11 | python3 \
12 | python3-pip && \
13 | rm -rf /var/lib/apt/lists
14 |
15 | RUN python3 -m pip install --no-cache-dir --upgrade pip && \
16 | python3 -m pip install --no-cache-dir \
17 | mkl \
18 | tensorflow-cpu
19 |
20 | WORKDIR /workspace
21 | COPY . transformers/
22 | RUN cd transformers/ && \
23 | python3 -m pip install --no-cache-dir .
24 |
25 | CMD ["/bin/bash"]
--------------------------------------------------------------------------------
/transformers/docker/transformers-tensorflow-gpu/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04
2 | LABEL maintainer="Hugging Face"
3 | LABEL repository="transformers"
4 |
5 | RUN apt update && \
6 | apt install -y bash \
7 | build-essential \
8 | git \
9 | curl \
10 | ca-certificates \
11 | python3 \
12 | python3-pip && \
13 | rm -rf /var/lib/apt/lists
14 |
15 | RUN python3 -m pip install --no-cache-dir --upgrade pip && \
16 | python3 -m pip install --no-cache-dir \
17 | mkl \
18 | tensorflow
19 |
20 | WORKDIR /workspace
21 | COPY . transformers/
22 | RUN cd transformers/ && \
23 | python3 -m pip install --no-cache-dir .
24 |
25 | CMD ["/bin/bash"]
--------------------------------------------------------------------------------
/transformers/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SOURCEDIR = source
8 | BUILDDIR = _build
9 |
10 | # Put it first so that "make" without argument is like "make help".
11 | help:
12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
13 |
14 | .PHONY: help Makefile
15 |
16 | # Catch-all target: route all unknown targets to Sphinx using the new
17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
18 | %: Makefile
19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/transformers/docs/source/_static/css/Calibre-Light.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/_static/css/Calibre-Light.ttf
--------------------------------------------------------------------------------
/transformers/docs/source/_static/css/Calibre-Medium.otf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/_static/css/Calibre-Medium.otf
--------------------------------------------------------------------------------
/transformers/docs/source/_static/css/Calibre-Regular.otf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/_static/css/Calibre-Regular.otf
--------------------------------------------------------------------------------
/transformers/docs/source/_static/css/Calibre-Thin.otf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/_static/css/Calibre-Thin.otf
--------------------------------------------------------------------------------
/transformers/docs/source/_static/css/code-snippets.css:
--------------------------------------------------------------------------------
1 |
2 | .highlight .c1, .highlight .sd{
3 | color: #999
4 | }
5 |
6 | .highlight .nn, .highlight .k, .highlight .s1, .highlight .nb, .highlight .bp, .highlight .kc {
7 | color: #FB8D68;
8 | }
9 |
10 | .highlight .kn, .highlight .nv, .highlight .s2, .highlight .ow {
11 | color: #6670FF;
12 | }
13 |
14 | .highlight .gp {
15 | color: #FB8D68;
16 | }
--------------------------------------------------------------------------------
/transformers/docs/source/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/favicon.ico
--------------------------------------------------------------------------------
/transformers/docs/source/imgs/local_attention_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/imgs/local_attention_mask.png
--------------------------------------------------------------------------------
/transformers/docs/source/imgs/ppl_chunked.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/imgs/ppl_chunked.gif
--------------------------------------------------------------------------------
/transformers/docs/source/imgs/ppl_full.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/imgs/ppl_full.gif
--------------------------------------------------------------------------------
/transformers/docs/source/imgs/ppl_sliding.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/imgs/ppl_sliding.gif
--------------------------------------------------------------------------------
/transformers/docs/source/imgs/transformers_logo_name.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/imgs/transformers_logo_name.png
--------------------------------------------------------------------------------
/transformers/docs/source/imgs/warmup_constant_schedule.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/imgs/warmup_constant_schedule.png
--------------------------------------------------------------------------------
/transformers/docs/source/imgs/warmup_cosine_hard_restarts_schedule.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/imgs/warmup_cosine_hard_restarts_schedule.png
--------------------------------------------------------------------------------
/transformers/docs/source/imgs/warmup_cosine_schedule.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/imgs/warmup_cosine_schedule.png
--------------------------------------------------------------------------------
/transformers/docs/source/imgs/warmup_cosine_warm_restarts_schedule.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/imgs/warmup_cosine_warm_restarts_schedule.png
--------------------------------------------------------------------------------
/transformers/docs/source/imgs/warmup_linear_schedule.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/docs/source/imgs/warmup_linear_schedule.png
--------------------------------------------------------------------------------
/transformers/docs/source/main_classes/configuration.rst:
--------------------------------------------------------------------------------
1 | Configuration
2 | -----------------------------------------------------------------------------------------------------------------------
3 |
4 | The base class :class:`~transformers.PretrainedConfig` implements the common methods for loading/saving a configuration
5 | either from a local file or directory, or from a pretrained model configuration provided by the library (downloaded
6 | from HuggingFace's AWS S3 repository).
7 |
8 |
9 | PretrainedConfig
10 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
11 |
12 | .. autoclass:: transformers.PretrainedConfig
13 | :members:
14 |
--------------------------------------------------------------------------------
/transformers/examples/bert-loses-patience/pabee/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/examples/bert-loses-patience/pabee/__init__.py
--------------------------------------------------------------------------------
/transformers/examples/conftest.py:
--------------------------------------------------------------------------------
1 | # tests directory-specific settings - this file is run automatically
2 | # by pytest before any tests are run
3 |
4 | import sys
5 | from os.path import abspath, dirname, join
6 |
7 |
8 | # allow having multiple repository checkouts and not needing to remember to rerun
9 | # 'pip install -e .[dev]' when switching between checkouts and running tests.
10 | git_repo_path = abspath(join(dirname(dirname(__file__)), "src"))
11 | sys.path.insert(1, git_repo_path)
12 |
--------------------------------------------------------------------------------
/transformers/examples/contrib/README.md:
--------------------------------------------------------------------------------
1 | # Community contributed examples
2 |
3 | This folder contains examples which are not actively maintained (mostly contributed by the community).
4 |
5 | Using these examples together with a recent version of the library usually requires making small (and sometimes big) adaptations to get the scripts working.
6 |
--------------------------------------------------------------------------------
/transformers/examples/contrib/mm-imdb/README.md:
--------------------------------------------------------------------------------
1 | ## MM-IMDb
2 |
3 | Based on the script [`run_mmimdb.py`](https://github.com/huggingface/transformers/blob/master/examples/contrib/mm-imdb/run_mmimdb.py).
4 |
5 | [MM-IMDb](http://lisi1.unal.edu.co/mmimdb/) is a multimodal dataset with around 26,000 movies, including images, plots and other metadata.
6 |
7 | ### Training on MM-IMDb
8 |
9 | ```
10 | python run_mmimdb.py \
11 | --data_dir /path/to/mmimdb/dataset/ \
12 | --model_type bert \
13 | --model_name_or_path bert-base-uncased \
14 | --output_dir /path/to/save/dir/ \
15 | --do_train \
16 | --do_eval \
17 | --max_seq_len 512 \
18 | --gradient_accumulation_steps 20 \
19 | --num_image_embeds 3 \
20 | --num_train_epochs 100 \
21 | --patience 5
22 | ```
23 |
24 |
--------------------------------------------------------------------------------
/transformers/examples/deebert/entropy_eval.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | export CUDA_VISIBLE_DEVICES=0
3 |
4 | PATH_TO_DATA=/h/xinji/projects/GLUE
5 |
6 | MODEL_TYPE=bert # bert or roberta
7 | MODEL_SIZE=base # base or large
8 | DATASET=MRPC # SST-2, MRPC, RTE, QNLI, QQP, or MNLI
9 |
10 | MODEL_NAME=${MODEL_TYPE}-${MODEL_SIZE}
11 | if [ $MODEL_TYPE = 'bert' ]
12 | then
13 | MODEL_NAME=${MODEL_NAME}-uncased
14 | fi
15 |
16 | ENTROPIES="0 0.1 0.2 0.3 0.4 0.5 0.6 0.7"
17 |
18 | for ENTROPY in $ENTROPIES; do
19 | python -u run_glue_deebert.py \
20 | --model_type $MODEL_TYPE \
21 | --model_name_or_path ./saved_models/${MODEL_TYPE}-${MODEL_SIZE}/$DATASET/two_stage \
22 | --task_name $DATASET \
23 | --do_eval \
24 | --do_lower_case \
25 | --data_dir $PATH_TO_DATA/$DATASET \
26 | --output_dir ./saved_models/${MODEL_TYPE}-${MODEL_SIZE}/$DATASET/two_stage \
27 | --plot_data_dir ./results/ \
28 | --max_seq_length 128 \
29 | --early_exit_entropy $ENTROPY \
30 | --eval_highway \
31 | --overwrite_cache \
32 | --per_gpu_eval_batch_size=1
33 | done
34 |
--------------------------------------------------------------------------------
/transformers/examples/deebert/eval_deebert.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | export CUDA_VISIBLE_DEVICES=0
3 |
4 | PATH_TO_DATA=/h/xinji/projects/GLUE
5 |
6 | MODEL_TYPE=bert # bert or roberta
7 | MODEL_SIZE=base # base or large
8 | DATASET=MRPC # SST-2, MRPC, RTE, QNLI, QQP, or MNLI
9 |
10 | MODEL_NAME=${MODEL_TYPE}-${MODEL_SIZE}
11 | if [ $MODEL_TYPE = 'bert' ]
12 | then
13 | MODEL_NAME=${MODEL_NAME}-uncased
14 | fi
15 |
16 |
17 | python -u run_glue_deebert.py \
18 | --model_type $MODEL_TYPE \
19 | --model_name_or_path ./saved_models/${MODEL_TYPE}-${MODEL_SIZE}/$DATASET/two_stage \
20 | --task_name $DATASET \
21 | --do_eval \
22 | --do_lower_case \
23 | --data_dir $PATH_TO_DATA/$DATASET \
24 | --output_dir ./saved_models/${MODEL_TYPE}-${MODEL_SIZE}/$DATASET/two_stage \
25 | --plot_data_dir ./results/ \
26 | --max_seq_length 128 \
27 | --eval_each_highway \
28 | --eval_highway \
29 | --overwrite_cache \
30 | --per_gpu_eval_batch_size=1
31 |
--------------------------------------------------------------------------------
/transformers/examples/deebert/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/examples/deebert/src/__init__.py
--------------------------------------------------------------------------------
/transformers/examples/distillation/requirements.txt:
--------------------------------------------------------------------------------
1 | transformers
2 |
3 | gitpython==3.0.2
4 | tensorboard>=1.14.0
5 | tensorboardX==1.8
6 | psutil==5.6.6
7 | scipy==1.3.1
8 |
--------------------------------------------------------------------------------
/transformers/examples/distillation/training_configs/distilbert-base-cased.json:
--------------------------------------------------------------------------------
1 | {
2 | "activation": "gelu",
3 | "attention_dropout": 0.1,
4 | "dim": 768,
5 | "dropout": 0.1,
6 | "hidden_dim": 3072,
7 | "initializer_range": 0.02,
8 | "max_position_embeddings": 512,
9 | "n_heads": 12,
10 | "n_layers": 6,
11 | "sinusoidal_pos_embds": true,
12 | "tie_weights_": true,
13 | "vocab_size": 28996
14 | }
15 |
--------------------------------------------------------------------------------
/transformers/examples/distillation/training_configs/distilbert-base-multilingual-cased.json:
--------------------------------------------------------------------------------
1 | {
2 | "activation": "gelu",
3 | "attention_dropout": 0.1,
4 | "dim": 768,
5 | "dropout": 0.1,
6 | "hidden_dim": 3072,
7 | "initializer_range": 0.02,
8 | "max_position_embeddings": 512,
9 | "n_heads": 12,
10 | "n_layers": 6,
11 | "sinusoidal_pos_embds": true,
12 | "tie_weights_": true,
13 | "vocab_size": 119547
14 | }
15 |
--------------------------------------------------------------------------------
/transformers/examples/distillation/training_configs/distilbert-base-uncased.json:
--------------------------------------------------------------------------------
1 | {
2 | "activation": "gelu",
3 | "attention_dropout": 0.1,
4 | "dim": 768,
5 | "dropout": 0.1,
6 | "hidden_dim": 3072,
7 | "initializer_range": 0.02,
8 | "max_position_embeddings": 512,
9 | "n_heads": 12,
10 | "n_layers": 6,
11 | "sinusoidal_pos_embds": true,
12 | "tie_weights_": true,
13 | "vocab_size": 30522
14 | }
15 |
--------------------------------------------------------------------------------
/transformers/examples/distillation/training_configs/distilgpt2.json:
--------------------------------------------------------------------------------
1 | {
2 | "initializer_range": 0.02,
3 | "layer_norm_epsilon": 0.00001,
4 | "n_ctx": 1024,
5 | "n_embd": 768,
6 | "n_head": 12,
7 | "n_layer": 6,
8 | "n_positions": 1024,
9 | "vocab_size": 50257
10 | }
--------------------------------------------------------------------------------
/transformers/examples/distillation/training_configs/distilroberta-base.json:
--------------------------------------------------------------------------------
1 | {
2 | "vocab_size": 50265,
3 | "hidden_size": 768,
4 | "num_hidden_layers": 6,
5 | "num_attention_heads": 12,
6 | "intermediate_size": 3072,
7 | "hidden_act": "gelu",
8 | "hidden_dropout_prob": 0.1,
9 | "attention_probs_dropout_prob": 0.1,
10 | "max_position_embeddings": 514,
11 | "type_vocab_size": 1,
12 | "initializer_range": 0.02,
13 | "layer_norm_eps": 0.00001
14 | }
--------------------------------------------------------------------------------
/transformers/examples/longform-qa/README.md:
--------------------------------------------------------------------------------
1 | # Long Form Question Answering
2 |
3 | This folder contains the code for the Long Form Question Answering [demo](http://35.226.96.115:8080/) as well as methods to train and use a fully end-to-end Long Form Question Answering system using the [🤗transformers](https://github.com/huggingface/transformers) and [🤗datasets](https://github.com/huggingface/datasets) libraries.
4 | 
5 | You can use these methods to train your own system by following along with the associated [notebook](https://github.com/huggingface/notebooks/blob/master/longform-qa/Long_Form_Question_Answering_with_ELI5_and_Wikipedia.ipynb) or [blog post](https://yjernite.github.io/lfqa.html).
6 |
--------------------------------------------------------------------------------
/transformers/examples/lxmert/README.md:
--------------------------------------------------------------------------------
1 | # LXMERT DEMO
2 |
3 | 1. make a virtualenv: ``virtualenv venv`` and activate it: ``source venv/bin/activate``
4 | 2. install the requirements: ``pip install -r ./requirements.txt``
5 | 3. usage is shown in ``demo.ipynb`` (see the consolidated commands below)
6 |
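7 | Putting the steps together (assuming Jupyter is available in the environment; the launch command is illustrative):
8 | 
9 | ```bash
10 | virtualenv venv
11 | source venv/bin/activate
12 | pip install -r ./requirements.txt
13 | jupyter notebook demo.ipynb
14 | ```
15 | 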
--------------------------------------------------------------------------------
/transformers/examples/movement-pruning/emmental/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | from .configuration_bert_masked import MaskedBertConfig
3 | from .modeling_bert_masked import (
4 | MaskedBertForMultipleChoice,
5 | MaskedBertForQuestionAnswering,
6 | MaskedBertForSequenceClassification,
7 | MaskedBertForTokenClassification,
8 | MaskedBertModel,
9 | )
10 | from .modules import *
11 |
--------------------------------------------------------------------------------
/transformers/examples/movement-pruning/emmental/modules/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | from .binarizer import MagnitudeBinarizer, ThresholdBinarizer, TopKBinarizer
3 | from .masked_nn import MaskedLinear
4 |
--------------------------------------------------------------------------------
/transformers/examples/movement-pruning/requirements.txt:
--------------------------------------------------------------------------------
1 | torch>=1.4.0
2 | -e git+https://github.com/huggingface/transformers.git@352d5472b0c1dec0f420d606d16747d851b4bda8#egg=transformers
3 | knockknock>=0.1.8.1
4 | h5py>=2.10.0
5 | numpy>=1.18.2
6 | scipy>=1.4.1
7 |
--------------------------------------------------------------------------------
/transformers/examples/rag/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/examples/rag/__init__.py
--------------------------------------------------------------------------------
/transformers/examples/rag/requirements.txt:
--------------------------------------------------------------------------------
1 | faiss-cpu >= 1.6.3
2 | datasets >= 1.0.1
3 | psutil >= 5.7.0
4 | torch >= 1.4.0
--------------------------------------------------------------------------------
/transformers/examples/requirements.txt:
--------------------------------------------------------------------------------
1 | tensorboard
2 | scikit-learn
3 | seqeval
4 | psutil
5 | sacrebleu
6 | rouge-score
7 | tensorflow_datasets
8 | pytorch-lightning==0.8.5
9 | matplotlib
10 | git-python==1.0.3
11 | faiss-cpu
12 | streamlit
13 | elasticsearch
14 | pandas
15 | datasets
16 | fire
17 | pytest
18 | conllu
19 |
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 |
5 | sys.path.insert(1, os.path.dirname(os.path.realpath(__file__)))
6 |
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/bertabs/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/examples/seq2seq/bertabs/__init__.py
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/bertabs/requirements.txt:
--------------------------------------------------------------------------------
1 | transformers
2 |
3 | # For ROUGE
4 | nltk
5 | py-rouge
6 |
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/builtin_trainer/finetune.sh:
--------------------------------------------------------------------------------
1 | # the proper usage is documented in the README; you need to specify data_dir, output_dir and model_name_or_path
2 | # run ./builtin_trainer/finetune.sh --help to see all the possible options
3 | python finetune_trainer.py \
4 | --learning_rate=3e-5 \
5 | --fp16 \
6 | --do_train --do_eval --do_predict --evaluate_during_training \
7 | --predict_with_generate \
8 | --n_val 1000 \
9 | "$@"
10 |
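11 | # example invocation (argument values are illustrative):
12 | #   ./builtin_trainer/finetune.sh --data_dir $CNN_DIR --output_dir cnn_trainer_output --model_name_or_path facebook/bart-base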
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/builtin_trainer/finetune_tpu.sh:
--------------------------------------------------------------------------------
1 | export TPU_NUM_CORES=8
2 |
3 | # the proper usage is documented in the README; you need to specify data_dir, output_dir and model_name_or_path
4 | # run ./builtin_trainer/finetune_tpu.sh --help to see all the possible options
5 | python xla_spawn.py --num_cores $TPU_NUM_CORES \
6 | finetune_trainer.py \
7 | --learning_rate=3e-5 \
8 | --do_train --do_eval --evaluate_during_training \
9 | --prediction_loss_only \
10 | --n_val 1000 \
11 | "$@"
12 |
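13 | # example invocation (argument values are illustrative):
14 | #   ./builtin_trainer/finetune_tpu.sh --data_dir $CNN_DIR --output_dir cnn_tpu_output --model_name_or_path facebook/bart-base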
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/builtin_trainer/train_mbart_cc25_enro.sh:
--------------------------------------------------------------------------------
1 | python finetune_trainer.py \
2 | --model_name_or_path=facebook/mbart-large-cc25 \
3 | --data_dir $ENRO_DIR \
4 | --output_dir mbart_cc25_enro --overwrite_output_dir \
5 | --learning_rate=3e-5 \
6 | --warmup_steps 500 \
7 | --fp16 \
8 | --label_smoothing 0.1 \
9 | --adam_eps 1e-06 \
10 | --src_lang en_XX --tgt_lang ro_RO \
11 | --freeze_embeds \
12 | --per_device_train_batch_size=4 --per_device_eval_batch_size=4 \
13 | --max_source_length 128 --max_target_length 128 \
14 | --val_max_target_length 128 --test_max_target_length 128 \
15 | --sortish_sampler \
16 | --num_train_epochs 6 \
17 | --save_steps 25000 --eval_steps 25000 --logging_steps 1000 \
18 | --do_train --do_eval --do_predict --evaluate_during_training \
19 | --predict_with_generate --logging_first_step \
20 | --task translation \
21 | --run_name mbart_en_ro \
22 | "$@"
23 |
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/convert_model_to_fp16.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from typing import Union
4 |
5 | import fire
6 | import torch
7 | from tqdm import tqdm
8 |
9 |
10 | def convert(src_path: str, map_location: str = "cpu", save_path: Union[str, None] = None) -> None:
11 | """Convert a pytorch_model.bin or model.pt file to torch.float16 for faster downloads, less disk space."""
12 | state_dict = torch.load(src_path, map_location=map_location)
13 | for k, v in tqdm(state_dict.items()):
14 | if not isinstance(v, torch.Tensor):
15 | raise TypeError("FP16 conversion only works on paths that are saved state dicts, like pytorch_model.bin")
16 | state_dict[k] = v.half()
17 | if save_path is None: # overwrite src_path
18 | save_path = src_path
19 | torch.save(state_dict, save_path)
20 |
21 |
22 | if __name__ == "__main__":
23 | fire.Fire(convert)
24 |
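25 | # example invocation via the fire CLI (paths are illustrative):
26 | #   python convert_model_to_fp16.py pytorch_model.bin --save_path pytorch_model_fp16.bin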
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/distil_marian_enro_teacher.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | export PYTHONPATH="../":"${PYTHONPATH}"
3 | export WANDB_PROJECT=dmar
4 | # export MAX_LEN=128
5 | python distillation.py \
6 | --learning_rate=3e-4 \
7 | --do_train \
8 | --fp16 \
9 | --val_check_interval 0.25 \
10 | --teacher Helsinki-NLP/opus-mt-en-ro \
11 | --max_source_length $MAX_LEN --max_target_length $MAX_LEN --val_max_target_length $MAX_LEN --test_max_target_length $MAX_LEN \
12 | --student_decoder_layers 3 --student_encoder_layers 6 \
13 | --freeze_encoder --freeze_embeds \
14 | --model_name_or_path IGNORED \
15 | --alpha_hid=3. \
16 | --train_batch_size=$BS --eval_batch_size=$BS \
17 | --tokenizer_name Helsinki-NLP/opus-mt-en-ro \
18 | --warmup_steps 500 --logger_name wandb \
19 | --fp16_opt_level O1 --task translation --normalize_hidden --num_sanity_val_steps=0 \
20 | "$@"
21 |
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/distil_marian_no_teacher.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | export PYTHONPATH="../":"${PYTHONPATH}"
3 | export WANDB_PROJECT=dmar
4 | python distillation.py \
5 | --learning_rate=3e-4 \
6 | --do_train \
7 | --do_predict \
8 | --fp16 --no_teacher \
9 | --val_check_interval 0.25 \
10 | --data_dir $ENRO_DIR \
11 | --max_source_length $MAX_LEN --max_target_length $MAX_LEN --val_max_target_length $MAX_LEN --test_max_target_length $MAX_LEN \
12 | --freeze_encoder --freeze_embeds \
13 | --train_batch_size=$BS --eval_batch_size=$BS \
14 | --tokenizer_name $m --model_name_or_path $m \
15 | --warmup_steps 500 --sortish_sampler --logger_name wandb \
16 | --gpus 1 --fp16_opt_level=O1 --task translation --num_sanity_val_steps=0 \
17 | "$@"
18 |
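19 | # expects $ENRO_DIR, $MAX_LEN, $BS and $m (the student model name) to be exported first,
20 | # e.g. export m=sshleifer/student_marian_en_ro_6_1 as in dynamic_bs_example.sh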
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/dynamic_bs_example.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | export PYTHONPATH="../":"${PYTHONPATH}"
3 | export WANDB_PROJECT=dmar
4 | export MAX_LEN=128
5 | export m=sshleifer/student_marian_en_ro_6_1
6 | python finetune.py \
7 | --learning_rate=3e-4 \
8 | --do_train \
9 | --fp16 \
10 | --data_dir wmt_en_ro \
11 | --max_source_length $MAX_LEN --max_target_length $MAX_LEN --val_max_target_length $MAX_LEN --test_max_target_length $MAX_LEN \
12 | --freeze_encoder --freeze_embeds \
13 | --train_batch_size=48 --eval_batch_size=64 \
14 | --tokenizer_name $m --model_name_or_path $m --num_train_epochs=1 \
15 | --warmup_steps 500 --logger_name wandb --gpus 1 \
16 | --fp16_opt_level=O1 --task translation \
17 | "$@"
18 |
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/finetune.sh:
--------------------------------------------------------------------------------
1 | # the proper usage is documented in the README; you need to specify data_dir, output_dir and model_name_or_path
2 | # run ./finetune.sh --help to see all the possible options
3 | python finetune.py \
4 | --learning_rate=3e-5 \
5 | --fp16 \
6 | --gpus 1 \
7 | --do_train \
8 | --do_predict \
9 | --n_val 1000 \
10 | --val_check_interval 0.1 \
11 | "$@"
12 |
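13 | # example invocation (argument values are illustrative):
14 | #   ./finetune.sh --data_dir $CNN_DIR --output_dir bart_cnn_finetune --model_name_or_path facebook/bart-base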
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/finetune_bart_tiny.sh:
--------------------------------------------------------------------------------
1 | # Script for verifying that run_bart_sum can be invoked from its directory
2 |
3 | # Get tiny dataset with cnn_dm format (4 examples for train, val, test)
4 | wget https://s3.amazonaws.com/datasets.huggingface.co/summarization/cnn_tiny.tgz
5 | tar -xzvf cnn_tiny.tgz
6 | rm cnn_tiny.tgz
7 |
8 | export OUTPUT_DIR_NAME=bart_utest_output
9 | export CURRENT_DIR=${PWD}
10 | export OUTPUT_DIR=${CURRENT_DIR}/${OUTPUT_DIR_NAME}
11 |
12 | # Make output directory if it doesn't exist
13 | mkdir -p $OUTPUT_DIR
14 |
15 | # Add parent directory to python path to access lightning_base.py and testing_utils.py
16 | export PYTHONPATH="../":"${PYTHONPATH}"
17 | python finetune.py \
18 | --data_dir=cnn_tiny/ \
19 | --model_name_or_path=sshleifer/bart-tiny-random \
20 | --learning_rate=3e-5 \
21 | --train_batch_size=2 \
22 | --eval_batch_size=2 \
23 | --output_dir=$OUTPUT_DIR \
24 | --num_train_epochs=1 \
25 | --gpus=0 \
26 | --do_train "$@"
27 |
28 | rm -rf cnn_tiny
29 | rm -rf $OUTPUT_DIR
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/finetune_pegasus_xsum.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | export PYTHONPATH="../":"${PYTHONPATH}"
3 |
4 | # From appendix C of paper https://arxiv.org/abs/1912.08777
5 | # Set --gradient_accumulation_steps so that effective batch size is 256 (2*128, 4*64, 8*32, 16*16)
6 | python finetune.py \
7 | --learning_rate=1e-4 \
8 | --do_train \
9 | --do_predict \
10 | --n_val 1000 \
11 | --val_check_interval 0.25 \
12 | --max_source_length 512 --max_target_length 56 \
13 | --freeze_embeds --label_smoothing 0.1 --adafactor --task summarization_xsum \
14 | "$@"
15 |
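16 | # e.g. passing --train_batch_size=8 --gradient_accumulation_steps=32 gives the
17 | # effective batch size of 8 x 32 = 256 recommended above (values are illustrative)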
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/finetune_t5.sh:
--------------------------------------------------------------------------------
1 | # Add parent directory to python path to access lightning_base.py
2 | export PYTHONPATH="../":"${PYTHONPATH}"
3 |
4 | python finetune.py \
5 | --data_dir=$CNN_DIR \
6 | --learning_rate=3e-5 \
7 | --train_batch_size=$BS \
8 | --eval_batch_size=$BS \
9 | --output_dir=$OUTPUT_DIR \
10 | --max_source_length=512 \
11 | --max_target_length=56 \
12 | --val_check_interval=0.1 --n_val=200 \
13 | --do_train --do_predict \
14 | "$@"
15 |
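16 | # expects $CNN_DIR, $BS and $OUTPUT_DIR to be exported first (finetune_bart_tiny.sh
17 | # shows one way to set up OUTPUT_DIR)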
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/initialization_utils.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | from torch import nn
4 |
5 |
6 | def init_student(student, teacher):
7 | teacher_state_dict = teacher.state_dict()
8 | info = student.load_state_dict(teacher_state_dict, strict=False)
9 | assert info.missing_keys == [], info.missing_keys
10 | return student, info
11 |
12 |
13 | def copy_decoder_layers(teacher, student, l2copy=[0, 2, 4, 7, 9, 11]):
14 | copy_layers(teacher.model.decoder.layers, student.model.decoder.layers, l2copy)
15 |
16 |
17 | def copy_layers(teacher_layers: nn.ModuleList, student_layers: nn.ModuleList, layers_to_copy: List) -> None:
18 | layers_to_copy = nn.ModuleList([l for i, l in enumerate(teacher_layers) if i in layers_to_copy])
19 | assert len(student_layers) == len(layers_to_copy), f"{len(student_layers)} != {len(layers_to_copy)}"
20 | student_layers.load_state_dict(layers_to_copy.state_dict())
21 |
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/minify_dataset.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from pathlib import Path
4 |
5 | import fire
6 |
7 |
8 | def minify(src_dir: str, dest_dir: str, n: int):
9 | """Write first n lines of each file f in src_dir to dest_dir/f """
10 | src_dir = Path(src_dir)
11 | dest_dir = Path(dest_dir)
12 | dest_dir.mkdir(exist_ok=True)
13 | for path in src_dir.iterdir():
14 | new = [x.rstrip() for x in list(path.open().readlines())][:n]
15 | dest_path = dest_dir.joinpath(path.name)
16 | print(dest_path)
17 | dest_path.open("w").write("\n".join(new))
18 |
19 |
20 | if __name__ == "__main__":
21 | fire.Fire(minify)
22 |
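23 | # example invocation via the fire CLI (directory names are illustrative):
24 | #   python minify_dataset.py wmt_en_ro wmt_en_ro_mini 8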
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/run_distiller.sh:
--------------------------------------------------------------------------------
1 | # Add parent directory to python path to access lightning_base.py
2 | export PYTHONPATH="../":"${PYTHONPATH}"
3 |
4 | python distillation.py \
5 | --learning_rate=3e-4 \
6 | --do_train \
7 | --do_predict \
8 | --fp16 \
9 | --val_check_interval 0.1 \
10 | "$@"
11 |
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/test_data/fsmt/build-eval-data.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import io
4 | import json
5 | import subprocess
6 |
7 |
8 | pairs = [
9 | ["en", "ru"],
10 | ["ru", "en"],
11 | ["en", "de"],
12 | ["de", "en"],
13 | ]
14 |
15 | n_objs = 8
16 |
17 |
18 | def get_all_data(pairs, n_objs):
19 | text = {}
20 | for src, tgt in pairs:
21 | pair = f"{src}-{tgt}"
22 | cmd = f"sacrebleu -t wmt19 -l {pair} --echo src".split()
23 | src_lines = subprocess.run(cmd, stdout=subprocess.PIPE).stdout.decode("utf-8").splitlines()
24 | cmd = f"sacrebleu -t wmt19 -l {pair} --echo ref".split()
25 | tgt_lines = subprocess.run(cmd, stdout=subprocess.PIPE).stdout.decode("utf-8").splitlines()
26 | text[pair] = {"src": src_lines[:n_objs], "tgt": tgt_lines[:n_objs]}
27 | return text
28 |
29 |
30 | text = get_all_data(pairs, n_objs)
31 | filename = "./fsmt_val_data.json"
32 | with io.open(filename, "w", encoding="utf-8") as f:
33 | bleu_data = json.dump(text, f, indent=2, ensure_ascii=False)
34 |
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/test_data/wmt_en_ro/train.len:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/examples/seq2seq/test_data/wmt_en_ro/train.len
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/test_data/wmt_en_ro/val.len:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/examples/seq2seq/test_data/wmt_en_ro/val.len
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/train_distilbart_cnn.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | export PYTHONPATH="../":"${PYTHONPATH}"
3 |
4 | export BS=32
5 | export GAS=1
6 |
7 | python finetune.py \
8 | --learning_rate=3e-5 \
9 | --fp16 \
10 | --gpus 1 \
11 | --do_train \
12 | --do_predict \
13 | --val_check_interval 0.25 \
14 | --n_val 500 \
15 | --num_train_epochs 2 \
16 | --freeze_encoder --freeze_embeds --data_dir $CNN_DIR \
17 | --max_target_length 142 --val_max_target_length=142 \
18 | --train_batch_size=$BS --eval_batch_size=$BS --gradient_accumulation_steps=$GAS \
19 | --model_name_or_path sshleifer/student_cnn_12_6 \
20 | --tokenizer_name facebook/bart-large \
21 | --warmup_steps 500 \
22 | --output_dir distilbart-cnn-12-6 \
23 | "$@"
24 |
25 |
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/train_distilbart_xsum.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | export PYTHONPATH="../":"${PYTHONPATH}"
3 | export BS=16
4 | export GAS=2
5 | python distillation.py \
6 | --learning_rate=3e-4 \
7 | --do_train \
8 | --do_predict \
9 | --fp16 \
10 | --val_check_interval 0.1 --n_val 1000 \
11 | --teacher facebook/bart-large-xsum --data_dir $XSUM_DIR \
12 | --max_target_length=60 --val_max_target_length=60 --test_max_target_length=100 \
13 | --student_decoder_layers 6 --student_encoder_layers 12 \
14 | --freeze_encoder --freeze_embeds \
15 | --model_name_or_path IGNORED \
16 | --alpha_hid=3. --length_penalty=0.5 \
17 | --train_batch_size=$BS --eval_batch_size=$BS --gradient_accumulation_steps=$GAS --num_train_epochs=6 \
18 | --tokenizer_name facebook/bart-large \
19 | --warmup_steps 500 \
20 | --output_dir distilbart_xsum_12_6 \
21 | "$@"
22 |
--------------------------------------------------------------------------------
/transformers/examples/seq2seq/train_mbart_cc25_enro.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | export PYTHONPATH="../":"${PYTHONPATH}"
3 |
4 | python finetune.py \
5 | --learning_rate=3e-5 \
6 | --fp16 \
7 | --do_train \
8 | --val_check_interval=0.25 \
9 | --adam_eps 1e-06 \
10 | --num_train_epochs 6 --src_lang en_XX --tgt_lang ro_RO \
11 | --data_dir $ENRO_DIR \
12 | --max_source_length $MAX_LEN --max_target_length $MAX_LEN --val_max_target_length $MAX_LEN --test_max_target_length $MAX_LEN \
13 | --train_batch_size=$BS --eval_batch_size=$BS \
14 | --task translation \
15 | --warmup_steps 500 \
16 | --freeze_embeds \
17 | --model_name_or_path=facebook/mbart-large-cc25 \
18 | "$@"
19 |
--------------------------------------------------------------------------------
/transformers/examples/text-generation/README.md:
--------------------------------------------------------------------------------
1 | ## Language generation
2 |
3 | Based on the script [`run_generation.py`](https://github.com/huggingface/transformers/blob/master/examples/text-generation/run_generation.py).
4 |
5 | Conditional text generation using the auto-regressive models of the library: GPT, GPT-2, Transformer-XL, XLNet, CTRL.
6 | A similar script is used for our official demo [Write With Transformer](https://transformer.huggingface.co), where you
7 | can try out the different models available in the library.
8 |
9 | Example usage:
10 |
11 | ```bash
12 | python run_generation.py \
13 | --model_type=gpt2 \
14 | --model_name_or_path=gpt2
15 | ```
16 |
--------------------------------------------------------------------------------
/transformers/examples/text-generation/pplm/imgs/headfigure.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/examples/text-generation/pplm/imgs/headfigure.png
--------------------------------------------------------------------------------
/transformers/examples/text-generation/pplm/imgs/wooly.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/examples/text-generation/pplm/imgs/wooly.png
--------------------------------------------------------------------------------
/transformers/examples/text-generation/pplm/pplm_classification_head.py:
--------------------------------------------------------------------------------
1 | import torch
2 |
3 |
4 | class ClassificationHead(torch.nn.Module):
5 | """Classification Head for transformer encoders"""
6 |
7 | def __init__(self, class_size, embed_size):
8 | super().__init__()
9 | self.class_size = class_size
10 | self.embed_size = embed_size
11 | # self.mlp1 = torch.nn.Linear(embed_size, embed_size)
12 | # self.mlp2 = (torch.nn.Linear(embed_size, class_size))
13 | self.mlp = torch.nn.Linear(embed_size, class_size)
14 |
15 | def forward(self, hidden_state):
16 | # hidden_state = F.relu(self.mlp1(hidden_state))
17 | # hidden_state = self.mlp2(hidden_state)
18 | logits = self.mlp(hidden_state)
19 | return logits
20 |
--------------------------------------------------------------------------------
/transformers/model_cards/DeepPavlov/bert-base-bg-cs-pl-ru-cased/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language:
3 | - bg
4 | - cs
5 | - pl
6 | - ru
7 | ---
8 |
9 | # bert-base-bg-cs-pl-ru-cased
10 |
11 | SlavicBERT\[1\] \(Slavic \(bg, cs, pl, ru\), cased, 12‑layer, 768‑hidden, 12‑heads, 180M parameters\) was trained on Russian News and four Wikipedias: Bulgarian, Czech, Polish, and Russian. The subtoken vocabulary was built using this data. Multilingual BERT was used as the initialization for SlavicBERT.
12 |
13 |
14 | \[1\]: Arkhipov M., Trofimova M., Kuratov Y., Sorokin A. \(2019\). [Tuning Multilingual Transformers for Language-Specific Named Entity Recognition](https://www.aclweb.org/anthology/W19-3712/). ACL anthology W19-3712.
15 |
--------------------------------------------------------------------------------
/transformers/model_cards/DeepPavlov/rubert-base-cased-conversational/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language:
3 | - ru
4 | ---
5 |
6 | # rubert-base-cased-conversational
7 |
8 | Conversational RuBERT \(Russian, cased, 12‑layer, 768‑hidden, 12‑heads, 180M parameters\) was trained on OpenSubtitles\[1\], [Dirty](https://d3.ru/), [Pikabu](https://pikabu.ru/), and a Social Media segment of the Taiga corpus\[2\]. We assembled a new vocabulary for the Conversational RuBERT model on this data and initialized the model with [RuBERT](../rubert-base-cased).
9 |
10 |
11 | \[1\]: P. Lison and J. Tiedemann, 2016, OpenSubtitles2016: Extracting Large Parallel Corpora from Movie and TV Subtitles. In Proceedings of the 10th International Conference on Language Resources and Evaluation \(LREC 2016\)
12 |
13 | \[2\]: Shavrina T., Shapovalova O. \(2017\) To the Methodology of Corpus Construction for Machine Learning: “Taiga” Syntax Tree Corpus and Parser. In Proceedings of “CORPORA 2017”, International Conference, Saint-Petersburg, 2017.
14 |
--------------------------------------------------------------------------------
/transformers/model_cards/DeepPavlov/rubert-base-cased-sentence/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language:
3 | - ru
4 | ---
5 |
6 | # rubert-base-cased-sentence
7 |
8 | Sentence RuBERT \(Russian, cased, 12-layer, 768-hidden, 12-heads, 180M parameters\) is a representation‑based sentence encoder for Russian. It is initialized with RuBERT and fine‑tuned on SNLI\[1\] Google-translated to Russian and on the Russian part of the XNLI dev set\[2\]. Sentence representations are mean-pooled token embeddings, in the same manner as in Sentence‑BERT\[3\].
9 |
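As a rough illustration of that pooling, here is a sketch assuming the standard masked mean over token embeddings (the input sentence is arbitrary):

```python
import torch
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("DeepPavlov/rubert-base-cased-sentence")
model = AutoModel.from_pretrained("DeepPavlov/rubert-base-cased-sentence")

enc = tokenizer("Привет, мир!", return_tensors="pt")
with torch.no_grad():
    token_emb = model(**enc)[0]                         # (1, seq_len, hidden)
mask = enc["attention_mask"].unsqueeze(-1).float()      # zero out padding
sentence_emb = (token_emb * mask).sum(1) / mask.sum(1)  # masked mean pooling
```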
10 |
11 | \[1\]: S. R. Bowman, G. Angeli, C. Potts, and C. D. Manning. \(2015\) A large annotated corpus for learning natural language inference. arXiv preprint [arXiv:1508.05326](https://arxiv.org/abs/1508.05326)
12 |
13 | \[2\]: Williams A., Bowman S. \(2018\) XNLI: Evaluating Cross-lingual Sentence Representations. arXiv preprint [arXiv:1809.05053](https://arxiv.org/abs/1809.05053)
14 |
15 | \[3\]: N. Reimers, I. Gurevych \(2019\) Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks. arXiv preprint [arXiv:1908.10084](https://arxiv.org/abs/1908.10084)
16 |
--------------------------------------------------------------------------------
/transformers/model_cards/DeepPavlov/rubert-base-cased/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language:
3 | - ru
4 | ---
5 |
6 | # rubert-base-cased
7 |
8 | RuBERT \(Russian, cased, 12‑layer, 768‑hidden, 12‑heads, 180M parameters\) was trained on the Russian part of Wikipedia and news data. We used this training data to build a vocabulary of Russian subtokens and took a multilingual version of BERT‑base as an initialization for RuBERT\[1\].
9 |
10 |
11 | \[1\]: Kuratov, Y., Arkhipov, M. \(2019\). Adaptation of Deep Bidirectional Multilingual Transformers for Russian Language. arXiv preprint [arXiv:1905.07213](https://arxiv.org/abs/1905.07213).
12 |
--------------------------------------------------------------------------------
/transformers/model_cards/NeuML/bert-small-cord19-squad2/README.md:
--------------------------------------------------------------------------------
1 | # BERT-Small CORD-19 fine-tuned on SQuAD 2.0
2 |
3 | [bert-small-cord19 model](https://huggingface.co/NeuML/bert-small-cord19) fine-tuned on SQuAD 2.0
4 |
5 | ## Building the model
6 |
7 | ```bash
8 | python run_squad.py \
9 | --model_type bert \
10 | --model_name_or_path bert-small-cord19 \
11 | --do_train \
12 | --do_eval \
13 | --do_lower_case \
14 | --version_2_with_negative \
15 | --train_file train-v2.0.json \
16 | --predict_file dev-v2.0.json \
17 | --per_gpu_train_batch_size 8 \
18 | --learning_rate 3e-5 \
19 | --num_train_epochs 3.0 \
20 | --max_seq_length 384 \
21 | --doc_stride 128 \
22 | --output_dir bert-small-cord19-squad2 \
23 | --save_steps 0 \
24 | --threads 8 \
25 | --overwrite_cache \
26 | --overwrite_output_dir
27 | ```
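A minimal usage sketch (the question and context strings are illustrative assumptions, not from the card):

```python
from transformers import pipeline

qa = pipeline("question-answering", model="NeuML/bert-small-cord19-squad2",
              tokenizer="NeuML/bert-small-cord19-squad2")
print(qa(question="What data was the model fine-tuned on?",
         context="The model was fine-tuned on SQuAD 2.0 after pre-training on CORD-19 text."))
```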
--------------------------------------------------------------------------------
/transformers/model_cards/NeuML/bert-small-cord19/README.md:
--------------------------------------------------------------------------------
1 | # BERT-Small fine-tuned on CORD-19 dataset
2 |
3 | [BERT L6_H-512_A-8 model](https://huggingface.co/google/bert_uncased_L-6_H-512_A-8) fine-tuned on the [CORD-19 dataset](https://www.semanticscholar.org/cord19).
4 |
5 | ## CORD-19 data subset
6 | The training data is stored as a [Kaggle dataset](https://www.kaggle.com/davidmezzetti/cord19-qa?select=cord19.txt). It is a subset of the
7 | full corpus, focusing on high-quality articles with detected study designs.
8 |
9 | ## Building the model
10 |
11 | ```bash
12 | python run_language_modeling.py \
13 | --model_type bert \
14 | --model_name_or_path google/bert_uncased_L-6_H-512_A-8 \
15 | --do_train \
16 | --mlm \
17 | --line_by_line \
18 | --block_size 512 \
19 | --train_data_file cord19.txt \
20 | --per_gpu_train_batch_size 4 \
21 | --learning_rate 3e-5 \
22 | --num_train_epochs 3.0 \
23 | --output_dir bert-small-cord19 \
24 | --save_steps 0 \
25 | --overwrite_output_dir
26 | ```
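Since the model is trained for MLM, a fill-mask sketch along these lines should apply (the example sentence is an assumption):

```python
from transformers import pipeline

fill_mask = pipeline("fill-mask", model="NeuML/bert-small-cord19")
print(fill_mask("Coronaviruses are enveloped RNA [MASK]."))
```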
--------------------------------------------------------------------------------
/transformers/model_cards/VictorSanh/roberta-base-finetuned-yelp-polarity/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: en
3 | datasets:
4 | - yelp_polarity
5 | ---
6 |
7 | # RoBERTa-base-finetuned-yelp-polarity
8 |
9 | This is a [RoBERTa-base](https://huggingface.co/roberta-base) checkpoint fine-tuned for binary sentiment classification on [Yelp polarity](https://huggingface.co/nlp/viewer/?dataset=yelp_polarity).
10 | It gets **98.08%** accuracy on the test set.
11 |
12 | ## Hyper-parameters
13 |
14 | We used the following hyper-parameters to train the model on one GPU:
15 | ```python
16 | num_train_epochs = 2.0
17 | learning_rate = 1e-05
18 | weight_decay = 0.0
19 | adam_epsilon = 1e-08
20 | max_grad_norm = 1.0
21 | per_device_train_batch_size = 32
22 | gradient_accumulation_steps = 1
23 | warmup_steps = 3500
24 | seed = 42
25 | ```
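For reference, these map one-to-one onto `transformers.TrainingArguments`; the sketch below assumes the standard `Trainer` setup (dataset loading and the `Trainer` call are omitted):

```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="roberta-base-finetuned-yelp-polarity",
    num_train_epochs=2.0,
    learning_rate=1e-05,
    weight_decay=0.0,
    adam_epsilon=1e-08,
    max_grad_norm=1.0,
    per_device_train_batch_size=32,
    gradient_accumulation_steps=1,
    warmup_steps=3500,
    seed=42,
)
```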
26 |
--------------------------------------------------------------------------------
/transformers/model_cards/akhooli/mbart-large-cc25-ar-en/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - translation
4 |
5 | language:
6 | - ar
7 | - en
8 |
9 | license: mit
10 | ---
11 | ### mbart-large-ar-en
12 | This is mbart-large-cc25, fine-tuned on a subset of the OPUS corpus for ar_en translation.
13 | Usage: see the [example notebook](https://colab.research.google.com/drive/1I6RFOWMaTpPBX7saJYjnSTddW0TD6H1t?usp=sharing)
14 | Note: the model was trained on a limited dataset and is not fully trained (do not use it in production).
15 | Other models by me: [Abed Khooli](https://huggingface.co/akhooli)
16 |
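A generic loading sketch (hedged: mBART language-code handling follows the example notebook and is omitted here; the Arabic input is arbitrary):

```python
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("akhooli/mbart-large-cc25-ar-en")
model = AutoModelForSeq2SeqLM.from_pretrained("akhooli/mbart-large-cc25-ar-en")

batch = tokenizer("مرحبا بالعالم", return_tensors="pt")
generated = model.generate(**batch)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))
```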
--------------------------------------------------------------------------------
/transformers/model_cards/akhooli/mbart-large-cc25-en-ar/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - translation
4 |
5 | language:
6 | - en
7 | - ar
8 |
9 | license: mit
10 | ---
11 | ### mbart-large-en-ar
12 | This is mbart-large-cc25, fine-tuned on a subset of the UN corpus for en_ar translation.
13 | Usage: see the [example notebook](https://colab.research.google.com/drive/1I6RFOWMaTpPBX7saJYjnSTddW0TD6H1t?usp=sharing)
14 | Note: the model was trained on a limited dataset and is not fully trained (do not use it in production).
15 |
--------------------------------------------------------------------------------
/transformers/model_cards/akhooli/xlm-r-large-arabic-sent/README.md:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | language:
4 | - ar
5 | - en
6 |
7 | license: mit
8 | ---
9 | ### xlm-r-large-arabic-sent
10 | Multilingual sentiment classification (Label_0: mixed, Label_1: negative, Label_2: positive) of Arabic reviews, obtained by fine-tuning XLM-RoBERTa-Large.
11 | Zero-shot classification of other languages also works (including mixed-language text, e.g. Arabic and English). The mixed category is not accurate and may be confused with the
12 | other classes (it was based on 3-out-of-5-star ratings in reviews).
13 | Usage: see the last section in this [Colab notebook](https://lnkd.in/d3bCFyZ)
14 |
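A pipeline sketch (the Arabic review text is an illustrative assumption):

```python
from transformers import pipeline

classifier = pipeline("sentiment-analysis", model="akhooli/xlm-r-large-arabic-sent")
# Per the card: LABEL_0 = mixed, LABEL_1 = negative, LABEL_2 = positive.
print(classifier("هذا المنتج رائع جدا"))
```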
--------------------------------------------------------------------------------
/transformers/model_cards/albert-base-v1-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - exbert
4 |
5 | license: apache-2.0
6 | ---
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/transformers/model_cards/albert-xxlarge-v2-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - exbert
4 |
5 | license: apache-2.0
6 | ---
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/transformers/model_cards/allenai/longformer-base-4096-extra.pos.embd.only/README.md:
--------------------------------------------------------------------------------
1 |
2 | # longformer-base-4096-extra.pos.embd.only
3 |
4 | This model is similar to `longformer-base-4096`, but it was pretrained so as to preserve the RoBERTa weights: all RoBERTa weights were frozen and only the additional position embeddings were trained.
5 |
6 |
7 | ### Citing
8 |
9 | If you use `Longformer` in your research, please cite [Longformer: The Long-Document Transformer](https://arxiv.org/abs/2004.05150).
10 | ```
11 | @article{Beltagy2020Longformer,
12 | title={Longformer: The Long-Document Transformer},
13 | author={Iz Beltagy and Matthew E. Peters and Arman Cohan},
14 | journal={arXiv:2004.05150},
15 | year={2020},
16 | }
17 | ```
18 |
19 | `Longformer` is an open-source project developed by [the Allen Institute for Artificial Intelligence (AI2)](http://www.allenai.org).
20 | AI2 is a non-profit institute with the mission to contribute to humanity through high-impact AI research and engineering.
21 |
--------------------------------------------------------------------------------
/transformers/model_cards/aodiniz/bert_uncased_L-10_H-512_A-8_cord19-200616/README.md:
--------------------------------------------------------------------------------
1 | # BERT L-10 H-512 fine-tuned on MLM (CORD-19 2020/06/16)
2 |
3 | BERT model with [10 Transformer layers and hidden embedding of size 512](https://huggingface.co/google/bert_uncased_L-10_H-512_A-8), referenced in [Well-Read Students Learn Better: On the Importance of Pre-training Compact Models](https://arxiv.org/abs/1908.08962), fine-tuned for MLM on CORD-19 dataset (as released on 2020/06/16).
4 |
5 | ## Training the model
6 |
7 | ```bash
8 | python run_language_modeling.py \
9 | --model_type bert \
10 | --model_name_or_path google/bert_uncased_L-10_H-512_A-8 \
11 | --do_train \
12 | --train_data_file {cord19-200616-dataset} \
13 | --mlm \
14 | --mlm_probability 0.2 \
15 | --line_by_line \
16 | --block_size 512 \
17 | --per_device_train_batch_size 10 \
18 | --learning_rate 3e-5 \
19 | --num_train_epochs 2 \
20 | --output_dir bert_uncased_L-10_H-512_A-8_cord19-200616
21 | ```
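A short sketch of querying the resulting masked LM directly (the sentence and the top-5 readout are illustrative assumptions):

```python
import torch
from transformers import AutoModelForMaskedLM, AutoTokenizer

name = "aodiniz/bert_uncased_L-10_H-512_A-8_cord19-200616"
tokenizer = AutoTokenizer.from_pretrained(name)
model = AutoModelForMaskedLM.from_pretrained(name)

enc = tokenizer("The virus spreads through respiratory [MASK].", return_tensors="pt")
with torch.no_grad():
    logits = model(**enc)[0]
mask_pos = (enc["input_ids"][0] == tokenizer.mask_token_id).nonzero()[0].item()
top5 = logits[0, mask_pos].topk(5).indices.tolist()
print(tokenizer.convert_ids_to_tokens(top5))
```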
--------------------------------------------------------------------------------
/transformers/model_cards/aodiniz/bert_uncased_L-2_H-512_A-8_cord19-200616/README.md:
--------------------------------------------------------------------------------
1 | # BERT L-2 H-512 fine-tuned on MLM (CORD-19 2020/06/16)
2 |
3 | BERT model with [2 Transformer layers and hidden embedding of size 512](https://huggingface.co/google/bert_uncased_L-2_H-512_A-8), referenced in [Well-Read Students Learn Better: On the Importance of Pre-training Compact Models](https://arxiv.org/abs/1908.08962), fine-tuned for MLM on CORD-19 dataset (as released on 2020/06/16).
4 |
5 | ## Training the model
6 |
7 | ```bash
8 | python run_language_modeling.py \
9 | --model_type bert \
10 | --model_name_or_path google/bert_uncased_L-2_H-512_A-8 \
11 | --do_train \
12 | --train_data_file {cord19-200616-dataset} \
13 | --mlm \
14 | --mlm_probability 0.2 \
15 | --line_by_line \
16 | --block_size 512 \
17 | --per_device_train_batch_size 20 \
18 | --learning_rate 3e-5 \
19 | --num_train_epochs 2 \
20 | --output_dir bert_uncased_L-2_H-512_A-8_cord19-200616
21 | ```
--------------------------------------------------------------------------------
/transformers/model_cards/aodiniz/bert_uncased_L-4_H-256_A-4_cord19-200616/README.md:
--------------------------------------------------------------------------------
1 | # BERT L-4 H-256 fine-tuned on MLM (CORD-19 2020/06/16)
2 |
3 | BERT model with [4 Transformer layers and hidden embedding of size 256](https://huggingface.co/google/bert_uncased_L-4_H-256_A-4), referenced in [Well-Read Students Learn Better: On the Importance of Pre-training Compact Models](https://arxiv.org/abs/1908.08962), fine-tuned for MLM on CORD-19 dataset (as released on 2020/06/16).
4 |
5 | ## Training the model
6 |
7 | ```bash
8 | python run_language_modeling.py \
9 | --model_type bert \
10 | --model_name_or_path google/bert_uncased_L-4_H-256_A-4 \
11 | --do_train \
12 | --train_data_file {cord19-200616-dataset} \
13 | --mlm \
14 | --mlm_probability 0.2 \
15 | --line_by_line \
16 | --block_size 256 \
17 | --per_device_train_batch_size 20 \
18 | --learning_rate 3e-5 \
19 | --num_train_epochs 2 \
20 | --output_dir bert_uncased_L-4_H-256_A-4_cord19-200616
21 | ```
--------------------------------------------------------------------------------
/transformers/model_cards/bart-large-cnn/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - summarization
4 | ---
5 |
6 |
--------------------------------------------------------------------------------
/transformers/model_cards/bart-large-xsum/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - summarization
4 | ---
5 |
6 |
--------------------------------------------------------------------------------
/transformers/model_cards/bert-base-chinese-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: zh
3 | ---
4 |
--------------------------------------------------------------------------------
/transformers/model_cards/bert-base-german-dbmdz-cased-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: de
3 | license: mit
4 | ---
5 |
--------------------------------------------------------------------------------
/transformers/model_cards/bert-base-german-dbmdz-uncased-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: de
3 | license: mit
4 | ---
5 |
--------------------------------------------------------------------------------
/transformers/model_cards/bert-large-cased-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | license: apache-2.0
3 | ---
4 |
--------------------------------------------------------------------------------
/transformers/model_cards/binwang/xlnet-base-cased/README.md:
--------------------------------------------------------------------------------
1 | This model is the pre-trained **XLNet** base model with 12 layers.
2 |
3 | It accompanies the paper: SBERT-WK: A Sentence Embedding Method by Dissecting BERT-based Word Models
4 |
5 | Project Page: [SBERT-WK](https://github.com/BinWang28/SBERT-WK-Sentence-Embedding)
6 |
--------------------------------------------------------------------------------
/transformers/model_cards/daigo/bert-base-japanese-sentiment/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language:
3 | - ja
4 | ---
5 |
6 | Binary sentiment classification for Japanese.
7 |
8 | # Usage
9 | ```python
10 | from transformers import pipeline
11 | print(pipeline("sentiment-analysis", model="daigo/bert-base-japanese-sentiment", tokenizer="daigo/bert-base-japanese-sentiment")("私は幸福である。"))
12 | # [{'label': 'ポジティブ', 'score': 0.98430425}]
13 | ```
14 |
--------------------------------------------------------------------------------
/transformers/model_cards/dccuchile/bert-base-spanish-wwm-cased/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: es
3 | ---
4 |
--------------------------------------------------------------------------------
/transformers/model_cards/dccuchile/bert-base-spanish-wwm-uncased/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: es
3 | ---
4 |
--------------------------------------------------------------------------------
/transformers/model_cards/deepset/sentence_bert/README.md:
--------------------------------------------------------------------------------
1 | This is an upload of the bert-base-nli-stsb-mean-tokens pretrained model from the Sentence Transformers repo (https://github.com/UKPLab/sentence-transformers).
2 |
--------------------------------------------------------------------------------
/transformers/model_cards/distilbert-base-cased-distilled-squad-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: "en"
3 | datasets:
4 | - squad
5 | metrics:
6 | - squad
7 | ---
8 |
--------------------------------------------------------------------------------
/transformers/model_cards/distilbert-base-multilingual-cased-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: multilingual
3 | license: apache-2.0
4 | ---
5 |
--------------------------------------------------------------------------------
/transformers/model_cards/distilgpt2-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - exbert
4 |
5 | license: apache-2.0
6 | ---
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/transformers/model_cards/distilroberta-base-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - exbert
4 |
5 | license: apache-2.0
6 | ---
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/transformers/model_cards/djstrong/bg_cs_pl_ru_cased_L-12_H-768_A-12/README.md:
--------------------------------------------------------------------------------
1 | Slavic BERT from https://github.com/deepmipt/Slavic-BERT-NER (model archive: http://files.deeppavlov.ai/deeppavlov_data/bg_cs_pl_ru_cased_L-12_H-768_A-12.tar.gz).
2 |
--------------------------------------------------------------------------------
/transformers/model_cards/facebook/bart-large-cnn/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - summarization
4 |
5 | license: mit
6 | ---
7 |
--------------------------------------------------------------------------------
/transformers/model_cards/facebook/bart-large/README.md:
--------------------------------------------------------------------------------
1 | The Bart model was proposed by Mike Lewis, Yinhan Liu, Naman Goyal, Marjan Ghazvininejad, Abdelrahman Mohamed, Omer Levy, Ves Stoyanov and Luke Zettlemoyer on 29 Oct, 2019. According to the abstract,
2 |
3 | Bart uses a standard seq2seq/machine translation architecture with a bidirectional encoder (like BERT) and a left-to-right decoder (like GPT).
4 |
5 | The pretraining task involves randomly shuffling the order of the original sentences and a novel in-filling scheme, where spans of text are replaced with a single mask token.
6 |
7 | BART is particularly effective when fine-tuned for text generation but also works well for comprehension tasks. It matches the performance of RoBERTa with comparable training resources on GLUE and SQuAD, and achieves new state-of-the-art results on a range of abstractive dialogue, question answering, and summarization tasks, with gains of up to 6 ROUGE.
8 |
9 | The Authors’ code can be found here:
10 | https://github.com/pytorch/fairseq/tree/master/examples/bart
11 |
--------------------------------------------------------------------------------
/transformers/model_cards/facebook/rag-token-nq_new/README.md:
--------------------------------------------------------------------------------
1 | The model can be loaded and used as follows on [this branch](https://github.com/huggingface/transformers/tree/finalize_rag).
2 |
3 |
4 | # Load model
5 |
6 | ```python
7 | from transformers import RagTokenizer, RagTokenForGeneration, RagRetriever
8 |
9 | # create Retriever augmented model
10 | retriever = RagRetriever.from_pretrained("facebook/rag-token-nq_new", use_dummy_dataset=True)
11 | model = RagTokenForGeneration.from_pretrained("facebook/rag-token-nq_new", retriever=retriever)
12 |
13 | tokenizer = RagTokenizer.from_pretrained("facebook/rag-token-nq_new")
14 |
15 | # create input ids and labels
16 | input_ids = tokenizer("who sings does he love me with reba", return_tensors="pt").input_ids
17 |
18 | # use labels
19 | labels = tokenizer.generator("Linda Davis", return_tensors="pt").input_ids
20 |
21 |
22 | # compute loss
23 | outputs = model(input_ids, labels=labels)
24 | ```
25 |
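Generation is then a plain `generate` call, continuing from the snippet above (a sketch; decoding settings are left at their defaults):

```python
# generate an answer and decode it (continues the snippet above)
generated = model.generate(input_ids)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))
```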
--------------------------------------------------------------------------------
/transformers/model_cards/flexudy/t5-base-multi-sentence-doctor/sent-banner.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/model_cards/flexudy/t5-base-multi-sentence-doctor/sent-banner.png
--------------------------------------------------------------------------------
/transformers/model_cards/gpt2-large-README.md:
--------------------------------------------------------------------------------
1 | Test the full generation capabilities here: https://transformer.huggingface.co/doc/gpt2-large
2 |
--------------------------------------------------------------------------------
/transformers/model_cards/gpt2-medium-README.md:
--------------------------------------------------------------------------------
1 | Test the full generation capabilities here: https://transformer.huggingface.co/doc/gpt2-large
2 |
--------------------------------------------------------------------------------
/transformers/model_cards/gpt2-xl-README.md:
--------------------------------------------------------------------------------
1 | Test the full generation capabilities here: https://transformer.huggingface.co/doc/gpt2-large
2 |
--------------------------------------------------------------------------------
/transformers/model_cards/healx/gpt-2-pubmed-large/README.md:
--------------------------------------------------------------------------------
1 | GPT-2 (774M model) fine-tuned on 0.5M PubMed abstracts. Used on [writemeanabstract.com](https://writemeanabstract.com) and in the following preprint:
2 |
3 | [Papanikolaou, Yannis, and Andrea Pierleoni. "DARE: Data Augmented Relation Extraction with GPT-2." arXiv preprint arXiv:2004.13845 (2020).](https://arxiv.org/abs/2004.13845)
4 |
--------------------------------------------------------------------------------
/transformers/model_cards/healx/gpt-2-pubmed-medium/README.md:
--------------------------------------------------------------------------------
1 | GPT-2 (355M model) fine-tuned on 0.5M PubMed abstracts. Used on [writemeanabstract.com](https://writemeanabstract.com) and in the following preprint:
2 |
3 | [Papanikolaou, Yannis, and Andrea Pierleoni. "DARE: Data Augmented Relation Extraction with GPT-2." arXiv preprint arXiv:2004.13845 (2020).](https://arxiv.org/abs/2004.13845)
4 |
--------------------------------------------------------------------------------
/transformers/model_cards/illuin/lepetit/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: fr
3 | thumbnail: https://miro.medium.com/max/700/1*MoPnD6vA9wTHjdLfW7POyw.png
4 | widget:
5 | - text: "Le camembert LePetit c'est le ."
6 | - text: "Salut les ça va ?"
7 | ---
8 |
9 | # LePetit: A pre-training efficient and lightning fast French Language Model
10 |
11 | See [blogpost](https://medium.com/illuin/lepetit-a-pre-training-efficient-and-lightning-fast-french-language-model-96495ad726b3)
12 |
13 |
--------------------------------------------------------------------------------
/transformers/model_cards/ipuneetrathore/bert-base-cased-finetuned-finBERT/README.md:
--------------------------------------------------------------------------------
1 | ## FinBERT
2 |
3 | Code for importing and using this model is available [here](https://github.com/ipuneetrathore/BERT_models)
4 |
--------------------------------------------------------------------------------
/transformers/model_cards/jannesg/bertsson/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: sv
3 | ---
4 |
5 | # BERTSSON Models
6 |
7 | The models are trained on:
8 | - Government Text
9 | - Swedish Literature
10 | - Swedish News
11 |
12 | Corpus size: Roughly 6B tokens.
13 |
14 | The following models are currently available:
15 |
16 | - **bertsson** - A BERT base model trained with the same hyperparameters as first published by Google.
17 |
18 | All models are cased and trained with whole word masking.
19 |
20 | Stay tuned for evaluations.
21 |
--------------------------------------------------------------------------------
/transformers/model_cards/jimregan/BERTreach/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: ga
3 | tags:
4 | - irish
5 | ---
6 |
7 | ## BERTreach
8 |
9 | ([beirtreach](https://www.teanglann.ie/en/fgb/beirtreach) means 'oyster bed')
10 |
11 | **Model size:** 84M
12 |
13 | **Training data:**
14 | * [PARSEME 1.2](https://gitlab.com/parseme/parseme_corpus_ga/-/blob/master/README.md)
15 | * Newscrawl 300k portion of the [Leipzig Corpora](https://wortschatz.uni-leipzig.de/en/download/irish)
16 | * Private news corpus crawled with [Corpus Crawler](https://github.com/google/corpuscrawler)
17 |
18 | (2,125,804 sentences, 47,419,062 tokens, as counted by wc)
19 |
20 | ```python
21 | from transformers import pipeline
22 | fill_mask = pipeline("fill-mask", model="jimregan/BERTreach", tokenizer="jimregan/BERTreach")
23 | ```
24 |
--------------------------------------------------------------------------------
/transformers/model_cards/julien-c/bert-xsmall-dummy/README.md:
--------------------------------------------------------------------------------
1 | ## How to build a dummy model
2 |
3 |
4 | ```python
5 | from transformers.configuration_bert import BertConfig
6 | from transformers.modeling_bert import BertForMaskedLM
7 | from transformers.modeling_tf_bert import TFBertForMaskedLM
8 | from transformers.tokenization_bert import BertTokenizer
9 |
10 |
11 | SMALL_MODEL_IDENTIFIER = "julien-c/bert-xsmall-dummy"
12 | DIRNAME = "./bert-xsmall-dummy"
13 |
14 | config = BertConfig(10, 20, 1, 1, 40)
15 |
16 | model = BertForMaskedLM(config)
17 | model.save_pretrained(DIRNAME)
18 |
19 | tf_model = TFBertForMaskedLM.from_pretrained(DIRNAME, from_pt=True)
20 | tf_model.save_pretrained(DIRNAME)
21 |
22 | # Slightly different for tokenizer.
23 | # tokenizer = BertTokenizer.from_pretrained(DIRNAME)
24 | # tokenizer.save_pretrained(DIRNAME)
25 | ```
26 |
--------------------------------------------------------------------------------
/transformers/model_cards/lvwerra/bert-imdb/README.md:
--------------------------------------------------------------------------------
1 | # BERT-IMDB
2 |
3 | ## What is it?
4 | BERT (`bert-large-cased`) trained for sentiment classification on the [IMDB dataset](https://www.kaggle.com/lakshmi25npathi/imdb-dataset-of-50k-movie-reviews).
5 |
6 | ## Training setting
7 |
8 | The model was trained on 80% of the IMDB dataset for sentiment classification for three epochs with a learning rate of `1e-5`, using the `simpletransformers` library (which applies a learning-rate schedule).
9 |
10 | ## Result
11 | The model achieved 90% classification accuracy on the validation set.
12 |
13 | ## Reference
14 | The full experiment is available in the [trl repo](https://lvwerra.github.io/trl/03-bert-imdb-training/).
15 |
--------------------------------------------------------------------------------
/transformers/model_cards/lvwerra/gpt2-imdb/README.md:
--------------------------------------------------------------------------------
1 | # GPT2-IMDB
2 |
3 | ## What is it?
4 | A GPT2 (`gpt2`) language model fine-tuned on the [IMDB dataset](https://www.kaggle.com/lakshmi25npathi/imdb-dataset-of-50k-movie-reviews).
5 |
6 | ## Training setting
7 |
8 | The GPT2 language model was fine-tuned for 1 epoch on the IMDB dataset. All reviews were joined into a single text file, separated by the EOS token:
9 |
10 | ```python
11 | import pandas as pd
12 | df = pd.read_csv("imdb-dataset.csv")
13 | imdb_str = " <|endoftext|> ".join(df['review'].tolist())
14 |
15 | with open('imdb.txt', 'w') as f:
16 |     f.write(imdb_str)
17 | ```
18 |
19 | To train the model, the `run_language_modeling.py` script from the `transformers` library was used:
20 |
21 | ```bash
22 | python run_language_modeling.py \
23 | --train_data_file imdb.txt \
24 | --output_dir gpt2-imdb \
25 | --model_type gpt2 \
26 | --model_name_or_path gpt2
27 | ```
28 |
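After training, sampling from the model is a one-liner (a sketch; the prompt and `max_length` are arbitrary assumptions):

```python
from transformers import pipeline

generator = pipeline("text-generation", model="lvwerra/gpt2-imdb")
print(generator("This movie was", max_length=40)[0]["generated_text"])
```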
--------------------------------------------------------------------------------
/transformers/model_cards/lvwerra/gpt2-medium-taboo/README.md:
--------------------------------------------------------------------------------
1 | # GPT-2 (medium) Taboo
2 |
3 | ## What is it?
4 | A fine-tuned GPT-2 version for Taboo cards generation.
5 |
6 | ## Training setting
7 |
8 | The model was trained on ~900 Taboo cards in the following format for 100 epochs:
9 | ```
10 | Describe the word Glitch without using the words Problem, Unexpected, Technology, Minor, Outage.
11 | ```
12 |
13 |
--------------------------------------------------------------------------------
/transformers/model_cards/lysandre/arxiv-nlp/README.md:
--------------------------------------------------------------------------------
1 | # ArXiv-NLP GPT-2 checkpoint
2 |
3 | This is a GPT-2 small checkpoint for PyTorch. It is the official `gpt2-small` fine-tuned on ArXiv papers in the computational linguistics field.
4 |
5 | ## Training data
6 |
7 | This model was trained on a subset of ArXiv papers that were parsed from PDF to txt. The resulting data is made of 80MB of text from the computational linguistics (cs.CL) field.
--------------------------------------------------------------------------------
/transformers/model_cards/lysandre/arxiv/README.md:
--------------------------------------------------------------------------------
1 | # ArXiv GPT-2 checkpoint
2 |
3 | This is a GPT-2 small checkpoint for PyTorch. It is the official `gpt2-small` fine-tuned on ArXiv papers in physics fields.
4 |
5 | ## Training data
6 |
7 | This model was trained on a subset of ArXiv papers that were parsed from PDF to txt. The resulting data is made of 130MB of text, mostly from quantum physics (quant-ph) and other physics sub-fields.
8 |
--------------------------------------------------------------------------------
/transformers/model_cards/mrm8488/GuaPeTe-2-tiny/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: es
3 | widget:
4 | - text: "Murcia es la huerta de Europa porque"
5 | ---
6 |
7 | # GuaPeTe-2-tiny: A proof-of-concept tiny GPT-2-like model trained on the Spanish Wikipedia corpus
8 |
--------------------------------------------------------------------------------
/transformers/model_cards/mrm8488/RoBERTinha/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: gl
3 | widget:
4 | - text: "Galicia é unha autónoma española."
5 | - text: "A lingua oficial de Galicia é o ."
6 | ---
7 |
8 | # RoBERTinha: RoBERTa-like language model trained on the OSCAR Galician corpus
9 |
--------------------------------------------------------------------------------
/transformers/model_cards/mrm8488/RoBasquERTa/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: eu
3 | widget:
4 | - text: "Euskara da Euskal Herriko ofiziala"
5 | - text: "Gaur egun, Euskadik Espainia osoko ekonomia du"
6 | ---
7 |
8 | # RoBasquERTa: RoBERTa-like language model trained on the OSCAR Basque corpus
9 |
--------------------------------------------------------------------------------
/transformers/model_cards/mrm8488/RuPERTa-base-finetuned-pawsx-es/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: es
3 | datasets:
4 | - xtreme
5 | widget:
6 | - text: "En 2009 se mudó a Filadelfia y en la actualidad vive en Nueva York. Se mudó nuevamente a Filadelfia en 2009 y ahora vive en la ciudad de Nueva York."
7 | ---
8 |
9 | # RuPERTa-base fine-tuned on PAWS-X-es for Paraphrase Identification
10 |
--------------------------------------------------------------------------------
/transformers/model_cards/mrm8488/RuPERTa-base-finetuned-squadv1/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: es
3 | datasets:
4 | - squad
5 | ---
6 |
--------------------------------------------------------------------------------
/transformers/model_cards/mrm8488/RuPERTa-base-finetuned-squadv2/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: es
3 | datasets:
4 | - squad_v2
5 | ---
6 |
--------------------------------------------------------------------------------
/transformers/model_cards/mrm8488/bert-base-german-dbmdz-cased-finetuned-pawsx-de/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: de
3 | datasets:
4 | - xtreme
5 | widget:
6 | - text: "Winarsky ist Mitglied des IEEE, Phi Beta Kappa, des ACM und des Sigma Xi. Winarsky ist Mitglied des ACM, des IEEE, der Phi Beta Kappa und der Sigma Xi."
7 | ---
8 |
9 | # bert-base-german-dbmdz-cased fine-tuned on PAWS-X-de for Paraphrase Identification
10 |
--------------------------------------------------------------------------------
/transformers/model_cards/mrm8488/camembert-base-finetuned-pawsx-fr/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: fr
3 | datasets:
4 | - xtreme
5 | widget:
6 | - text: "La première série a été mieux reçue par la critique que la seconde. La seconde série a été bien accueillie par la critique, mieux que la première."
7 | ---
8 |
9 | # Camembert-base fine-tuned on PAWS-X-fr for Paraphrase Identification
10 |
--------------------------------------------------------------------------------
/transformers/model_cards/mrm8488/electricidad-base-finetuned-pawsx-es/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: es
3 | datasets:
4 | - xtreme
5 | widget:
6 | - text: "El río Tabaci es una vertiente del río Leurda en Rumania. El río Leurda es un afluente del río Tabaci en Rumania."
7 | ---
8 |
9 | # Electricidad-base fine-tuned on PAWS-X-es for Paraphrase Identification
10 |
--------------------------------------------------------------------------------
/transformers/model_cards/mrm8488/gpt2-finetuned-recipes-cooking/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: en
3 | thumbnail:
4 | widget:
5 | - text: "HuggingFace Cake:"
6 | ---
7 |
--------------------------------------------------------------------------------
/transformers/model_cards/mrm8488/gpt2-finetuned-recipes-cooking_v2/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: en
3 | thumbnail:
4 | widget:
5 | - text: "HuggingFace Cake:"
6 | ---
7 |
--------------------------------------------------------------------------------
/transformers/model_cards/pradhyra/AWSBlogBert/README.md:
--------------------------------------------------------------------------------
1 | This model is pre-trained on blog articles from AWS Blogs.
2 |
3 | ## Pre-training corpora
4 | The input text contains around 3000 blog articles on [AWS Blogs website](https://aws.amazon.com/blogs/) technical subject matter including AWS products, tools and tutorials.
5 |
6 | ## Pre-training details
7 | I picked a Roberta architecture for masked language modeling (6-layer, 768-hidden, 12-heads, 82M parameters) and its corresponding ByteLevelBPE tokenization strategy. I then followed HuggingFace's Transformers [blog post](https://huggingface.co/blog/how-to-train) to train the model.
8 | I chose to follow the following training set-up: 28k training steps with batches of 64 sequences of length 512 with an initial learning rate 5e-5. The model acheived a training loss of 3.6 on the MLM task over 10 epochs.
9 |
--------------------------------------------------------------------------------
/transformers/model_cards/pranavpsv/gpt2-genre-story-generator/README.md:
--------------------------------------------------------------------------------
1 |
2 | # GPT2 Genre Based Story Generator
3 |
4 | ## Model description
5 |
6 | GPT2 fine-tuned for genre-based story generation.
7 |
8 | ## Intended uses
9 |
10 | Used to generate stories based on a user-provided genre and starting prompt.
11 |
12 | ## How to use
13 |
14 | #### Supported Genres
15 | superhero, action, drama, horror, thriller, sci_fi
16 | #### Input text format
17 | \<BOS> \<genre> Some optional text...
18 |
19 | **Example**: \<BOS> \<sci_fi> After discovering time travel,
20 |
21 | ```python
22 | # Example of usage
23 | from transformers import pipeline
24 |
25 | story_gen = pipeline("text-generation", "pranavpsv/gpt2-genre-story-generator")
26 | print(story_gen("<BOS> <superhero> Batman"))
27 |
28 | ```
29 |
30 | ## Training data
31 |
32 | Initialized with the pre-trained weights of the "gpt2" checkpoint, then fine-tuned on stories of various genres.
33 |
--------------------------------------------------------------------------------
/transformers/model_cards/rdenadai/BR_BERTo/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: pt
3 | tags:
4 | - portuguese
5 | - brazil
6 | - pt_BR
7 | widget:
8 | - text: gostei muito dessa
9 | ---
10 |
11 | # BR_BERTo
12 |
13 | A Portuguese (Brazil) language model for text inference.
14 |
15 | ## Params
16 |
17 | Trained on a corpus of 6_993_330 sentences.
18 |
19 | - Vocab size: 150_000
20 | - RobertaForMaskedLM size: 512
21 | - Num train epochs: 3
22 | - Time to train: ~10 days (on GCP with an Nvidia T4)
23 |
24 | I followed the great tutorial from the HuggingFace team:
25 |
26 | [How to train a new language model from scratch using Transformers and Tokenizers](https://huggingface.co/blog/how-to-train)
27 |
28 | More info here:
29 |
30 | [BR_BERTo](https://github.com/rdenadai/BR-BERTo)
31 |
--------------------------------------------------------------------------------
/transformers/model_cards/roberta-large-mnli-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | license: mit
3 | widget:
4 | - text: "I like you. I love you."
5 | ---
6 |
7 |
8 | ## roberta-large-mnli
9 |
10 | Trained by Facebook, [original source](https://github.com/pytorch/fairseq/tree/master/examples/roberta)
11 |
12 | ```bibtex
13 | @article{liu2019roberta,
14 | title = {RoBERTa: A Robustly Optimized BERT Pretraining Approach},
15 | author = {Yinhan Liu and Myle Ott and Naman Goyal and Jingfei Du and
16 | Mandar Joshi and Danqi Chen and Omer Levy and Mike Lewis and
17 | Luke Zettlemoyer and Veselin Stoyanov},
18 | journal={arXiv preprint arXiv:1907.11692},
19 | year = {2019},
20 | }
21 | ```
22 |
23 |
--------------------------------------------------------------------------------
/transformers/model_cards/schmidek/electra-small-cased/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: en
3 | license: apache-2.0
4 | ---
5 |
6 | ## ELECTRA-small-cased
7 |
8 | This is a cased version of `google/electra-small-discriminator`, trained on the
9 | [OpenWebText corpus](https://skylion007.github.io/OpenWebTextCorpus/).
10 |
11 | Uses the same tokenizer and vocab as `bert-base-cased`.
12 |
--------------------------------------------------------------------------------
/transformers/model_cards/severinsimmler/literary-german-bert/kfold.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/model_cards/severinsimmler/literary-german-bert/kfold.png
--------------------------------------------------------------------------------
/transformers/model_cards/severinsimmler/literary-german-bert/prosa-jahre.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/model_cards/severinsimmler/literary-german-bert/prosa-jahre.png
--------------------------------------------------------------------------------
/transformers/model_cards/spentaur/yelp/README.md:
--------------------------------------------------------------------------------
1 | # DistilBERT Yelp Review Sentiment
2 | This model is used for sentiment analysis on English Yelp reviews.
3 | It is a DistilBERT model trained on 1 million reviews from the Yelp open dataset.
4 | It is a regression model, with outputs in the range of ~-2 to ~2, where -2 corresponds to 1 star and 2 to 5 stars.
5 | It was trained using [ktrain](https://github.com/amaiya/ktrain) because of its ease of use.
6 |
7 | Example use:
8 |
9 | ```python
10 | from transformers import AutoTokenizer, TFAutoModelForSequenceClassification
11 |
12 | tokenizer = AutoTokenizer.from_pretrained('distilbert-base-uncased', use_fast=True)
13 | model = TFAutoModelForSequenceClassification.from_pretrained("spentaur/yelp")
14 |
15 | review = "This place is great!"
16 | input_ids = tokenizer.encode(review, return_tensors='tf')
17 | pred = model(input_ids)[0][0][0].numpy()
18 | # pred should be ~1.9562385
19 | ```
20 |
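To turn the regression output into a star rating, a linear mapping from [-2, 2] to [1, 5] suffices (a hypothetical helper, not part of the model card):

```python
def score_to_stars(pred: float) -> float:
    """Map a model score in [-2, 2] linearly to a 1-5 star rating."""
    return pred + 3.0

print(score_to_stars(1.9562385))  # ~4.96 stars
```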
--------------------------------------------------------------------------------
/transformers/model_cards/stas/tiny-wmt19-en-de/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language:
3 | - en
4 | - de
5 | thumbnail:
6 | tags:
7 | - wmt19
8 | - testing
9 | license: apache-2.0
10 | datasets:
11 | - wmt19
12 | metrics:
13 | - bleu
14 | ---
15 |
16 | # Tiny FSMT
17 |
18 | This is a tiny model that is used in the `transformers` test suite. It doesn't do anything useful, other than testing that `FSMT` works.
19 |
--------------------------------------------------------------------------------
/transformers/model_cards/uncnlp/lxmert-base-uncased/lxmert_model-1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/model_cards/uncnlp/lxmert-base-uncased/lxmert_model-1.jpg
--------------------------------------------------------------------------------
/transformers/model_cards/xlm-mlm-en-2048-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - exbert
4 |
5 | license: cc-by-nc-4.0
6 | ---
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/transformers/model_cards/xlm-roberta-base-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - exbert
4 |
5 | license: mit
6 | ---
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/transformers/model_cards/xlm-roberta-large-finetuned-conll03-german-README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: de
3 | ---
4 |
5 | ## xlm-roberta-large-finetuned-conll03-german
6 |
--------------------------------------------------------------------------------
/transformers/model_cards/yjernite/bart_eli5/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: en
3 | license: apache-2.0
4 | datasets:
5 | - eli5
6 | ---
7 |
8 | ## BART ELI5
9 |
10 | Read the article at https://yjernite.github.io/lfqa.html and try the demo at https://huggingface.co/qa/
11 |
--------------------------------------------------------------------------------
/transformers/model_cards/yuvraj/summarizer-cnndm/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: "en"
3 | tags:
4 | - summarization
5 | ---
6 |
7 | # Summarization
8 |
9 | ## Model description
10 |
11 | BartForConditionalGeneration model fine-tuned for summarization on 10,000 samples from the cnn-dailymail dataset
12 |
13 | ## How to use
14 |
15 | PyTorch model available
16 |
17 | ```python
18 | from transformers import AutoTokenizer, AutoModelWithLMHead, pipeline
19 |
20 | tokenizer = AutoTokenizer.from_pretrained("yuvraj/summarizer-cnndm")
21 | model = AutoModelWithLMHead.from_pretrained("yuvraj/summarizer-cnndm")
22 |
23 | summarizer = pipeline('summarization', model=model, tokenizer=tokenizer)
24 | summarizer("<text to be summarized>")
25 | ```
26 | ## Limitations and bias
27 | Trained on a small dataset
28 |
--------------------------------------------------------------------------------
/transformers/model_cards/yuvraj/xSumm/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | language: "en"
3 | tags:
4 | - summarization
5 | - extreme summarization
6 | ---
7 |
8 | ## Model description
9 |
10 | BartForConditionalGeneration model for extreme summarization; it creates a one-line abstractive summary of a given article
11 |
12 | ## How to use
13 |
14 | PyTorch model available
15 |
16 | ```python
17 | from transformers import AutoTokenizer, AutoModelWithLMHead, pipeline
18 |
19 | tokenizer = AutoTokenizer.from_pretrained("yuvraj/xSumm")
20 | model = AutoModelWithLMHead.from_pretrained("yuvraj/xSumm")
21 |
22 | xsumm = pipeline('summarization', model=model, tokenizer=tokenizer)
23 | xsumm("<text to be summarized>")
24 | ```
25 | ## Limitations and bias
26 | Trained on a small fraction of the XSum training dataset
27 |
--------------------------------------------------------------------------------
/transformers/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | line-length = 119
3 | target-version = ['py35']
4 |
--------------------------------------------------------------------------------
/transformers/scripts/fsmt/tests-to-run.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # these scripts need to be run before making any changes to FSMT-related code - they should cover all bases
4 |
5 | USE_CUDA=0 RUN_SLOW=1 pytest --disable-warnings tests/test_tokenization_fsmt.py tests/test_configuration_auto.py tests/test_modeling_fsmt.py examples/seq2seq/test_fsmt_bleu_score.py
6 | USE_CUDA=1 RUN_SLOW=1 pytest --disable-warnings tests/test_tokenization_fsmt.py tests/test_configuration_auto.py tests/test_modeling_fsmt.py examples/seq2seq/test_fsmt_bleu_score.py
7 |
--------------------------------------------------------------------------------
/transformers/setup.cfg:
--------------------------------------------------------------------------------
1 | [isort]
2 | default_section = FIRSTPARTY
3 | ensure_newline_before_comments = True
4 | force_grid_wrap = 0
5 | include_trailing_comma = True
6 | known_first_party = transformers
7 | known_third_party =
8 | absl
9 | conllu
10 | datasets
11 | elasticsearch
12 | fairseq
13 | faiss-cpu
14 | fastprogress
15 | fire
16 | fugashi
17 | git
18 | h5py
19 | matplotlib
20 | nltk
21 | numpy
22 | packaging
23 | pandas
24 | PIL
25 | psutil
26 | pytest
27 | pytorch_lightning
28 | rouge_score
29 | sacrebleu
30 | seqeval
31 | sklearn
32 | streamlit
33 | tensorboardX
34 | tensorflow
35 | tensorflow_datasets
36 | timeout_decorator
37 | torch
38 | torchtext
39 | torchvision
40 | torch_xla
41 | tqdm
42 |
43 | line_length = 119
44 | lines_after_imports = 2
45 | multi_line_output = 3
46 | use_parentheses = True
47 |
48 | [flake8]
49 | ignore = E203, E501, E741, W503, W605
50 | max-line-length = 119
51 |
--------------------------------------------------------------------------------
/transformers/src/transformers.egg-info/dependency_links.txt:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/transformers/src/transformers.egg-info/entry_points.txt:
--------------------------------------------------------------------------------
1 | [console_scripts]
2 | transformers-cli = transformers.commands.transformers_cli:main
3 |
4 |
--------------------------------------------------------------------------------
/transformers/src/transformers.egg-info/top_level.txt:
--------------------------------------------------------------------------------
1 | transformers
2 |
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/__init__.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/__init__.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/activations.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/activations.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/activations.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/activations.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/activations_tf.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/activations_tf.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_albert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_albert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_albert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_albert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_auto.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_auto.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_auto.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_auto.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_bart.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_bart.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_bart.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_bart.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_bert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_bert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_bert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_bert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_bert_generation.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_bert_generation.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_bert_generation.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_bert_generation.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_camembert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_camembert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_camembert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_camembert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_ctrl.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_ctrl.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_ctrl.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_ctrl.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_distilbert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_distilbert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_distilbert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_distilbert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_dpr.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_dpr.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_dpr.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_dpr.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_electra.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_electra.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_electra.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_electra.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_encoder_decoder.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_encoder_decoder.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_encoder_decoder.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_encoder_decoder.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_flaubert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_flaubert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_flaubert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_flaubert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_fsmt.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_fsmt.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_fsmt.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_fsmt.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_funnel.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_funnel.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_funnel.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_funnel.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_gpt2.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_gpt2.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_gpt2.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_gpt2.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_layoutlm.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_layoutlm.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_layoutlm.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_layoutlm.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_longformer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_longformer.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_longformer.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_longformer.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_lxmert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_lxmert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_lxmert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_lxmert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_marian.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_marian.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_marian.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_marian.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_mbart.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_mbart.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_mbart.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_mbart.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_mmbt.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_mmbt.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_mmbt.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_mmbt.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_mobilebert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_mobilebert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_mobilebert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_mobilebert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_openai.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_openai.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_openai.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_openai.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_pegasus.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_pegasus.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_pegasus.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_pegasus.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_rag.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_rag.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_rag.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_rag.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_reformer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_reformer.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_reformer.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_reformer.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_retribert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_retribert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_retribert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_retribert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_roberta.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_roberta.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_roberta.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_roberta.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_t5.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_t5.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_t5.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_t5.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_transfo_xl.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_transfo_xl.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_transfo_xl.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_transfo_xl.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_utils.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_utils.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_utils.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_xlm.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_xlm.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_xlm.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_xlm.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_xlm_roberta.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_xlm_roberta.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_xlm_roberta.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_xlm_roberta.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_xlnet.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_xlnet.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/configuration_xlnet.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/configuration_xlnet.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/file_utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/file_utils.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/file_utils.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/file_utils.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/generation_tf_utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/generation_tf_utils.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/generation_utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/generation_utils.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/generation_utils.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/generation_utils.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/hf_argparser.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/hf_argparser.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/hf_argparser.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/hf_argparser.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/integrations.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/integrations.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/integrations.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/integrations.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modelcard.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modelcard.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modelcard.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modelcard.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_albert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_albert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_albert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_albert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_auto.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_auto.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_auto.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_auto.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_bart.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_bart.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_bart.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_bart.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_bert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_bert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_bert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_bert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_bert_generation.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_bert_generation.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_bert_generation.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_bert_generation.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_camembert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_camembert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_camembert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_camembert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_ctrl.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_ctrl.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_ctrl.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_ctrl.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_distilbert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_distilbert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_distilbert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_distilbert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_dpr.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_dpr.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_dpr.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_dpr.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_electra.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_electra.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_electra.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_electra.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_encoder_decoder.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_encoder_decoder.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_encoder_decoder.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_encoder_decoder.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_flaubert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_flaubert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_flaubert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_flaubert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_fsmt.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_fsmt.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_fsmt.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_fsmt.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_funnel.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_funnel.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_funnel.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_funnel.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_gpt2.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_gpt2.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_gpt2.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_gpt2.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_layoutlm.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_layoutlm.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_layoutlm.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_layoutlm.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_longformer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_longformer.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_longformer.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_longformer.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_lxmert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_lxmert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_lxmert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_lxmert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_marian.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_marian.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_marian.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_marian.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_mbart.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_mbart.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_mbart.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_mbart.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_mmbt.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_mmbt.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_mmbt.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_mmbt.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_mobilebert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_mobilebert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_mobilebert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_mobilebert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_openai.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_openai.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_openai.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_openai.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_outputs.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_outputs.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_outputs.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_outputs.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_pegasus.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_pegasus.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_pegasus.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_pegasus.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_rag.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_rag.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_rag.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_rag.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_reformer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_reformer.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_reformer.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_reformer.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_retribert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_retribert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_retribert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_retribert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_roberta.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_roberta.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_roberta.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_roberta.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_t5.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_t5.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_t5.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_t5.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_albert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_albert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_auto.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_auto.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_bert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_bert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_camembert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_camembert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_ctrl.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_ctrl.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_distilbert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_distilbert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_electra.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_electra.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_flaubert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_flaubert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_funnel.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_funnel.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_gpt2.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_gpt2.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_longformer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_longformer.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_lxmert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_lxmert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_mobilebert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_mobilebert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_openai.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_openai.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_outputs.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_outputs.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_pytorch_utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_pytorch_utils.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_pytorch_utils.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_pytorch_utils.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_roberta.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_roberta.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_t5.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_t5.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_transfo_xl.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_transfo_xl.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_transfo_xl_utilities.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_transfo_xl_utilities.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_utils.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_xlm.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_xlm.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_xlm_roberta.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_xlm_roberta.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_tf_xlnet.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_tf_xlnet.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_transfo_xl.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_transfo_xl.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_transfo_xl.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_transfo_xl.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_transfo_xl_utilities.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_transfo_xl_utilities.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_transfo_xl_utilities.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_transfo_xl_utilities.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_utils.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_utils.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_utils.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_xlm.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_xlm.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_xlm.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_xlm.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_xlm_roberta.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_xlm_roberta.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_xlm_roberta.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_xlm_roberta.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_xlnet.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_xlnet.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/modeling_xlnet.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/modeling_xlnet.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/optimization.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/optimization.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/optimization.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/optimization.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/optimization_tf.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/optimization_tf.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/pipelines.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/pipelines.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/pipelines.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/pipelines.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/retrieval_rag.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/retrieval_rag.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/retrieval_rag.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/retrieval_rag.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_albert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_albert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_albert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_albert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_auto.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_auto.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_auto.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_auto.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_bart.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_bart.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_bart.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_bart.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_bert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_bert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_bert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_bert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_bert_generation.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_bert_generation.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_bert_generation.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_bert_generation.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_bert_japanese.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_bert_japanese.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_bert_japanese.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_bert_japanese.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_bertweet.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_bertweet.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_bertweet.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_bertweet.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_camembert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_camembert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_camembert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_camembert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_ctrl.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_ctrl.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_ctrl.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_ctrl.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_distilbert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_distilbert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_distilbert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_distilbert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_dpr.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_dpr.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_dpr.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_dpr.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_electra.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_electra.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_electra.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_electra.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_flaubert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_flaubert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_flaubert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_flaubert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_fsmt.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_fsmt.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_fsmt.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_fsmt.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_funnel.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_funnel.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_funnel.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_funnel.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_gpt2.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_gpt2.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_gpt2.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_gpt2.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_layoutlm.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_layoutlm.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_layoutlm.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_layoutlm.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_longformer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_longformer.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_longformer.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_longformer.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_lxmert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_lxmert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_lxmert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_lxmert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_marian.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_marian.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_marian.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_marian.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_mbart.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_mbart.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_mbart.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_mbart.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_mobilebert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_mobilebert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_mobilebert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_mobilebert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_openai.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_openai.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_openai.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_openai.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_pegasus.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_pegasus.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_pegasus.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_pegasus.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_phobert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_phobert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_phobert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_phobert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_rag.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_rag.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_rag.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_rag.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_reformer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_reformer.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_reformer.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_reformer.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_retribert.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_retribert.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_retribert.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_retribert.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_roberta.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_roberta.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_roberta.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_roberta.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_t5.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_t5.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_t5.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_t5.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_transfo_xl.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_transfo_xl.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_transfo_xl.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_transfo_xl.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_utils.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_utils.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_utils.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_utils_base.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_utils_base.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_utils_base.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_utils_base.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_utils_fast.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_utils_fast.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_utils_fast.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_utils_fast.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_xlm.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_xlm.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_xlm.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_xlm.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_xlm_roberta.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_xlm_roberta.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_xlm_roberta.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_xlm_roberta.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_xlnet.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_xlnet.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/tokenization_xlnet.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/tokenization_xlnet.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/trainer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/trainer.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/trainer.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/trainer.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/trainer_tf.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/trainer_tf.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/trainer_utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/trainer_utils.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/trainer_utils.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/trainer_utils.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/training_args.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/training_args.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/training_args.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/training_args.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/training_args_tf.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/training_args_tf.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/__pycache__/training_args_tf.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/__pycache__/training_args_tf.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/benchmark/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/benchmark/__init__.py
--------------------------------------------------------------------------------
/transformers/src/transformers/benchmark/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/benchmark/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/benchmark/__pycache__/__init__.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/benchmark/__pycache__/__init__.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/benchmark/__pycache__/benchmark.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/benchmark/__pycache__/benchmark.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/benchmark/__pycache__/benchmark.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/benchmark/__pycache__/benchmark.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/benchmark/__pycache__/benchmark_args.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/benchmark/__pycache__/benchmark_args.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/benchmark/__pycache__/benchmark_args.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/benchmark/__pycache__/benchmark_args.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/benchmark/__pycache__/benchmark_args_tf.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/benchmark/__pycache__/benchmark_args_tf.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/benchmark/__pycache__/benchmark_args_utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/benchmark/__pycache__/benchmark_args_utils.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/benchmark/__pycache__/benchmark_args_utils.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/benchmark/__pycache__/benchmark_args_utils.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/benchmark/__pycache__/benchmark_tf.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/benchmark/__pycache__/benchmark_tf.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/benchmark/__pycache__/benchmark_utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/benchmark/__pycache__/benchmark_utils.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/benchmark/__pycache__/benchmark_utils.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/benchmark/__pycache__/benchmark_utils.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/commands/__init__.py:
--------------------------------------------------------------------------------
from abc import ABC, abstractmethod
from argparse import ArgumentParser


class BaseTransformersCLICommand(ABC):
    """Abstract base class that every transformers CLI subcommand implements."""

    @staticmethod
    @abstractmethod
    def register_subcommand(parser: ArgumentParser):
        """Attach this command's subparser and arguments to the CLI parser."""
        raise NotImplementedError()

    @abstractmethod
    def run(self):
        """Execute the command with the parsed arguments."""
        raise NotImplementedError()
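
This abstract base class is the contract for CLI subcommands: register_subcommand
receives the argparse subparsers object and wires up one subcommand, and run
executes it. Below is a minimal sketch of a hypothetical subcommand (the
PingCommand name and its flag are illustrative, not part of the library):

    # Hypothetical subcommand built on BaseTransformersCLICommand.
    from argparse import ArgumentParser

    from transformers.commands import BaseTransformersCLICommand


    class PingCommand(BaseTransformersCLICommand):
        @staticmethod
        def register_subcommand(parser: ArgumentParser):
            # "parser" is the subparsers action: attach a "ping" subcommand
            # and point it at this command's factory.
            ping_parser = parser.add_parser("ping", help="Print a liveness message.")
            ping_parser.add_argument("--name", type=str, default="world")
            ping_parser.set_defaults(func=lambda args: PingCommand(args.name))

        def __init__(self, name: str):
            self.name = name

        def run(self):
            print(f"pong, {self.name}!")

A driver can then dispatch with the usual argparse pattern: parse the arguments,
call args.func(args) to build the command object, and invoke its run() method.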
--------------------------------------------------------------------------------
/transformers/src/transformers/data/__init__.py:
--------------------------------------------------------------------------------
# flake8: noqa
# There is no way to ignore the "F401 '...' imported but unused" warnings in
# this module while still preserving the other warnings, so don't check this
# module at all.

from .metrics import is_sklearn_available
from .processors import (
    DataProcessor,
    InputExample,
    InputFeatures,
    SingleSentenceClassificationProcessor,
    SquadExample,
    SquadFeatures,
    SquadV1Processor,
    SquadV2Processor,
    glue_convert_examples_to_features,
    glue_output_modes,
    glue_processors,
    glue_tasks_num_labels,
    squad_convert_examples_to_features,
    xnli_output_modes,
    xnli_processors,
    xnli_tasks_num_labels,
)


# The GLUE/XNLI metrics depend on scikit-learn, so they are only exported
# when it is installed.
if is_sklearn_available():
    from .metrics import glue_compute_metrics, xnli_compute_metrics
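
The sklearn-gated import at the bottom is why glue_compute_metrics and
xnli_compute_metrics may be missing at runtime. A minimal usage sketch that
mirrors the guard (the task name and toy arrays are illustrative):

    # Guard GLUE metric computation on the optional scikit-learn dependency.
    import numpy as np

    from transformers.data import is_sklearn_available

    if is_sklearn_available():
        from transformers.data import glue_compute_metrics

        preds = np.array([1, 0, 1, 1])   # toy model predictions
        labels = np.array([1, 0, 0, 1])  # toy gold labels
        # For "mrpc" this returns accuracy, F1, and their average.
        print(glue_compute_metrics("mrpc", preds, labels))
    else:
        print("scikit-learn is not installed; GLUE metrics are unavailable.")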
--------------------------------------------------------------------------------
/transformers/src/transformers/data/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/__pycache__/__init__.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/__pycache__/__init__.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/__pycache__/data_collator.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/__pycache__/data_collator.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/__pycache__/data_collator.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/__pycache__/data_collator.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/datasets/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | # There's no way to ignore "F401 '...' imported but unused" warnings in this
3 | # module while preserving other warnings, so don't check this module at all.
4 |
5 | from .glue import GlueDataset, GlueDataTrainingArguments
6 | from .language_modeling import (
7 | LineByLineTextDataset,
8 | LineByLineWithSOPTextDataset,
9 | TextDataset,
10 | TextDatasetForNextSentencePrediction,
11 | )
12 | from .squad import SquadDataset, SquadDataTrainingArguments
13 |
--------------------------------------------------------------------------------
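A hedged sketch of how these dataset classes are typically constructed: `GlueDataTrainingArguments` carries the task and data configuration, and `GlueDataset` tokenizes the examples and caches the features. The `./glue_data/MRPC` path is an assumption and must contain the standard GLUE tsv files for the task:

```python
from transformers import AutoTokenizer
from transformers.data.datasets import GlueDataset, GlueDataTrainingArguments

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")

# Assumed local path holding the MRPC train/dev tsv files.
args = GlueDataTrainingArguments(
    task_name="mrpc",
    data_dir="./glue_data/MRPC",
    max_seq_length=128,
)

# mode defaults to "train"; features are tokenized once and cached alongside the data.
train_dataset = GlueDataset(args, tokenizer=tokenizer)
print(len(train_dataset))
```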
/transformers/src/transformers/data/datasets/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/datasets/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/datasets/__pycache__/__init__.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/datasets/__pycache__/__init__.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/datasets/__pycache__/glue.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/datasets/__pycache__/glue.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/datasets/__pycache__/glue.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/datasets/__pycache__/glue.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/datasets/__pycache__/language_modeling.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/datasets/__pycache__/language_modeling.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/datasets/__pycache__/language_modeling.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/datasets/__pycache__/language_modeling.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/datasets/__pycache__/squad.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/datasets/__pycache__/squad.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/datasets/__pycache__/squad.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/datasets/__pycache__/squad.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/metrics/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/metrics/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/metrics/__pycache__/__init__.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/metrics/__pycache__/__init__.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/processors/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | # There's no way to ignore "F401 '...' imported but unused" warnings in this
3 | # module while preserving other warnings, so don't check this module at all.
4 |
5 | from .glue import glue_convert_examples_to_features, glue_output_modes, glue_processors, glue_tasks_num_labels
6 | from .squad import SquadExample, SquadFeatures, SquadV1Processor, SquadV2Processor, squad_convert_examples_to_features
7 | from .utils import DataProcessor, InputExample, InputFeatures, SingleSentenceClassificationProcessor
8 | from .xnli import xnli_output_modes, xnli_processors, xnli_tasks_num_labels
9 |
--------------------------------------------------------------------------------
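The processors module is the raw-data layer beneath the dataset classes above: each GLUE task name maps to a `DataProcessor` subclass, and individual rows become `InputExample`s. A small sketch; the example sentences are made up:

```python
from transformers.data.processors import InputExample, glue_processors

# Each GLUE task name maps to a DataProcessor subclass.
processor = glue_processors["mrpc"]()
print(processor.get_labels())  # ['0', '1']

# An InputExample pairs one or two text segments with an optional label.
example = InputExample(
    guid="train-1",
    text_a="He said hi.",
    text_b="He greeted me.",
    label="1",
)
```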
/transformers/src/transformers/data/processors/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/processors/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/processors/__pycache__/__init__.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/processors/__pycache__/__init__.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/processors/__pycache__/glue.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/processors/__pycache__/glue.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/processors/__pycache__/glue.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/processors/__pycache__/glue.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/processors/__pycache__/squad.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/processors/__pycache__/squad.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/processors/__pycache__/squad.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/processors/__pycache__/squad.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/processors/__pycache__/utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/processors/__pycache__/utils.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/processors/__pycache__/utils.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/processors/__pycache__/utils.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/processors/__pycache__/xnli.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/processors/__pycache__/xnli.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/data/processors/__pycache__/xnli.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/data/processors/__pycache__/xnli.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/utils/__init__.py
--------------------------------------------------------------------------------
/transformers/src/transformers/utils/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/utils/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/utils/__pycache__/__init__.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/utils/__pycache__/__init__.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/utils/__pycache__/logging.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/utils/__pycache__/logging.cpython-37.pyc
--------------------------------------------------------------------------------
/transformers/src/transformers/utils/__pycache__/logging.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/src/transformers/utils/__pycache__/logging.cpython-38.pyc
--------------------------------------------------------------------------------
/transformers/templates/adding_a_new_example_script/README.md:
--------------------------------------------------------------------------------
1 | # How to add a new example script in 🤗Transformers
2 |
3 | This folder provides a template for adding a new example script that implements a training or inference task with the models in the 🤗Transformers library.
4 | Add tests!
5 |
6 |
7 | This folder can be put in a subdirectory named after your example, like `examples/deebert`.
8 |
9 |
10 | Best practices:
11 | - Use `Trainer`/`TFTrainer`.
12 | - Write an `@slow` test that checks that your model can train on one batch and reach a low loss (see the sketch after this file).
13 |   - This test should use CUDA if it is available (e.g. by checking `transformers.torch_device`).
14 | - Add an `eval_xxx.py` script that can evaluate a pretrained checkpoint.
15 | - Tweet about your new example with a carbon screenshot of how to run it and tag @huggingface.
16 |
--------------------------------------------------------------------------------
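For the `@slow` one-batch training test the best-practices list asks for, a hedged sketch follows; the checkpoint choice and the loss threshold are assumptions, not a prescribed recipe:

```python
import torch

from transformers import AutoModelForSequenceClassification, AutoTokenizer
from transformers.testing_utils import slow, torch_device


@slow
def test_model_can_overfit_one_batch():
    tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased")
    model = AutoModelForSequenceClassification.from_pretrained("distilbert-base-uncased")
    model.to(torch_device)

    batch = tokenizer(["a tiny batch"], return_tensors="pt").to(torch_device)
    labels = torch.tensor([0], device=torch_device)
    optimizer = torch.optim.AdamW(model.parameters(), lr=5e-4)

    # Repeatedly fitting the same batch should drive the loss down quickly.
    for _ in range(10):
        loss = model(**batch, labels=labels, return_dict=True).loss
        loss.backward()
        optimizer.step()
        optimizer.zero_grad()

    assert loss.item() < 0.5
```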
/transformers/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/tests/__init__.py
--------------------------------------------------------------------------------
/transformers/tests/conftest.py:
--------------------------------------------------------------------------------
1 | # tests directory-specific settings - this file is run automatically
2 | # by pytest before any tests are run
3 |
4 | import sys
5 | import warnings
6 | from os.path import abspath, dirname, join
7 |
8 |
9 | # allow having multiple repository checkouts without needing to remember to rerun
10 | # 'pip install -e .[dev]' when switching between checkouts to run the tests.
11 | git_repo_path = abspath(join(dirname(dirname(__file__)), "src"))
12 | sys.path.insert(1, git_repo_path)
13 |
14 |
15 | # silence FutureWarnings in tests, since we often can't act on them until
16 | # they become normal warnings - i.e. the tests still need to exercise the current functionality
17 | warnings.simplefilter(action="ignore", category=FutureWarning)
18 |
--------------------------------------------------------------------------------
/transformers/tests/fixtures/dummy-config.json:
--------------------------------------------------------------------------------
1 | {
2 | "model_type": "roberta"
3 | }
--------------------------------------------------------------------------------
/transformers/tests/fixtures/empty.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/tests/fixtures/empty.txt
--------------------------------------------------------------------------------
/transformers/tests/fixtures/input.txt:
--------------------------------------------------------------------------------
1 | Who was Jim Henson ? ||| Jim Henson was a puppeteer
2 |
--------------------------------------------------------------------------------
/transformers/tests/fixtures/spiece.model:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/tests/fixtures/spiece.model
--------------------------------------------------------------------------------
/transformers/tests/fixtures/test_sentencepiece.model:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SALT-NLP/Structure-Aware-BART/90419667a9d1034bcb064312722aeadb0635ee20/transformers/tests/fixtures/test_sentencepiece.model
--------------------------------------------------------------------------------
/transformers/tests/fixtures/tests_samples/.gitignore:
--------------------------------------------------------------------------------
1 | *.*
2 | cache*
3 | temp*
4 | !*.txt
5 | !*.tsv
6 | !*.json
7 | !.gitignore
--------------------------------------------------------------------------------
/transformers/tests/fixtures/tests_samples/GermEval/labels.txt:
--------------------------------------------------------------------------------
1 | B-LOC
2 | B-LOCderiv
3 | B-LOCpart
4 | B-ORG
5 | B-ORGderiv
6 | B-ORGpart
7 | B-OTH
8 | B-OTHderiv
9 | B-OTHpart
10 | B-PER
11 | B-PERderiv
12 | B-PERpart
13 | I-LOC
14 | I-LOCderiv
15 | I-LOCpart
16 | I-ORG
17 | I-ORGderiv
18 | I-ORGpart
19 | I-OTH
20 | I-OTHderiv
21 | I-OTHpart
22 | I-PER
23 | I-PERderiv
24 | I-PERpart
25 | O
26 |
--------------------------------------------------------------------------------
/transformers/tests/test_activations_tf.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from transformers import is_tf_available
4 | from transformers.testing_utils import require_tf
5 |
6 |
7 | if is_tf_available():
8 | from transformers.activations_tf import get_tf_activation
9 |
10 |
11 | @require_tf
12 | class TestTFActivations(unittest.TestCase):
13 | def test_get_activation(self):
14 | get_tf_activation("swish")
15 | get_tf_activation("gelu")
16 | get_tf_activation("relu")
17 | get_tf_activation("tanh")
18 | get_tf_activation("gelu_new")
19 | get_tf_activation("gelu_fast")
20 | get_tf_activation("mish")
21 | with self.assertRaises(KeyError):
22 | get_tf_activation("bogus")
23 | with self.assertRaises(KeyError):
24 | get_tf_activation(None)
25 |
--------------------------------------------------------------------------------