├── .github
│   └── workflows
│       └── php.yml
├── .gitignore
├── LICENSE
├── README.md
├── composer.json
├── composer.lock
├── src
│   ├── Gpt3Tokenizer.php
│   ├── Gpt3TokenizerConfig.php
│   ├── Merges.php
│   ├── Vocab.php
│   └── pretrained_vocab_files
│       ├── README.md
│       ├── merges.txt
│       └── vocab.json
└── tests
    ├── Gpt3TokenizerTest.php
    └── __fixtures__
        ├── long_text.txt
        ├── merges_example.txt
        └── vocab_example.json

Raw file links (Gioni06/GPT3Tokenizer, HEAD):

/.github/workflows/php.yml: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/.github/workflows/php.yml
/.gitignore: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/.gitignore
/LICENSE: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/LICENSE
/README.md: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/README.md
/composer.json: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/composer.json
/composer.lock: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/composer.lock
/src/Gpt3Tokenizer.php: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/src/Gpt3Tokenizer.php
/src/Gpt3TokenizerConfig.php: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/src/Gpt3TokenizerConfig.php
/src/Merges.php: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/src/Merges.php
/src/Vocab.php: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/src/Vocab.php
/src/pretrained_vocab_files/README.md: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/src/pretrained_vocab_files/README.md
/src/pretrained_vocab_files/merges.txt: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/src/pretrained_vocab_files/merges.txt
/src/pretrained_vocab_files/vocab.json: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/src/pretrained_vocab_files/vocab.json
/tests/Gpt3TokenizerTest.php: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/tests/Gpt3TokenizerTest.php
/tests/__fixtures__/long_text.txt: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/tests/__fixtures__/long_text.txt
/tests/__fixtures__/merges_example.txt: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/tests/__fixtures__/merges_example.txt
/tests/__fixtures__/vocab_example.json: https://raw.githubusercontent.com/Gioni06/GPT3Tokenizer/HEAD/tests/__fixtures__/vocab_example.json
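For orientation, the classes under src/ wire together roughly as follows: Gpt3TokenizerConfig points the tokenizer at the pretrained vocab.json and merges.txt, and Gpt3Tokenizer performs the byte-pair encoding against them. The sketch below is a minimal, hypothetical usage example based only on those class names and the package's README; the Gioni06\Gpt3Tokenizer namespace and the encode/decode/count method names are assumptions, not verified against the source files linked above.

<?php

require __DIR__ . '/vendor/autoload.php';

use Gioni06\Gpt3Tokenizer\Gpt3TokenizerConfig;
use Gioni06\Gpt3Tokenizer\Gpt3Tokenizer;

// The default config is assumed to load src/pretrained_vocab_files/{vocab.json, merges.txt}.
$config = new Gpt3TokenizerConfig();
$tokenizer = new Gpt3Tokenizer($config);

$text = "Hello, world!";

// Encode text into BPE token ids (e.g. [15496, 11, 995, 0] with the GPT-2/GPT-3 vocabulary).
$tokens = $tokenizer->encode($text);

// Decode ids back into the original string.
$roundTrip = $tokenizer->decode($tokens);

// Count tokens without keeping the full id array around.
$count = $tokenizer->count($text);

echo $roundTrip . PHP_EOL; // "Hello, world!"
echo $count . PHP_EOL;     // 4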