├── .github └── workflows │ └── php-unittest.yml ├── .gitignore ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── LICENSE ├── README.md ├── UPGRADE.md ├── composer.json ├── composer.lock ├── config └── robots-txt.php ├── phpcs.xml ├── phpstan.neon ├── phpunit.xml ├── src ├── Controllers │ └── RobotsTxtController.php ├── Providers │ └── RobotsTxtProvider.php ├── RobotsTxtManager.php └── routes.php └── tests └── RobotsTxtTest.php /.github/workflows/php-unittest.yml: -------------------------------------------------------------------------------- 1 | name: Run tests 2 | 3 | on: push 4 | 5 | jobs: 6 | php-tests: 7 | runs-on: ubuntu-latest 8 | 9 | strategy: 10 | matrix: 11 | php: [ 8.0, 8.1, 8.2 ] 12 | laravel: [ '9.*', '10.*', '11.*', '12.*' ] 13 | dependency-version: [ prefer-lowest, prefer-stable ] 14 | include: 15 | - laravel: 10.* 16 | testbench: 8.* 17 | phpunit: 10.* 18 | - laravel: 9.* 19 | testbench: 7.* 20 | phpunit: 9.* 21 | - laravel: 11.* 22 | testbench: 9.* 23 | phpunit: 10.* 24 | - laravel: 12.* 25 | testbench: 10.* 26 | phpunit: 11.* 27 | exclude: 28 | - laravel: 10.* 29 | php: 8.0 30 | - laravel: 11.* 31 | php: 8.0 32 | - laravel: 11.* 33 | php: 8.1 34 | - laravel: 12.* 35 | php: 8.0 36 | - laravel: 12.* 37 | php: 8.1 38 | 39 | name: P${{ matrix.php }} - L${{ matrix.laravel }} - ${{ matrix.dependency-version }} 40 | 41 | steps: 42 | - name: Checkout code 43 | uses: actions/checkout@v1 44 | 45 | - name: Setup PHP 46 | uses: shivammathur/setup-php@v2 47 | with: 48 | php-version: ${{ matrix.php }} 49 | extensions: dom, curl, libxml, mbstring, zip, pcntl, pdo, sqlite, pdo_sqlite, bcmath, soap, intl, gd, exif, iconv, imagick 50 | coverage: none 51 | 52 | - name: Install dependencies 53 | run: | 54 | composer require "laravel/framework:${{ matrix.laravel }}" "orchestra/testbench:${{ matrix.testbench }}" "phpunit/phpunit:${{ matrix.phpunit }}" --no-interaction --no-update 55 | composer update --${{ matrix.dependency-version }} --prefer-dist --no-interaction --no-suggest 56 | - name: Execute tests 57 | run: vendor/bin/phpunit 58 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /vendor 2 | composer.phar 3 | .DS_Store 4 | .phpunit.result.cache 5 | /coverage 6 | /.idea -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
6 | 7 | ## [5.2.0] - 2025-02-25 8 | 9 | ### Added 10 | 11 | - Laravel 12 support (thanks [IT-Joris](https://github.com/IT-Joris)) 12 | 13 | ## [5.1.0] - 2024-09-10 14 | 15 | ### Added 16 | 17 | - Laravel 11 support (thanks [IT-Joris](https://github.com/IT-Joris)) 18 | 19 | ## [5.0.2] - 2023-03-13 20 | 21 | ### Changed 22 | 23 | - Migrate PHPUnit config file to PHPUnit 10 24 | 25 | ## [5.0.1] - 2023-03-13 26 | 27 | ### Changed 28 | 29 | - Update shivammathur/setup-php GitHub action dependency to v2 30 | 31 | ## [5.0.0] - 2023-03-13 32 | 33 | ### Added 34 | 35 | - Laravel 10 support 36 | 37 | ### Removed 38 | 39 | - Laravel 8 support as it is EOL 40 | 41 | ## [4.0.0] - 2022-04-05 42 | 43 | ### Added 44 | 45 | - Laravel 9 support 46 | - PHP 7.3 as a dependency as Laravel 8 still supports it 47 | 48 | ### Changed 49 | 50 | - PHPUnit configuration 51 | - Analysis script alias 52 | 53 | ### Removed 54 | 55 | - Laravel 5, 6 and 7 support as they are no longer officially supported 56 | 57 | ## [3.1.0] - 2020-09-30 58 | 59 | ### Added 60 | 61 | - Added Laravel 8 support thanks to @annejan 62 | 63 | ## [3.0.0] - 2020-03-05 64 | 65 | ### Added 66 | 67 | - Added Laravel 7 support 68 | 69 | ## [2.0.0] - 2020-01-02 70 | 71 | ### Added 72 | 73 | - Sitemaps support 🤩 74 | - 'allow' path directive support. 75 | 76 | ### Changed 77 | 78 | - Refactored a whole lot of code in the inner working, including the configuration of the sitemap paths. If you are upgrading from v1 to v2, please see UPGRADE.MD. 79 | - Namespace now matches the Github vendor name. 80 | 81 | ## [1.1.0] - 2017-01-16 82 | 83 | ### Changed 84 | 85 | - Changed Github/vendor username from gverschuur to verschuur. (The namespace in the code is still the same for now). 86 | 87 | ## [1.0.0] - 2016-10-04 88 | 89 | ### Added 90 | 91 | - Everything for initial release :shipit: 92 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 
11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at govert.verschuur@gmail.com. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 
67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | Copyright (c) 2019 Govert Verschuur 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 5 | 6 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 7 | 8 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Run tests](https://github.com/verschuur/laravel-robotstxt/workflows/Run%20tests/badge.svg?branch=master) ![Code Climate issues](https://img.shields.io/codeclimate/issues/verschuur/laravel-robotstxt.svg?style=flat-square) ![Code Climate maintainability](https://img.shields.io/codeclimate/maintainability/verschuur/laravel-robotstxt.svg?style=flat-square) ![Scrutinizer](https://img.shields.io/scrutinizer/g/verschuur/laravel-robotstxt.svg?style=flat-square) 2 | 3 |

Dynamic robots.txt ServiceProvider for Laravel 🤖

4 | 5 | - [Installation](#installation) 6 | - [Composer](#composer) 7 | - [Manual](#manual) 8 | - [Service provider registration](#service-provider-registration) 9 | - [Usage](#usage) 10 | - [Basic usage](#basic-usage) 11 | - [Custom settings](#custom-settings) 12 | - [Examples](#examples) 13 | - [Allow directive](#allow-directive) 14 | - [Sitemaps](#sitemaps) 15 | - [The standard production configuration](#the-standard-production-configuration) 16 | - [Adding multiple sitemaps](#adding-multiple-sitemaps) 17 | - [Compatibility](#compatibility) 18 | - [Testing](#testing) 19 | - [robots.txt reference](#robotstxt-reference) 20 | 21 | # Installation 22 | 23 | ## Composer 24 | 25 | ```bash 26 | composer require verschuur/laravel-robotstxt 27 | ``` 28 | 29 | ## Manual 30 | 31 | Add the following to your `composer.json` and then run `composer install`. 32 | 33 | ```php 34 | { 35 | "require": { 36 | "verschuur/laravel-robotstxt": "^5.0" 37 | } 38 | } 39 | ``` 40 | 41 | ## Service provider registration 42 | 43 | This package supports Laravel's service provider auto-discovery, so no manual registration is required. If you wish to register the package manually, add the ServiceProvider to the providers array in `config/app.php`. 44 | 45 | ```php 46 | Verschuur\Laravel\RobotsTxt\Providers\RobotsTxtProvider::class 47 | ``` 48 | 49 | # Usage 50 | 51 | ## Basic usage 52 | 53 | This package adds a `/robots.txt` route to your application. Remember to remove the physical `robots.txt` file from your `/public` dir, or else it will take precedence over Laravel's route and this package will not work. 54 | 55 | By default, the `production` environment will show 56 | 57 | ```txt 58 | User-agent: * 59 | Disallow: 60 | ``` 61 | 62 | while every other environment will show 63 | 64 | ```txt 65 | User-agent: * 66 | Disallow: / 67 | ``` 68 | 69 | In other words, the default install allows all robots on the production environment, while disallowing all robots on every other environment. 70 | 71 | ## Custom settings 72 | 73 | If you need custom settings, publish the configuration file 74 | 75 | ```bash 76 | php artisan vendor:publish --provider="Verschuur\Laravel\RobotsTxt\Providers\RobotsTxtProvider" 77 | ``` 78 | 79 | This will copy the `robots-txt.php` config file to your app's `config` folder. In this file you will find the following array structure 80 | 81 | ```php 82 | 'environments' => [ 83 | '{environment name}' => [ 84 | 'paths' => [ 85 | '{robot name}' => [ 86 | 'disallow' => [ 87 | '' 88 | ], 89 | 'allow' => [] 90 | ], 91 | ] 92 | ] 93 | ] 94 | ``` 95 | 96 | In which: 97 | 98 | - `{environment name}`: the environment for which to define the paths. 99 | - `{robot name}`: the robot for which to define the paths. 100 | - `disallow`: all entries which will be used by the `disallow` directive. 101 | - `allow`: all entries which will be used by the `allow` directive. 102 | 103 | By default, the environment name is set to `production` with a robot name of `*` and a disallow entry consisting of an empty string. This will allow all bots to access all paths on the production environment. 104 | 105 | **Note:** If you do not define any environments in this configuration file (i.e. an empty configuration), the default will always be to disallow all bots for all paths. 106 | 107 | ## Examples 108 | 109 | For brevity, the `environment` array key will be disregarded in these examples. 110 | 111 | Allow all paths for all robots on production, and disallow all paths for every robot in staging. 
112 | 113 | ```php 114 | 'production' => [ 115 | 'paths' => [ 116 | '*' => [ 117 | 'disallow' => [ 118 | '' 119 | ] 120 | ] 121 | ] 122 | ], 123 | 'staging' => [ 124 | 'paths' => [ 125 | '*' => [ 126 | 'disallow' => [ 127 | '/' 128 | ] 129 | ] 130 | ] 131 | ] 132 | ``` 133 | 134 | Allow all paths for robot _bender_ on production, but disallow `/admin` and `/images` on production for robot _flexo_. 135 | 136 | ```php 137 | 'production' => [ 138 | 'paths' => [ 139 | 'bender' => [ 140 | 'disallow' => [ 141 | '' 142 | ] 143 | ], 144 | 'flexo' => [ 145 | 'disallow' => [ 146 | '/admin', 147 | '/images' 148 | ] 149 | ] 150 | ] 151 | ], 152 | ``` 153 | 154 | ### Allow directive 155 | 156 | Besides the more standard `disallow` directive, the `allow` directive is also supported. 157 | 158 | Allow a path, but disallow sub paths: 159 | 160 | ```php 161 | 'production' => [ 162 | 'paths' => [ 163 | '*' => [ 164 | 'disallow' => [ 165 | '/foo/bar' 166 | ], 167 | 'allow' => [ 168 | '/foo' 169 | ] 170 | ] 171 | ] 172 | ], 173 | ``` 174 | 175 | When the file is rendered, the `disallow` directives will always be placed before the `allow` directives. 176 | 177 | If you don't need one or the other directive, and you wish to keep the configuration file clean, you can simply remove that key from the array. 178 | 179 | ## Sitemaps 180 | 181 | This package also allows you to add sitemaps to the robots.txt file. By default, the production environment will add a sitemap.xml entry to the file. You can remove this default entry from the `sitemaps` array if you don't need it. 182 | 183 | Because sitemap entries always need to be absolute URLs, they are automatically wrapped using [Laravel's url() helper function](https://laravel.com/docs/7.x/helpers#method-url). The sitemap entries in the config file should be relative to the webroot. 184 | 185 | ### The standard production configuration 186 | 187 | ```php 188 | 'environments' => [ 189 | 'production' => [ 190 | 'sitemaps' => [ 191 | 'sitemap.xml' 192 | ] 193 | ] 194 | ] 195 | ``` 196 | 197 | ### Adding multiple sitemaps 198 | 199 | ```php 200 | 'environments' => [ 201 | 'production' => [ 202 | 'sitemaps' => [ 203 | 'sitemap-articles.xml', 204 | 'sitemap-products.xml', 205 | 'sitemap-etcetera.xml' 206 | ] 207 | ] 208 | ] 209 | ``` 210 | 211 | # Compatibility 212 | 213 | This package is compatible with Laravel 9, 10, 11 and 12. For a complete overview of supported Laravel and PHP versions, please refer to the ['Run tests' workflow](https://github.com/verschuur/laravel-robotstxt/actions). 214 | 215 | # Testing 216 | 217 | PHPUnit test cases are provided in `/tests`. Run the tests through `composer run test` or `vendor/bin/phpunit --configuration phpunit.xml`. 218 | 219 | # robots.txt reference 220 | 221 | The following reference was used while creating this package: 222 | 223 | 224 | -------------------------------------------------------------------------------- /UPGRADE.md: -------------------------------------------------------------------------------- 1 |

robots.txt upgrade guide 🛠

2 | 3 | - [Upgrading from v1 to v2](#upgrading-from-v1-to-v2) 4 | - [Compatibility](#compatibility) 5 | - [Service provider registration](#service-provider-registration) 6 | - [Namespace](#namespace) 7 | - [Problems during installation](#problems-during-installation) 8 | - [Path structure](#path-structure) 9 | - [Example](#example) 10 | 11 | # Upgrading from v1 to v2 12 | 13 | ## Compatibility 14 | 15 | Due to a lot of refactoring, this version is **not** compatible with v1. 16 | 17 | ## Service provider registration 18 | 19 | This package now supports Laravel's package auto-discovery. You can remove the line: 20 | 21 | `Gverschuur\RobotsTxt\RobotsTxtProvider::class` from your service providers in the `config/app.php` file. 22 | 23 | ## Namespace 24 | 25 | - If you included any files from the package yourself (by extending them, etc.), change the namespace from `Gverschuur\RobotsTxt` to `Verschuur\RobotsTxt`. 26 | 27 | ## Problems during installation 28 | 29 | Due to the renaming of the namespace, there might be an error during installation due to conflicts. If this happens, first remove the provider from the app config file, then dump the autoloader, and finally rerun the installation. 30 | 31 | ## Path structure 32 | 33 | This version changes how the paths are defined in the config file. The v1 structure was as follows: 34 | 35 | _paths -> {environment name} -> {robot name} -> {disallowed entries}_ 36 | 37 | e.g.: _paths -> production -> *robot name* -> Disallow all_ 38 | 39 | This has been changed in v2. The order is now: 40 | 41 | _environments -> {environment name} -> paths -> {robot name} -> {disallow/allow entries}_ 42 | 43 | e.g.: _environments -> production -> paths -> *{robot name}* -> Disallow all_ 44 | 45 | ### Example 46 | 47 | For example, let's say your configuration is as follows: 48 | 49 | ```php 50 | 'paths' => [ 51 | 'production' => [ 52 | 'bender' => [ 53 | '' 54 | ], 55 | 'flexo' => [ 56 | 'images' 57 | ] 58 | ], 59 | 'staging' => [ 60 | '*' => [ 61 | '/' 62 | ] 63 | ] 64 | ] 65 | ``` 66 | 67 | Then the new configuration would be: 68 | 69 | ```php 70 | 'environments' => [ 71 | 'production' => [ 72 | 'paths' => [ 73 | 'bender' => [ 74 | 'disallow' => [ 75 | '' 76 | ], 77 | 'allow' => [] 78 | ], 79 | 'flexo' => [ 80 | 'disallow' => [ 81 | 'images' 82 | ], 83 | 'allow' => [] 84 | ] 85 | ] 86 | ], 87 | 'staging' => [ 88 | 'paths' => [ 89 | '*' => [ 90 | 'disallow' => [ 91 | '/' 92 | ], 93 | ] 94 | ] 95 | ] 96 | ] 97 | ``` -------------------------------------------------------------------------------- /composer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "verschuur/laravel-robotstxt", 3 | "description": "Set the robots.txt content dynamically based on the Laravel app environment.", 4 | "keywords": [ 5 | "php", 6 | "laravel", 7 | "robots.txt" 8 | ], 9 | "authors": [ 10 | { 11 | "name": "Govert Verschuur", 12 | "email": "govert.verschuur@gmail.com" 13 | } 14 | ], 15 | "require": { 16 | "php": "^8.0", 17 | "laravel/framework": "^9.0|^10.0|^11.0|^12.0" 18 | }, 19 | "autoload": { 20 | "psr-4": { 21 | "Verschuur\\Laravel\\RobotsTxt\\": "src/" 22 | } 23 | }, 24 | "autoload-dev": { 25 | "psr-4": { 26 | "Verschuur\\Laravel\\RobotsTxt\\Tests\\": "tests/" 27 | } 28 | }, 29 | "license": "MIT", 30 | "require-dev": { 31 | "phpunit/phpunit": "^9.0|^10.0|^11.5", 32 | "orchestra/testbench": "^7.0|^8.0|^9.0|^10.0", 33 | "phpstan/phpstan": "^1.4" 34 | }, 35 | "scripts": { 36 | "test": "vendor/bin/phpunit --configuration phpunit.xml 
--colors=always", 37 | "analyse": "vendor/bin/phpstan analyse src tests --level 5" 38 | }, 39 | "extra": { 40 | "laravel": { 41 | "providers": [ 42 | "Verschuur\\Laravel\\RobotsTxt\\Providers\\RobotsTxtProvider" 43 | ] 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /config/robots-txt.php: -------------------------------------------------------------------------------- 1 | [ 4 | 'production' => [ 5 | 'paths' => [ 6 | '*' => [ 7 | 'disallow' => [ 8 | '' 9 | ], 10 | 'allow' => [] 11 | ], 12 | ], 13 | 'sitemaps' => [ 14 | 'sitemap.xml' 15 | ] 16 | ] 17 | ] 18 | ]; -------------------------------------------------------------------------------- /phpcs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | Ignore required CamelCaps 4 | 5 | */tests/* 6 | 7 | -------------------------------------------------------------------------------- /phpstan.neon: -------------------------------------------------------------------------------- 1 | parameters: 2 | ignoreErrors: 3 | - '#Call to static method get\(\) on an unknown class Route\.#' -------------------------------------------------------------------------------- /phpunit.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | ./src 6 | 7 | 8 | ./src/routes.php 9 | 10 | 11 | 12 | 13 | ./tests 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /src/Controllers/RobotsTxtController.php: -------------------------------------------------------------------------------- 1 | build()); 20 | 21 | // output the entire robots.txt 22 | return response($robots, 200) 23 | ->header('Content-Type', 'text/plain; charset=UTF-8'); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /src/Providers/RobotsTxtProvider.php: -------------------------------------------------------------------------------- 1 | publishes([ 18 | __DIR__.'/../../config/robots-txt.php' => config_path('robots-txt.php'), 19 | ]); 20 | } 21 | 22 | /** 23 | * Register the application services. 24 | * 25 | * @return void 26 | */ 27 | public function register() 28 | { 29 | include __DIR__ . '/../routes.php'; 30 | $this->app->make('Verschuur\Laravel\RobotsTxt\Controllers\RobotsTxtController'); 31 | 32 | $this->mergeConfigFrom(__DIR__.'/../../config/robots-txt.php', 'robots-txt'); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/RobotsTxtManager.php: -------------------------------------------------------------------------------- 1 | currentEnvironment = config('app.env'); 38 | $this->definedPaths = config('robots-txt.environments.'.$this->currentEnvironment.'.paths'); 39 | $this->definedSitemaps = config('robots-txt.environments.'.$this->currentEnvironment.'.sitemaps'); 40 | } 41 | 42 | /** 43 | * Build the array containing all the entries for the txt file. 44 | * 45 | * @return array 46 | */ 47 | public function build(): array 48 | { 49 | $paths = ($this->definedPaths) ? $this->getPaths() : $this->defaultRobot(); 50 | $sitemaps = ($this->definedSitemaps) ? 
$this->getSitemaps() : []; 51 | 52 | return array_merge($paths, $sitemaps); 53 | } 54 | 55 | /** 56 | * Returns 'Disallow /' as the default for every robot 57 | * 58 | * @return array user agent and disallow string 59 | */ 60 | protected function defaultRobot(): array 61 | { 62 | return ['User-agent: *', 'Disallow: /']; 63 | } 64 | 65 | /** 66 | * Assemble all the defined paths from the config. 67 | * 68 | * Loop through all the defined paths, 69 | * creating an array which matches the order of the path entries in the txt file 70 | * 71 | * @return array 72 | */ 73 | protected function getPaths(): array 74 | { 75 | // For each user agent, get the user agent name and the paths for the agent, 76 | // adding them to the array 77 | $entries = []; 78 | 79 | foreach ($this->definedPaths as $agent => $paths) { 80 | $entries[] = 'User-agent: ' . $agent; 81 | 82 | $entries = \array_merge($entries, $this->parsePaths('disallow', $paths)); 83 | $entries = \array_merge($entries, $this->parsePaths('allow', $paths)); 84 | } 85 | 86 | return $entries; 87 | } 88 | 89 | /** 90 | * Parse defined paths into sitemap entries 91 | * 92 | * @param string $directive The directive name (disallow/allow) 93 | * @param array $paths Array of all the paths 94 | * @return array Array containing the sitemap entries 95 | */ 96 | protected function parsePaths(string $directive, array $paths): array 97 | { 98 | $entries = []; 99 | 100 | if (array_key_exists($directive, $paths)) { 101 | foreach ($paths[$directive] as $path) { 102 | $entries[] = sprintf('%s: %s', ucfirst($directive), $path); 103 | } 104 | } 105 | 106 | return $entries; 107 | } 108 | 109 | /** 110 | * Assemble all the defined sitemaps from the config. 111 | * 112 | * Loop through all the defined sitemaps, 113 | * creating an array which matches the order of the sitemap entries in the txt file 114 | * 115 | * @return array 116 | */ 117 | protected function getSitemaps(): array 118 | { 119 | $entries = []; 120 | 121 | foreach ($this->definedSitemaps as $sitemap) { 122 | // Sitemaps should always use a absolute url. 123 | // Combinding the sitemap paths with Laravel's url() function will do nicely. 124 | $entries[] = 'Sitemap: ' . url($sitemap); 125 | } 126 | 127 | return $entries; 128 | } 129 | } 130 | -------------------------------------------------------------------------------- /src/routes.php: -------------------------------------------------------------------------------- 1 | name('robots.txt'); 5 | -------------------------------------------------------------------------------- /tests/RobotsTxtTest.php: -------------------------------------------------------------------------------- 1 | withoutExceptionHandling(); 15 | } 16 | /** 17 | * Test that given an environment of 'production', it returns the default allow all 18 | */ 19 | public function test_has_default_response_for_production_env() 20 | { 21 | $this->app['config']->set('app.env', 'production'); 22 | 23 | $response = $this->get('/robots.txt'); 24 | 25 | $response->assertSeeTextInOrder([ 26 | 'User-agent: *', 27 | 'Disallow: ' 28 | ]); 29 | $response->assertDontSeeText('Disallow: /' . PHP_EOL); 30 | } 31 | 32 | /** 33 | * Test that given any other environment than 'production', it returns the default allow none 34 | */ 35 | public function test_has_default_response_for_non_production_env() 36 | { 37 | $this->app['config']->set('app.env', 'staging'); 38 | 39 | $response = $this->get('/robots.txt'); 40 | 41 | $response->assertSeeText('User-agent: *' . PHP_EOL . 
'Disallow: /'); 42 | $response->assertDontSeeText('Disallow: ' . PHP_EOL); 43 | } 44 | 45 | /** 46 | * Test that custom paths will overwrite the defaults 47 | */ 48 | public function test_shows_custom_set_paths() 49 | { 50 | $paths = [ 51 | '*' => [ 52 | 'disallow' => [ 53 | '/foobar', 54 | ], 55 | 'allow' => [ 56 | '/fizzbuzz' 57 | ] 58 | ] 59 | ]; 60 | 61 | $this->setConfig($paths); 62 | 63 | $response = $this->get('/robots.txt'); 64 | 65 | $response->assertSeeTextInOrder([ 66 | 'User-agent: *'. PHP_EOL, 67 | 'Disallow: /foobar' . PHP_EOL, 68 | 'Allow: /fizzbuzz']); 69 | $response->assertDontSeeText('Disallow: ' . PHP_EOL); 70 | } 71 | 72 | /** 73 | * Test that given multiple user agents, it will return multiple user agent entries 74 | */ 75 | public function test_shows_multiple_user_agents() 76 | { 77 | $bots = [ 78 | 'bot1' => [], 79 | 'bot2' => [] 80 | ]; 81 | 82 | $this->setConfig($bots); 83 | 84 | $this->app['config']->set('app.env', 'production'); 85 | $this->app['config']->set('robots-txt.environments.production.paths', $bots); 86 | 87 | $response = $this->get('/robots.txt'); 88 | 89 | $response->assertSeeTextInOrder([ 90 | 'User-agent: bot1', 91 | 'User-agent: bot2' 92 | ]); 93 | $response->assertDontSeeText('Disallow: ' . PHP_EOL); 94 | $response->assertDontSeeText('Disallow: /' . PHP_EOL); 95 | } 96 | 97 | /** 98 | * Test that given multiple paths for a user agent, 99 | * it will return multiple path entries for a single user agent entry 100 | */ 101 | public function test_shows_multiple_paths_per_agent() 102 | { 103 | $paths = [ 104 | 'bender' => [ 105 | 'disallow' => [ 106 | '/foobar', 107 | '/barfoo', 108 | ], 109 | 'allow' => [ 110 | '/fizzbuzz', 111 | '/buzzfizz' 112 | ] 113 | ] 114 | ]; 115 | 116 | $this->setConfig($paths); 117 | 118 | $response = $this->get('/robots.txt'); 119 | 120 | $response->assertSeeTextInOrder([ 121 | 'User-agent: bender' . PHP_EOL , 122 | 'Disallow: /foobar' . PHP_EOL, 123 | 'Disallow: /barfoo' . PHP_EOL, 124 | 'Allow: /fizzbuzz' . PHP_EOL, 125 | 'Allow: /buzzfizz' . PHP_EOL, 126 | ]); 127 | $response->assertDontSeeText('Disallow: ' . PHP_EOL); 128 | $response->assertDontSeeText('Disallow: /' . PHP_EOL); 129 | } 130 | 131 | /** 132 | * Test that given multiple paths for multiple user agents, 133 | * it will return multiple path entries for multiple user agent entries 134 | */ 135 | public function test_shows_multiple_paths_for_multiple_agents() 136 | { 137 | $paths = [ 138 | 'bender' => [ 139 | 'disallow' => [ 140 | '/foobar', 141 | '/barfoo', 142 | ], 143 | 'allow' => [ 144 | '/fizzbuzz', 145 | '/buzzfizz' 146 | ] 147 | ], 148 | 'flexo' => [ 149 | 'disallow' => [ 150 | '/fizzbuzz', 151 | '/buzzfizz' 152 | ], 153 | 'allow' => [ 154 | '/foobar', 155 | '/barfoo', 156 | ] 157 | ] 158 | ]; 159 | 160 | $this->setConfig($paths); 161 | 162 | $response = $this->get('/robots.txt'); 163 | 164 | $response->assertSeeTextInOrder([ 165 | 'User-agent: bender' . PHP_EOL , 166 | 'Disallow: /foobar' . PHP_EOL, 167 | 'Disallow: /barfoo' . PHP_EOL, 168 | 'Allow: /fizzbuzz' . PHP_EOL, 169 | 'Allow: /buzzfizz' . PHP_EOL, 170 | 171 | 'User-agent: flexo' . PHP_EOL , 172 | 'Disallow: /fizzbuzz' . PHP_EOL, 173 | 'Disallow: /buzzfizz' . PHP_EOL, 174 | 'Allow: /foobar' . PHP_EOL, 175 | 'Allow: /barfoo' . PHP_EOL, 176 | ]); 177 | $response->assertDontSeeText('Disallow: ' . PHP_EOL); 178 | $response->assertDontSeeText('Disallow: /' . 
PHP_EOL); 179 | } 180 | 181 | /** 182 | * Test that given multiple environments, it returns the correct path for the given environment 183 | */ 184 | public function test_shows_correct_paths_for_multiple_environments() 185 | { 186 | $environments = [ 187 | 'production' => [ 188 | 'paths' => [ 189 | '*' => [ 190 | 'disallow' => [ 191 | '/foobar' 192 | ] 193 | ], 194 | ] 195 | ], 196 | 'staging' => [ 197 | 'paths' => [ 198 | '*' => [ 199 | 'allow' => [ 200 | '/barfoo' 201 | ] 202 | ], 203 | ] 204 | ] 205 | ]; 206 | 207 | 208 | $this->app['config']->set('robots-txt.environments', $environments); 209 | 210 | // Test env #1 211 | $this->app['config']->set('app.env', 'production'); 212 | 213 | $response = $this->get('/robots.txt'); 214 | 215 | $response->assertSeeTextInOrder([ 216 | 'User-agent: *' . PHP_EOL, 217 | 'Disallow: /foobar', 218 | ]); 219 | $response->assertDontSeeText('Allow: /barfoo' . PHP_EOL); 220 | $response->assertDontSeeText('Disallow: ' . PHP_EOL); 221 | $response->assertDontSeeText('Disallow: /' . PHP_EOL); 222 | 223 | // Test env #2 224 | $this->app['config']->set('app.env', 'staging'); 225 | 226 | $response = $this->get('/robots.txt'); 227 | 228 | $response->assertSeeTextInOrder([ 229 | 'User-agent: *' . PHP_EOL, 230 | 'Allow: /barfoo' 231 | ]); 232 | $response->assertDontSeeText('Disallow: /foobar' . PHP_EOL); 233 | $response->assertDontSeeText('Disallow: ' . PHP_EOL); 234 | $response->assertDontSeeText('Disallow: /' . PHP_EOL); 235 | } 236 | 237 | public function test_output_content_type_is_text_plain_utf_eight() 238 | { 239 | $response = $this->get('/robots.txt'); 240 | $response->assertHeader('Content-Type', 'text/plain; charset=UTF-8'); 241 | } 242 | 243 | public function test_shows_sitemaps() 244 | { 245 | $sitemaps = [ 246 | 'sitemap-foo.xml', 247 | 'sitemap-bar.xml', 248 | ]; 249 | 250 | $this->setConfig($sitemaps, 'sitemaps'); 251 | 252 | $response = $this->get('/robots.txt'); 253 | $response->assertSeeTextInOrder([ 254 | 'User-agent: *' . PHP_EOL, 255 | 'Disallow: ' . PHP_EOL, 256 | 'Sitemap: http://localhost/sitemap-foo.xml' . PHP_EOL, 257 | 'Sitemap: http://localhost/sitemap-bar.xml' 258 | ]); 259 | } 260 | 261 | protected function setConfig(array $data, string $section = 'paths', string $env = 'production') 262 | { 263 | $this->app['config']->set('app.env', $env); 264 | $this->app['config']->set('robots-txt.environments.production.' . $section, $data); 265 | } 266 | 267 | protected function getPackageProviders($app) 268 | { 269 | return [\Verschuur\Laravel\RobotsTxt\Providers\RobotsTxtProvider::class]; 270 | } 271 | } 272 | --------------------------------------------------------------------------------