├── tests ├── __init__.py ├── requirements.txt ├── urls.py └── settings.py ├── .isort.cfg ├── docs ├── history.rst ├── make.bat ├── Makefile ├── index.rst └── conf.py ├── src └── robots │ ├── locale │ ├── da │ │ └── LC_MESSAGES │ │ │ ├── django.mo │ │ │ └── django.po │ ├── de │ │ └── LC_MESSAGES │ │ │ ├── django.mo │ │ │ └── django.po │ ├── el │ │ └── LC_MESSAGES │ │ │ ├── django.mo │ │ │ └── django.po │ ├── en │ │ └── LC_MESSAGES │ │ │ ├── django.mo │ │ │ └── django.po │ ├── fr │ │ └── LC_MESSAGES │ │ │ ├── django.mo │ │ │ └── django.po │ ├── it │ │ └── LC_MESSAGES │ │ │ ├── django.mo │ │ │ └── django.po │ ├── ja │ │ └── LC_MESSAGES │ │ │ ├── django.mo │ │ │ └── django.po │ ├── nl │ │ └── LC_MESSAGES │ │ │ ├── django.mo │ │ │ └── django.po │ ├── ru │ │ └── LC_MESSAGES │ │ │ ├── django.mo │ │ │ └── django.po │ ├── sk │ │ └── LC_MESSAGES │ │ │ ├── django.mo │ │ │ └── django.po │ ├── uk │ │ └── LC_MESSAGES │ │ │ ├── django.mo │ │ │ └── django.po │ ├── es_ES │ │ └── LC_MESSAGES │ │ │ ├── django.mo │ │ │ └── django.po │ └── pt_BR │ │ └── LC_MESSAGES │ │ ├── django.mo │ │ └── django.po │ ├── urls.py │ ├── apps.py │ ├── __init__.py │ ├── forms.py │ ├── templates │ └── robots │ │ └── rule_list.html │ ├── migrations │ ├── 0002_alter_id_fields.py │ ├── __init__.py │ ├── 0003_alter_url_pattern.py │ └── 0001_initial.py │ ├── settings.py │ ├── admin.py │ ├── views.py │ ├── models.py │ └── tests.py ├── .gitignore ├── setup.cfg ├── .tx └── config ├── CONTRIBUTING.md ├── .coveragerc ├── .pre-commit-config.yaml ├── .github └── workflows │ ├── release.yml │ └── test.yml ├── tox.ini ├── LICENSE.txt ├── README.rst ├── setup.py ├── CODE_OF_CONDUCT.md └── CHANGES.rst /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/requirements.txt: -------------------------------------------------------------------------------- 1 | tox>=2.0 2 | coverage 3 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | profile = black 3 | known_first_party = robots 4 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. _history: 2 | 3 | ======= 4 | History 5 | ======= 6 | 7 | .. 
include:: ../CHANGES.rst 8 | :start-line: 5 9 | -------------------------------------------------------------------------------- /src/robots/locale/da/LC_MESSAGES/django.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/django-robots/master/src/robots/locale/da/LC_MESSAGES/django.mo -------------------------------------------------------------------------------- /src/robots/locale/de/LC_MESSAGES/django.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/django-robots/master/src/robots/locale/de/LC_MESSAGES/django.mo -------------------------------------------------------------------------------- /src/robots/locale/el/LC_MESSAGES/django.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/django-robots/master/src/robots/locale/el/LC_MESSAGES/django.mo -------------------------------------------------------------------------------- /src/robots/locale/en/LC_MESSAGES/django.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/django-robots/master/src/robots/locale/en/LC_MESSAGES/django.mo -------------------------------------------------------------------------------- /src/robots/locale/fr/LC_MESSAGES/django.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/django-robots/master/src/robots/locale/fr/LC_MESSAGES/django.mo -------------------------------------------------------------------------------- /src/robots/locale/it/LC_MESSAGES/django.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/django-robots/master/src/robots/locale/it/LC_MESSAGES/django.mo -------------------------------------------------------------------------------- /src/robots/locale/ja/LC_MESSAGES/django.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/django-robots/master/src/robots/locale/ja/LC_MESSAGES/django.mo -------------------------------------------------------------------------------- /src/robots/locale/nl/LC_MESSAGES/django.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/django-robots/master/src/robots/locale/nl/LC_MESSAGES/django.mo -------------------------------------------------------------------------------- /src/robots/locale/ru/LC_MESSAGES/django.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/django-robots/master/src/robots/locale/ru/LC_MESSAGES/django.mo -------------------------------------------------------------------------------- /src/robots/locale/sk/LC_MESSAGES/django.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/django-robots/master/src/robots/locale/sk/LC_MESSAGES/django.mo -------------------------------------------------------------------------------- /src/robots/locale/uk/LC_MESSAGES/django.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/django-robots/master/src/robots/locale/uk/LC_MESSAGES/django.mo 
-------------------------------------------------------------------------------- /src/robots/locale/es_ES/LC_MESSAGES/django.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/django-robots/master/src/robots/locale/es_ES/LC_MESSAGES/django.mo -------------------------------------------------------------------------------- /src/robots/locale/pt_BR/LC_MESSAGES/django.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/django-robots/master/src/robots/locale/pt_BR/LC_MESSAGES/django.mo -------------------------------------------------------------------------------- /src/robots/urls.py: -------------------------------------------------------------------------------- 1 | from django.urls import path 2 | 3 | from robots.views import rules_list 4 | 5 | urlpatterns = [ 6 | path("", rules_list, name="robots_rule_list"), 7 | ] 8 | -------------------------------------------------------------------------------- /src/robots/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class RobotsConfig(AppConfig): 5 | default_auto_field = "django.db.models.BigAutoField" 6 | name = "robots" 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.egg-info 2 | docs/_build/ 3 | *py[co] 4 | __pycache__ 5 | /build 6 | /dist 7 | .tox 8 | .coverage 9 | coverage.xml 10 | .DS_Store 11 | 12 | env/ 13 | .vscode/ 14 | .eggs/ 15 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | exclude = *.egg-info,.git,.settings,.tox,build,dist,docs,requirements,tmp,*migrations*,*south_migrations*,tests,data 3 | max-line-length = 99 4 | 5 | [metadata] 6 | license-file = LICENSE.txt 7 | -------------------------------------------------------------------------------- /.tx/config: -------------------------------------------------------------------------------- 1 | [main] 2 | host = https://www.transifex.com 3 | 4 | [django-robots.main] 5 | file_filter = src/robots/locale/<lang>/LC_MESSAGES/django.po 6 | source_file = src/robots/locale/en/LC_MESSAGES/django.po 7 | source_lang = en 8 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | [![Jazzband](https://jazzband.co/static/img/jazzband.svg)](https://jazzband.co/) 2 | 3 | This is a [Jazzband](https://jazzband.co/) project. By contributing you agree to abide by the [Contributor Code of Conduct](https://jazzband.co/docs/conduct) and follow the [guidelines](https://jazzband.co/docs/guidelines).
4 | -------------------------------------------------------------------------------- /src/robots/__init__.py: -------------------------------------------------------------------------------- 1 | try: 2 | from importlib.metadata import version 3 | 4 | __version__ = version("django-robots") 5 | except ImportError: 6 | from pkg_resources import get_distribution 7 | 8 | __version__ = get_distribution("django-robots").version 9 | 10 | # needed for Django<3.2 11 | default_app_config = "robots.apps.RobotsConfig" 12 | -------------------------------------------------------------------------------- /src/robots/forms.py: -------------------------------------------------------------------------------- 1 | from django import forms 2 | from django.utils.translation import gettext_lazy as _ 3 | 4 | from robots.models import Rule 5 | 6 | 7 | class RuleAdminForm(forms.ModelForm): 8 | class Meta: 9 | model = Rule 10 | fields = "__all__" 11 | 12 | def clean(self): 13 | if not self.cleaned_data.get("disallowed", False) and not self.cleaned_data.get( 14 | "allowed", False 15 | ): 16 | raise forms.ValidationError( 17 | _("Please specify at least one allowed or disallowed URL.") 18 | ) 19 | return self.cleaned_data 20 | -------------------------------------------------------------------------------- /src/robots/templates/robots/rule_list.html: -------------------------------------------------------------------------------- 1 | {% load l10n %}{% if rules %}{% for rule in rules %}{% ifchanged rule.robot %}{% if not forloop.first %} 2 | {% endif %}User-agent: {{ rule.robot }}{% endifchanged %} 3 | {% for url in rule.allowed.all %}Allow: {{ url.pattern|safe }} 4 | {% endfor %}{% for url in rule.disallowed.all %}Disallow: {{ url.pattern|safe }} 5 | {% endfor %}{% if rule.crawl_delay %}Crawl-delay: {% localize off %}{{ rule.crawl_delay|floatformat:'0' }}{% endlocalize %} 6 | {% endif %}{% endfor %}{% else %}User-agent: * 7 | Disallow: 8 | {% endif %} 9 | {% if host %}Host: {{ host }}{% endif %} 10 | {% for sitemap_url in sitemap_urls %}Sitemap: {{ sitemap_url }} 11 | {% endfor %} 12 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = robots 4 | 5 | [report] 6 | omit = ../*migrations*,../*tests*,../*compat.* 7 | # Regexes for lines to exclude from consideration 8 | exclude_lines = 9 | # Have to re-enable the standard pragma 10 | pragma: no cover 11 | 12 | # Don't complain about missing debug-only code: 13 | def __repr__ 14 | if self\.debug 15 | 16 | # Don't complain if tests don't hit defensive assertion code: 17 | raise AssertionError 18 | raise NotImplementedError 19 | 20 | # Don't complain if non-runnable code isn't run: 21 | if 0: 22 | if __name__ == .__main__.: 23 | 24 | ignore_errors = True 25 | 26 | [html] 27 | directory = coverage_html 28 | -------------------------------------------------------------------------------- /src/robots/migrations/0002_alter_id_fields.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 3.2 on 2021-09-23 12:14 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("robots", "0001_initial"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="rule", 14 | name="id", 15 | field=models.BigAutoField( 16 | auto_created=True, primary_key=True, serialize=False, 
verbose_name="ID" 17 | ), 18 | ), 19 | migrations.AlterField( 20 | model_name="url", 21 | name="id", 22 | field=models.BigAutoField( 23 | auto_created=True, primary_key=True, serialize=False, verbose_name="ID" 24 | ), 25 | ), 26 | ] 27 | -------------------------------------------------------------------------------- /src/robots/settings.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | 4 | class Settings: 5 | defaults = { 6 | #: A list of one or more sitemaps to inform robots about: 7 | "SITEMAP_URLS": ("ROBOTS_SITEMAP_URLS", []), 8 | "USE_SITEMAP": ("ROBOTS_USE_SITEMAP", True), 9 | "USE_HOST": ("ROBOTS_USE_HOST", True), 10 | "CACHE_TIMEOUT": ("ROBOTS_CACHE_TIMEOUT", None), 11 | "SITE_BY_REQUEST": ("ROBOTS_SITE_BY_REQUEST", False), 12 | "USE_SCHEME_IN_HOST": ("ROBOTS_USE_SCHEME_IN_HOST", False), 13 | "SITEMAP_VIEW_NAME": ("ROBOTS_SITEMAP_VIEW_NAME", False), 14 | } 15 | 16 | def __getattr__(self, attribute): 17 | from django.conf import settings 18 | 19 | if attribute in self.defaults: 20 | return getattr(settings, *self.defaults[attribute]) 21 | 22 | 23 | sys.modules[__name__] = Settings() 24 | -------------------------------------------------------------------------------- /src/robots/migrations/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Django migrations for robots app 3 | 4 | This package does not contain South migrations. South migrations can be found 5 | in the ``south_migrations`` package. 6 | """ 7 | 8 | # This check is based on code from django-email-log. Thanks Trey Hunner. 9 | # https://github.com/treyhunner/django-email-log/blob/master/email_log/migrations/__init__.py 10 | 11 | SOUTH_ERROR_MESSAGE = """\n 12 | For South support, customize the SOUTH_MIGRATION_MODULES setting like so: 13 | 14 | SOUTH_MIGRATION_MODULES = { 15 | 'robots': 'robots.south_migrations', 16 | } 17 | """ 18 | 19 | # Ensure the user is not using Django 1.6 or below with South 20 | try: 21 | from django.db import migrations # noqa 22 | except ImportError: 23 | from django.core.exceptions import ImproperlyConfigured 24 | 25 | raise ImproperlyConfigured(SOUTH_ERROR_MESSAGE) 26 | -------------------------------------------------------------------------------- /src/robots/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | from django.utils.translation import gettext_lazy as _ 3 | 4 | from robots.forms import RuleAdminForm 5 | from robots.models import Rule, Url 6 | 7 | 8 | class RuleAdmin(admin.ModelAdmin): 9 | form = RuleAdminForm 10 | fieldsets = ( 11 | (None, {"fields": ("robot", "sites")}), 12 | (_("URL patterns"), {"fields": ("allowed", "disallowed")}), 13 | ( 14 | _("Advanced options"), 15 | {"classes": ("collapse",), "fields": ("crawl_delay",)}, 16 | ), 17 | ) 18 | list_filter = ("sites",) 19 | list_display = ("robot", "allowed_urls", "disallowed_urls") 20 | search_fields = ("robot", "allowed__pattern", "disallowed__pattern") 21 | filter_horizontal = ("sites", "allowed", "disallowed") 22 | 23 | 24 | admin.site.register(Url) 25 | admin.site.register(Rule, RuleAdmin) 26 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | # vim: set nospell: 4 | repos: 5 | - 
repo: https://github.com/pre-commit/pre-commit-hooks 6 | rev: v4.5.0 7 | hooks: 8 | - id: trailing-whitespace 9 | args: [--markdown-linebreak-ext=md] 10 | - id: end-of-file-fixer 11 | - id: check-toml 12 | - id: check-added-large-files 13 | - id: debug-statements 14 | - repo: https://github.com/pycqa/isort 15 | rev: "5.12.0" 16 | hooks: 17 | - id: isort 18 | - repo: https://github.com/PyCQA/flake8 19 | rev: "6.1.0" 20 | hooks: 21 | - id: flake8 22 | additional_dependencies: 23 | - flake8-comprehensions 24 | - flake8-bugbear 25 | exclude: docs\/.* 26 | - repo: https://github.com/psf/black 27 | rev: "23.11.0" 28 | hooks: 29 | - id: black 30 | -------------------------------------------------------------------------------- /tests/urls.py: -------------------------------------------------------------------------------- 1 | import django.contrib.sitemaps.views 2 | import django.views.i18n 3 | import django.views.static 4 | from django.conf import settings 5 | from django.contrib import admin 6 | from django.contrib.sitemaps.views import sitemap as sitemap_view 7 | from django.urls import include 8 | from django.urls import re_path as url 9 | from django.views.decorators.cache import cache_page 10 | 11 | urlpatterns = [ 12 | url( 13 | r"^media/(?P<path>.*)$", 14 | django.views.static.serve, # NOQA 15 | {"document_root": settings.MEDIA_ROOT, "show_indexes": True}, 16 | ), 17 | url(r"^admin/", admin.site.urls), # NOQA 18 | url(r"^/", include("robots.urls")), # NOQA 19 | url(r"^sitemap.xml$", sitemap_view, {"sitemaps": []}), 20 | url( 21 | r"^other/sitemap.xml$", 22 | cache_page(60)(sitemap_view), 23 | {"sitemaps": []}, 24 | name="cached-sitemap", 25 | ), 26 | ] 27 | -------------------------------------------------------------------------------- /src/robots/migrations/0003_alter_url_pattern.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.0.6 on 2022-08-19 08:56 2 | 3 | import django.core.validators 4 | from django.db import migrations, models 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("robots", "0002_alter_id_fields"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AlterField( 14 | model_name="url", 15 | name="pattern", 16 | field=models.CharField( 17 | help_text="Case-sensitive. A missing trailing slash does also match to files which start with the name of the pattern, e.g., '/admin' matches /admin.html too. 
Some major search engines allow an asterisk (*) as a wildcard and a dollar sign ($) to match the end of the URL, e.g., '/*.jpg$'.", 18 | max_length=255, 19 | validators=[django.core.validators.MaxLengthValidator(254)], 20 | verbose_name="pattern", 21 | ), 22 | ), 23 | ] 24 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*' 7 | 8 | jobs: 9 | build: 10 | if: github.repository == 'jazzband/django-robots' 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - uses: actions/checkout@v2 15 | with: 16 | fetch-depth: 0 17 | 18 | - name: Set up Python 19 | uses: actions/setup-python@v2 20 | with: 21 | python-version: 3.8 22 | 23 | - name: Install dependencies 24 | run: | 25 | python -m pip install -U pip 26 | python -m pip install -U setuptools twine wheel 27 | 28 | - name: Build package 29 | run: | 30 | python setup.py --version 31 | python setup.py sdist --format=gztar bdist_wheel 32 | twine check dist/* 33 | 34 | - name: Upload packages to Jazzband 35 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') 36 | uses: pypa/gh-action-pypi-publish@master 37 | with: 38 | user: jazzband 39 | password: ${{ secrets.JAZZBAND_RELEASE_KEY }} 40 | repository_url: https://jazzband.co/projects/django-robots/upload 41 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | fail-fast: false 10 | max-parallel: 5 11 | matrix: 12 | python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] 13 | 14 | steps: 15 | - uses: actions/checkout@v2 16 | 17 | - name: Set up Python ${{ matrix.python-version }} 18 | uses: actions/setup-python@v2 19 | with: 20 | python-version: ${{ matrix.python-version }} 21 | 22 | - name: Get pip cache dir 23 | id: pip-cache 24 | run: | 25 | echo "::set-output name=dir::$(pip cache dir)" 26 | 27 | - name: Cache 28 | uses: actions/cache@v2 29 | with: 30 | path: ${{ steps.pip-cache.outputs.dir }} 31 | key: 32 | ${{ matrix.python-version }}-v1-${{ hashFiles('**/setup.py') }}-${{ hashFiles('**/tox.ini') }} 33 | restore-keys: | 34 | ${{ matrix.python-version }}-v1- 35 | 36 | - name: Install dependencies 37 | run: | 38 | python -m pip install --upgrade pip 39 | python -m pip install --upgrade tox tox-gh-actions 40 | 41 | - name: Tox tests 42 | run: | 43 | tox -v 44 | 45 | - name: Upload coverage 46 | uses: codecov/codecov-action@v1 47 | with: 48 | name: Python ${{ matrix.python-version }} 49 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | # list of supported Django/Python versions: 4 | # https://docs.djangoproject.com/en/dev/faq/install/#what-python-version-can-i-use-with-django 5 | py{36,37,38,39,310}-dj{22,31,32} 6 | py{38,39,310}-dj{40,41} 7 | py{38,39,310,311}-dj42 8 | py{310,311,312}-djmain 9 | py38-{lint,docs} 10 | 11 | [gh-actions] 12 | python = 13 | 3.7: py37 14 | 3.8: py38 15 | 3.9: py39 16 | 3.10: py310 17 | 3.11: py311 18 | 3.12: py312 19 | 20 | [testenv] 21 | usedevelop = true 22 | setenv = 23 | DJANGO_SETTINGS_MODULE = tests.settings 24 | PYTHONPATH = . 
25 | commands = 26 | {envbindir}/coverage run {envbindir}/django-admin test robots -v2 27 | {envbindir}/coverage report -m 28 | {envbindir}/coverage xml 29 | deps = 30 | -r{toxinidir}/tests/requirements.txt 31 | dj22: django>=2.2,<2.3 32 | dj31: django>=3.1,<3.2 33 | dj32: django>=3.2,<3.3 34 | dj40: django>=4.0,<4.1 35 | dj41: django>=4.1,<4.2 36 | dj42: django>=4.2,<4.3 37 | djmain: https://github.com/django/django/archive/main.tar.gz 38 | 39 | [testenv:py38-lint] 40 | basepython = python3.8 41 | skip_install = true 42 | deps = 43 | isort 44 | flake8 45 | flake8-black 46 | twine 47 | commands = 48 | isort -c -diff src tests 49 | flake8 src tests 50 | python setup.py sdist 51 | twine check dist/* 52 | 53 | [testenv:py38-docs] 54 | basepython = python3.8 55 | deps = 56 | Sphinx 57 | sphinx_rtd_theme 58 | commands = sphinx-build -b html -d {envtmpdir}/doctrees docs {envtmpdir}/html 59 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2008-, Jannis Leidel 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are 6 | met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | * Redistributions in binary form must reproduce the above 11 | copyright notice, this list of conditions and the following 12 | disclaimer in the documentation and/or other materials provided 13 | with the distribution. 14 | * Neither the name of the author nor the names of other 15 | contributors may be used to endorse or promote products derived 16 | from this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 19 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 20 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 21 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 22 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 23 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 24 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 25 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 26 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 27 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
29 | -------------------------------------------------------------------------------- /tests/settings.py: -------------------------------------------------------------------------------- 1 | INSTALLED_APPS = [ 2 | "django.contrib.admin", 3 | "django.contrib.auth", 4 | "django.contrib.contenttypes", 5 | "django.contrib.sites", 6 | "django.contrib.messages", 7 | "robots", 8 | ] 9 | 10 | DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3"}} 11 | 12 | ROOT_URLCONF = "tests.urls" 13 | 14 | SITE_ID = 1 15 | 16 | MIDDLEWARE_CLASSES = [ 17 | "django.middleware.http.ConditionalGetMiddleware", 18 | "django.contrib.sessions.middleware.SessionMiddleware", 19 | "django.contrib.auth.middleware.AuthenticationMiddleware", 20 | "django.contrib.messages.middleware.MessageMiddleware", 21 | "django.middleware.csrf.CsrfViewMiddleware", 22 | "django.middleware.locale.LocaleMiddleware", 23 | "django.middleware.common.CommonMiddleware", 24 | ] 25 | 26 | MIDDLEWARE = [ 27 | "django.middleware.http.ConditionalGetMiddleware", 28 | "django.contrib.sessions.middleware.SessionMiddleware", 29 | "django.contrib.auth.middleware.AuthenticationMiddleware", 30 | "django.contrib.messages.middleware.MessageMiddleware", 31 | "django.middleware.csrf.CsrfViewMiddleware", 32 | "django.middleware.locale.LocaleMiddleware", 33 | "django.middleware.common.CommonMiddleware", 34 | ] 35 | 36 | TEMPLATES = [ 37 | { 38 | "BACKEND": "django.template.backends.django.DjangoTemplates", 39 | "APP_DIRS": True, 40 | "OPTIONS": { 41 | "context_processors": [ 42 | "django.contrib.auth.context_processors.auth", 43 | "django.contrib.messages.context_processors.messages", 44 | ], 45 | }, 46 | } 47 | ] 48 | 49 | SECRET_KEY = "e07910a06a086c83ba41827aa00b26ed" 50 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ============= 2 | Django Robots 3 | ============= 4 | 5 | .. image:: https://jazzband.co/static/img/badge.svg 6 | :target: https://jazzband.co/ 7 | :alt: Jazzband 8 | .. image:: https://github.com/jazzband/django-robots/workflows/Test/badge.svg 9 | :target: https://github.com/jazzband/django-robots/actions 10 | .. image:: https://codecov.io/gh/jazzband/django-robots/branch/master/graph/badge.svg 11 | :target: https://codecov.io/gh/jazzband/django-robots 12 | .. image:: https://img.shields.io/pypi/v/django-robots.svg 13 | :target: https://pypi.python.org/pypi/django-robots 14 | 15 | This is a basic Django application to manage robots.txt files following the 16 | `robots exclusion protocol`_, complementing the Django_ `Sitemap contrib app`_. 17 | 18 | For installation instructions, see the documentation `install section`_; 19 | for instructions on how to use this application, and on 20 | what it provides, see the file "overview.txt" in the "docs/" 21 | directory or on ReadTheDocs: https://django-robots.readthedocs.io/ 22 | 23 | Supported Django versions 24 | ------------------------- 25 | 26 | * Django 4.0 27 | * Django 3.2 28 | * Django 3.1 29 | * Django 2.2 30 | 31 | For older Django versions (1.6-1.10) use ``django-robots==3.0``. 32 | For Django 2 and above, use ``django-robots>=4.0.0``. 33 | 34 | Supported Python versions 35 | ------------------------- 36 | 37 | * Python 3.7, 3.8, 3.9, 3.10 38 | 39 | .. _install section: https://django-robots.readthedocs.io/en/latest/#installation 40 | .. _robots exclusion protocol: http://en.wikipedia.org/wiki/Robots_exclusion_standard 41 | .. 
_Django: http://www.djangoproject.com/ 42 | .. _Sitemap contrib app: http://docs.djangoproject.com/en/dev/ref/contrib/sitemaps/ 43 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from os import path 2 | 3 | from setuptools import find_packages, setup 4 | 5 | 6 | def read(*parts): 7 | return open(path.join(path.dirname(__file__), *parts)).read() 8 | 9 | 10 | setup( 11 | name="django-robots", 12 | use_scm_version=True, 13 | setup_requires=["setuptools_scm"], 14 | long_description=read("README.rst"), 15 | long_description_content_type="text/x-rst", 16 | description="Robots exclusion application for Django, complementing Sitemaps.", 17 | author="Jannis Leidel", 18 | author_email="jannis@leidel.info", 19 | python_requires=">=3.7", 20 | url="https://github.com/jazzband/django-robots/", 21 | packages=find_packages("src"), 22 | package_dir={"": "src"}, 23 | include_package_data=True, 24 | zip_safe=False, 25 | package_data={ 26 | "robots": [ 27 | "locale/*/LC_MESSAGES/*", 28 | "templates/robots/*.html", 29 | ], 30 | }, 31 | classifiers=[ 32 | "Development Status :: 5 - Production/Stable", 33 | "Environment :: Web Environment", 34 | "Framework :: Django", 35 | "Intended Audience :: Developers", 36 | "License :: OSI Approved :: BSD License", 37 | "Operating System :: OS Independent", 38 | "Programming Language :: Python", 39 | "Topic :: Internet :: WWW/HTTP :: Dynamic Content", 40 | "Topic :: Software Development", 41 | "Topic :: Software Development :: Libraries :: Application Frameworks", 42 | "Programming Language :: Python :: 3.7", 43 | "Programming Language :: Python :: 3.8", 44 | "Programming Language :: Python :: 3.9", 45 | "Programming Language :: Python :: 3.10", 46 | "Programming Language :: Python :: 3.11", 47 | "Framework :: Django", 48 | "Framework :: Django :: 2.2", 49 | "Framework :: Django :: 3.1", 50 | "Framework :: Django :: 3.2", 51 | "Framework :: Django :: 4.0", 52 | "Framework :: Django :: 4.1", 53 | "Framework :: Django :: 4.2", 54 | ], 55 | ) 56 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | As contributors and maintainers of the Jazzband projects, and in the interest of 4 | fostering an open and welcoming community, we pledge to respect all people who 5 | contribute through reporting issues, posting feature requests, updating documentation, 6 | submitting pull requests or patches, and other activities. 7 | 8 | We are committed to making participation in the Jazzband a harassment-free experience 9 | for everyone, regardless of the level of experience, gender, gender identity and 10 | expression, sexual orientation, disability, personal appearance, body size, race, 11 | ethnicity, age, religion, or nationality. 
12 | 13 | Examples of unacceptable behavior by participants include: 14 | 15 | - The use of sexualized language or imagery 16 | - Personal attacks 17 | - Trolling or insulting/derogatory comments 18 | - Public or private harassment 19 | - Publishing other's private information, such as physical or electronic addresses, 20 | without explicit permission 21 | - Other unethical or unprofessional conduct 22 | 23 | The Jazzband roadies have the right and responsibility to remove, edit, or reject 24 | comments, commits, code, wiki edits, issues, and other contributions that are not 25 | aligned to this Code of Conduct, or to ban temporarily or permanently any contributor 26 | for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 27 | 28 | By adopting this Code of Conduct, the roadies commit themselves to fairly and 29 | consistently applying these principles to every aspect of managing the jazzband 30 | projects. Roadies who do not follow or enforce the Code of Conduct may be permanently 31 | removed from the Jazzband roadies. 32 | 33 | This code of conduct applies both within project spaces and in public spaces when an 34 | individual is representing the project or its community. 35 | 36 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by 37 | contacting the roadies at `roadies@jazzband.co`. All complaints will be reviewed and 38 | investigated and will result in a response that is deemed necessary and appropriate to 39 | the circumstances. Roadies are obligated to maintain confidentiality with regard to the 40 | reporter of an incident. 41 | 42 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 43 | 1.3.0, available at [https://contributor-covenant.org/version/1/3/0/][version] 44 | 45 | [homepage]: https://contributor-covenant.org 46 | [version]: https://contributor-covenant.org/version/1/3/0/ 47 | -------------------------------------------------------------------------------- /src/robots/locale/en/LC_MESSAGES/django.po: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the PACKAGE package. 4 | # FIRST AUTHOR , YEAR. 5 | # 6 | #, fuzzy 7 | msgid "" 8 | msgstr "" 9 | "Project-Id-Version: PACKAGE VERSION\n" 10 | "Report-Msgid-Bugs-To: \n" 11 | "POT-Creation-Date: 2016-02-06 19:16+0100\n" 12 | "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" 13 | "Last-Translator: FULL NAME \n" 14 | "Language-Team: LANGUAGE \n" 15 | "Language: \n" 16 | "MIME-Version: 1.0\n" 17 | "Content-Type: text/plain; charset=UTF-8\n" 18 | "Content-Transfer-Encoding: 8bit\n" 19 | 20 | #: admin.py:12 21 | msgid "URL patterns" 22 | msgstr "" 23 | 24 | #: admin.py:15 25 | msgid "Advanced options" 26 | msgstr "" 27 | 28 | #: forms.py:16 29 | msgid "Please specify at least one allowed or dissallowed URL." 30 | msgstr "" 31 | 32 | #: models.py:15 33 | msgid "pattern" 34 | msgstr "" 35 | 36 | #: models.py:16 37 | msgid "" 38 | "Case-sensitive. A missing trailing slash does also match to files which " 39 | "start with the name of the pattern, e.g., '/admin' matches /admin.html too. " 40 | "Some major search engines allow an asterisk (*) as a wildcard and a dollar " 41 | "sign ($) to match the end of the URL, e.g., '/*.jpg$'." 
42 | msgstr "" 43 | 44 | #: models.py:24 models.py:25 45 | msgid "url" 46 | msgstr "" 47 | 48 | #: models.py:44 49 | msgid "robot" 50 | msgstr "" 51 | 52 | #: models.py:45 53 | msgid "" 54 | "This should be a user agent string like 'Googlebot'. Enter an asterisk (*) " 55 | "for all user agents. For a full list look at the database of Web Robots." 57 | msgstr "" 58 | 59 | #: models.py:53 models.py:91 60 | msgid "allowed" 61 | msgstr "" 62 | 63 | #: models.py:54 64 | msgid "The URLs which are allowed to be accessed by bots." 65 | msgstr "" 66 | 67 | #: models.py:58 models.py:95 68 | msgid "disallowed" 69 | msgstr "" 70 | 71 | #: models.py:59 72 | msgid "The URLs which are not allowed to be accessed by bots." 73 | msgstr "" 74 | 75 | #: models.py:62 76 | msgid "sites" 77 | msgstr "" 78 | 79 | #: models.py:64 80 | msgid "crawl delay" 81 | msgstr "" 82 | 83 | #: models.py:66 84 | msgid "" 85 | "Between 0.1 and 99.0. This field is supported by some search engines and " 86 | "defines the delay between successive crawler accesses in seconds. If the " 87 | "crawler rate is a problem for your server, you can set the delay up to 5 or " 88 | "10 or a comfortable value for your server, but it's suggested to start with " 89 | "small values (0.5-1), and increase as needed to an acceptable value for your " 90 | "server. Larger delay values add more delay between successive crawl accesses " 91 | "and decrease the maximum crawl rate to your web server." 92 | msgstr "" 93 | 94 | #: models.py:83 95 | msgid "rule" 96 | msgstr "" 97 | 98 | #: models.py:84 99 | msgid "rules" 100 | msgstr "" 101 | 102 | #: models.py:90 models.py:94 103 | msgid "and" 104 | msgstr "" 105 | -------------------------------------------------------------------------------- /src/robots/locale/ja/LC_MESSAGES/django.po: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the PACKAGE package. 4 | # 5 | # Translators: 6 | # Jonas Obrist , 2011 7 | msgid "" 8 | msgstr "" 9 | "Project-Id-Version: django-robots\n" 10 | "Report-Msgid-Bugs-To: \n" 11 | "POT-Creation-Date: 2011-02-08 12:09+0100\n" 12 | "PO-Revision-Date: 2013-11-20 09:28+0000\n" 13 | "Last-Translator: Jannis Leidel \n" 14 | "Language-Team: Japanese (http://www.transifex.com/projects/p/django-robots/language/ja/)\n" 15 | "MIME-Version: 1.0\n" 16 | "Content-Type: text/plain; charset=UTF-8\n" 17 | "Content-Transfer-Encoding: 8bit\n" 18 | "Language: ja\n" 19 | "Plural-Forms: nplurals=1; plural=0;\n" 20 | 21 | #: admin.py:11 22 | msgid "URL patterns" 23 | msgstr "URLパターン" 24 | 25 | #: admin.py:12 26 | msgid "Advanced options" 27 | msgstr "高度なオプション" 28 | 29 | #: forms.py:12 30 | msgid "Please specify at least one allowed or dissallowed URL." 31 | msgstr "少なくとも一つURLを入力して下さい" 32 | 33 | #: models.py:11 34 | msgid "pattern" 35 | msgstr "パターン" 36 | 37 | #: models.py:12 38 | msgid "" 39 | "Case-sensitive. A missing trailing slash does also match to files which " 40 | "start with the name of the pattern, e.g., '/admin' matches /admin.html too. " 41 | "Some major search engines allow an asterisk (*) as a wildcard and a dollar " 42 | "sign ($) to match the end of the URL, e.g., '/*.jpg$'." 
43 | msgstr "" 44 | 45 | #: models.py:19 models.py:20 46 | msgid "url" 47 | msgstr "URL" 48 | 49 | #: models.py:37 50 | msgid "robot" 51 | msgstr "ロボット" 52 | 53 | #: models.py:38 54 | msgid "" 55 | "This should be a user agent string like 'Googlebot'. Enter an asterisk (*) " 56 | "for all user agents. For a full list look at the database of Web Robots." 58 | msgstr "" 59 | 60 | #: models.py:46 models.py:83 61 | msgid "allowed" 62 | msgstr "公許を受けた" 63 | 64 | #: models.py:47 65 | msgid "The URLs which are allowed to be accessed by bots." 66 | msgstr "公許を受けたURL" 67 | 68 | #: models.py:51 models.py:87 69 | msgid "disallowed" 70 | msgstr "禁じられた" 71 | 72 | #: models.py:52 73 | msgid "The URLs which are not allowed to be accessed by bots." 74 | msgstr "ボットが" 75 | 76 | #: models.py:55 77 | msgid "sites" 78 | msgstr "" 79 | 80 | #: models.py:57 81 | msgid "crawl delay" 82 | msgstr "遅延クロール" 83 | 84 | #: models.py:59 85 | msgid "" 86 | "Between 0.1 and 99.0. This field is supported by some search engines and " 87 | "defines the delay between successive crawler accesses in seconds. If the " 88 | "crawler rate is a problem for your server, you can set the delay up to 5 or " 89 | "10 or a comfortable value for your server, but it's suggested to start with " 90 | "small values (0.5-1), and increase as needed to an acceptable value for your" 91 | " server. Larger delay values add more delay between successive crawl " 92 | "accesses and decrease the maximum crawl rate to your web server." 93 | msgstr "" 94 | 95 | #: models.py:75 96 | msgid "rule" 97 | msgstr "ルール" 98 | 99 | #: models.py:76 100 | msgid "rules" 101 | msgstr "ルール" 102 | 103 | #: models.py:82 models.py:86 104 | msgid "and" 105 | msgstr "と" 106 | -------------------------------------------------------------------------------- /src/robots/locale/sk/LC_MESSAGES/django.po: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the PACKAGE package. 4 | # 5 | # Translators: 6 | # Juraj Bubniak , 2012 7 | msgid "" 8 | msgstr "" 9 | "Project-Id-Version: django-robots\n" 10 | "Report-Msgid-Bugs-To: \n" 11 | "POT-Creation-Date: 2011-02-08 12:09+0100\n" 12 | "PO-Revision-Date: 2013-11-20 09:28+0000\n" 13 | "Last-Translator: Jannis Leidel \n" 14 | "Language-Team: Slovak (http://www.transifex.com/projects/p/django-robots/language/sk/)\n" 15 | "MIME-Version: 1.0\n" 16 | "Content-Type: text/plain; charset=UTF-8\n" 17 | "Content-Transfer-Encoding: 8bit\n" 18 | "Language: sk\n" 19 | "Plural-Forms: nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;\n" 20 | 21 | #: admin.py:11 22 | msgid "URL patterns" 23 | msgstr "URL formát" 24 | 25 | #: admin.py:12 26 | msgid "Advanced options" 27 | msgstr "Pokročilé nastavenia" 28 | 29 | #: forms.py:12 30 | msgid "Please specify at least one allowed or dissallowed URL." 31 | msgstr "Uveďte prosím aspoň jednu povolenú alebo zakázanú URL adresu." 32 | 33 | #: models.py:11 34 | msgid "pattern" 35 | msgstr "formát" 36 | 37 | #: models.py:12 38 | msgid "" 39 | "Case-sensitive. A missing trailing slash does also match to files which " 40 | "start with the name of the pattern, e.g., '/admin' matches /admin.html too. " 41 | "Some major search engines allow an asterisk (*) as a wildcard and a dollar " 42 | "sign ($) to match the end of the URL, e.g., '/*.jpg$'." 
43 | msgstr "" 44 | 45 | #: models.py:19 models.py:20 46 | msgid "url" 47 | msgstr "url" 48 | 49 | #: models.py:37 50 | msgid "robot" 51 | msgstr "robot" 52 | 53 | #: models.py:38 54 | msgid "" 55 | "This should be a user agent string like 'Googlebot'. Enter an asterisk (*) " 56 | "for all user agents. For a full list look at the database of Web Robots." 58 | msgstr "" 59 | 60 | #: models.py:46 models.py:83 61 | msgid "allowed" 62 | msgstr "povolené" 63 | 64 | #: models.py:47 65 | msgid "The URLs which are allowed to be accessed by bots." 66 | msgstr "URL adresy, ktoré majú povolený prístup pre botov." 67 | 68 | #: models.py:51 models.py:87 69 | msgid "disallowed" 70 | msgstr "zakázané" 71 | 72 | #: models.py:52 73 | msgid "The URLs which are not allowed to be accessed by bots." 74 | msgstr "" 75 | 76 | #: models.py:55 77 | msgid "sites" 78 | msgstr "stránky" 79 | 80 | #: models.py:57 81 | msgid "crawl delay" 82 | msgstr "" 83 | 84 | #: models.py:59 85 | msgid "" 86 | "Between 0.1 and 99.0. This field is supported by some search engines and " 87 | "defines the delay between successive crawler accesses in seconds. If the " 88 | "crawler rate is a problem for your server, you can set the delay up to 5 or " 89 | "10 or a comfortable value for your server, but it's suggested to start with " 90 | "small values (0.5-1), and increase as needed to an acceptable value for your" 91 | " server. Larger delay values add more delay between successive crawl " 92 | "accesses and decrease the maximum crawl rate to your web server." 93 | msgstr "" 94 | 95 | #: models.py:75 96 | msgid "rule" 97 | msgstr "pravidlo" 98 | 99 | #: models.py:76 100 | msgid "rules" 101 | msgstr "pravidlá" 102 | 103 | #: models.py:82 models.py:86 104 | msgid "and" 105 | msgstr "a" 106 | -------------------------------------------------------------------------------- /src/robots/views.py: -------------------------------------------------------------------------------- 1 | from django.contrib.sitemaps import views as sitemap_views 2 | from django.contrib.sites.models import Site 3 | from django.urls import NoReverseMatch, reverse 4 | from django.views.decorators.cache import cache_page 5 | from django.views.generic import ListView 6 | 7 | from robots import settings 8 | from robots.models import Rule 9 | 10 | 11 | class RuleList(ListView): 12 | """ 13 | Returns a generated robots.txt file with correct mimetype (text/plain), 14 | status code (200 or 404), sitemap url (automatically). 
15 | """ 16 | 17 | model = Rule 18 | context_object_name = "rules" 19 | cache_timeout = settings.CACHE_TIMEOUT 20 | 21 | def get_current_site(self, request): 22 | if settings.SITE_BY_REQUEST: 23 | return Site.objects.get(domain=request.get_host()) 24 | else: 25 | return Site.objects.get_current() 26 | 27 | def reverse_sitemap_url(self): 28 | try: 29 | if settings.SITEMAP_VIEW_NAME: 30 | return reverse(settings.SITEMAP_VIEW_NAME) 31 | else: 32 | return reverse(sitemap_views.index) 33 | except NoReverseMatch: 34 | try: 35 | return reverse(sitemap_views.sitemap) 36 | except NoReverseMatch: 37 | pass 38 | 39 | def get_domain(self): 40 | scheme = self.request.is_secure() and "https" or "http" 41 | if not self.current_site.domain.startswith(("http", "https")): 42 | return "%s://%s" % (scheme, self.current_site.domain) 43 | return self.current_site.domain 44 | 45 | def get_sitemap_urls(self): 46 | sitemap_urls = list(settings.SITEMAP_URLS) 47 | 48 | if not sitemap_urls and settings.USE_SITEMAP: 49 | sitemap_url = self.reverse_sitemap_url() 50 | 51 | if sitemap_url is not None: 52 | if not sitemap_url.startswith(("http", "https")): 53 | sitemap_url = "%s%s" % (self.get_domain(), sitemap_url) 54 | if sitemap_url not in sitemap_urls: 55 | sitemap_urls.append(sitemap_url) 56 | 57 | return sitemap_urls 58 | 59 | def get_queryset(self): 60 | return Rule.objects.filter(sites=self.current_site) 61 | 62 | def get_context_data(self, **kwargs): 63 | context = super().get_context_data(**kwargs) 64 | context["sitemap_urls"] = self.get_sitemap_urls() 65 | if settings.USE_HOST: 66 | if settings.USE_SCHEME_IN_HOST: 67 | context["host"] = self.get_domain() 68 | else: 69 | context["host"] = self.current_site.domain 70 | else: 71 | context["host"] = None 72 | return context 73 | 74 | def render_to_response(self, context, **kwargs): 75 | return super().render_to_response(context, content_type="text/plain", **kwargs) 76 | 77 | def get_cache_timeout(self): 78 | return self.cache_timeout 79 | 80 | def dispatch(self, request, *args, **kwargs): 81 | cache_timeout = self.get_cache_timeout() 82 | self.current_site = self.get_current_site(request) 83 | super_dispatch = super().dispatch 84 | if not cache_timeout: 85 | return super_dispatch(request, *args, **kwargs) 86 | key_prefix = self.current_site.domain 87 | cache_decorator = cache_page(cache_timeout, key_prefix=key_prefix) 88 | return cache_decorator(super_dispatch)(request, *args, **kwargs) 89 | 90 | 91 | rules_list = RuleList.as_view() 92 | -------------------------------------------------------------------------------- /src/robots/locale/uk/LC_MESSAGES/django.po: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the PACKAGE package. 4 | # 5 | # Translators: 6 | # Sergey Lysach , 2011 7 | msgid "" 8 | msgstr "" 9 | "Project-Id-Version: django-robots\n" 10 | "Report-Msgid-Bugs-To: \n" 11 | "POT-Creation-Date: 2011-02-08 12:09+0100\n" 12 | "PO-Revision-Date: 2013-11-20 09:28+0000\n" 13 | "Last-Translator: Jannis Leidel \n" 14 | "Language-Team: Ukrainian (http://www.transifex.com/projects/p/django-robots/language/uk/)\n" 15 | "MIME-Version: 1.0\n" 16 | "Content-Type: text/plain; charset=UTF-8\n" 17 | "Content-Transfer-Encoding: 8bit\n" 18 | "Language: uk\n" 19 | "Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 
1 : 2);\n" 20 | 21 | #: admin.py:11 22 | msgid "URL patterns" 23 | msgstr "URL шаблони" 24 | 25 | #: admin.py:12 26 | msgid "Advanced options" 27 | msgstr "Розширені налаштування" 28 | 29 | #: forms.py:12 30 | msgid "Please specify at least one allowed or dissallowed URL." 31 | msgstr "Будь ласка, вкажіть принаймні один дозволений або недозволений URL." 32 | 33 | #: models.py:11 34 | msgid "pattern" 35 | msgstr "шаблон" 36 | 37 | #: models.py:12 38 | msgid "" 39 | "Case-sensitive. A missing trailing slash does also match to files which " 40 | "start with the name of the pattern, e.g., '/admin' matches /admin.html too. " 41 | "Some major search engines allow an asterisk (*) as a wildcard and a dollar " 42 | "sign ($) to match the end of the URL, e.g., '/*.jpg$'." 43 | msgstr "" 44 | 45 | #: models.py:19 models.py:20 46 | msgid "url" 47 | msgstr "url" 48 | 49 | #: models.py:37 50 | msgid "robot" 51 | msgstr "робот" 52 | 53 | #: models.py:38 54 | msgid "" 55 | "This should be a user agent string like 'Googlebot'. Enter an asterisk (*) " 56 | "for all user agents. For a full list look at the database of Web Robots." 58 | msgstr "" 59 | 60 | #: models.py:46 models.py:83 61 | msgid "allowed" 62 | msgstr "дозволено" 63 | 64 | #: models.py:47 65 | msgid "The URLs which are allowed to be accessed by bots." 66 | msgstr "URL-адреси, які дозволені для індексації пошуковими роботами." 67 | 68 | #: models.py:51 models.py:87 69 | msgid "disallowed" 70 | msgstr "заборонено" 71 | 72 | #: models.py:52 73 | msgid "The URLs which are not allowed to be accessed by bots." 74 | msgstr "URL-адреси, які не дозволені для індексації пошуковими роботами." 75 | 76 | #: models.py:55 77 | msgid "sites" 78 | msgstr "" 79 | 80 | #: models.py:57 81 | msgid "crawl delay" 82 | msgstr "затримка сканування" 83 | 84 | #: models.py:59 85 | msgid "" 86 | "Between 0.1 and 99.0. This field is supported by some search engines and " 87 | "defines the delay between successive crawler accesses in seconds. If the " 88 | "crawler rate is a problem for your server, you can set the delay up to 5 or " 89 | "10 or a comfortable value for your server, but it's suggested to start with " 90 | "small values (0.5-1), and increase as needed to an acceptable value for your" 91 | " server. Larger delay values add more delay between successive crawl " 92 | "accesses and decrease the maximum crawl rate to your web server." 93 | msgstr "" 94 | 95 | #: models.py:75 96 | msgid "rule" 97 | msgstr "правило" 98 | 99 | #: models.py:76 100 | msgid "rules" 101 | msgstr "правила" 102 | 103 | #: models.py:82 models.py:86 104 | msgid "and" 105 | msgstr "та" 106 | -------------------------------------------------------------------------------- /src/robots/locale/el/LC_MESSAGES/django.po: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the PACKAGE package. 
4 | # 5 | # Translators: 6 | # Dimitris Glezos , 2011 7 | msgid "" 8 | msgstr "" 9 | "Project-Id-Version: django-robots\n" 10 | "Report-Msgid-Bugs-To: \n" 11 | "POT-Creation-Date: 2011-02-08 12:09+0100\n" 12 | "PO-Revision-Date: 2013-11-20 09:28+0000\n" 13 | "Last-Translator: Jannis Leidel \n" 14 | "Language-Team: Greek (http://www.transifex.com/projects/p/django-robots/language/el/)\n" 15 | "MIME-Version: 1.0\n" 16 | "Content-Type: text/plain; charset=UTF-8\n" 17 | "Content-Transfer-Encoding: 8bit\n" 18 | "Language: el\n" 19 | "Plural-Forms: nplurals=2; plural=(n != 1);\n" 20 | 21 | #: admin.py:11 22 | msgid "URL patterns" 23 | msgstr "Σχήματα URL" 24 | 25 | #: admin.py:12 26 | msgid "Advanced options" 27 | msgstr "Προχωρημένες επιλογές" 28 | 29 | #: forms.py:12 30 | msgid "Please specify at least one allowed or dissallowed URL." 31 | msgstr "Παρακαλείστε να διευκρινίσετε τουλάχιστον ένα URL το οποίο να επιτρέπεται ή να απορρίπτεται." 32 | 33 | #: models.py:11 34 | msgid "pattern" 35 | msgstr "έκφραση" 36 | 37 | #: models.py:12 38 | msgid "" 39 | "Case-sensitive. A missing trailing slash does also match to files which " 40 | "start with the name of the pattern, e.g., '/admin' matches /admin.html too. " 41 | "Some major search engines allow an asterisk (*) as a wildcard and a dollar " 42 | "sign ($) to match the end of the URL, e.g., '/*.jpg$'." 43 | msgstr "Με διάκριση πεζών/κεφαλαίων. Η απουσία μιας καθέτου στο τέλος επιστρέφει αποτελέσματα τα οποία αρχίζουν με το όνομα της έκφρασης, π.χ. το '/admin' ταιριάζει και με το /admin.html. Μερικές μεγάλες μηχανές αναζήτησης επιτρέπουν ένα αστερίσκο (*) ως μπαλαντέρ και το σύμβολο του δολαρίου ($) για ανίχνευση του τέλους του URL, π.χ., '/*.jpg$'." 44 | 45 | #: models.py:19 models.py:20 46 | msgid "url" 47 | msgstr "url" 48 | 49 | #: models.py:37 50 | msgid "robot" 51 | msgstr "ρομπότ" 52 | 53 | #: models.py:38 54 | msgid "" 55 | "This should be a user agent string like 'Googlebot'. Enter an asterisk (*) " 56 | "for all user agents. For a full list look at the database of Web Robots." 58 | msgstr "Αυτό πρέπει να είναι μια συμβολοσειρά για ένα user agent όπως 'Googlebot'. Εισάγετε ένα αστερίσκο (*) για όλα τα user agents. Για έναν πλήρη κατάλογο ανατρέξτε στη βάση δεδομένων των Web Robots." 59 | 60 | #: models.py:46 models.py:83 61 | msgid "allowed" 62 | msgstr "επιτρέπονται" 63 | 64 | #: models.py:47 65 | msgid "The URLs which are allowed to be accessed by bots." 66 | msgstr "Τα URL στα οποία επιτρέπεται να έχουν πρόσβαση τα ρομπότ." 67 | 68 | #: models.py:51 models.py:87 69 | msgid "disallowed" 70 | msgstr "να αρθεί" 71 | 72 | #: models.py:52 73 | msgid "The URLs which are not allowed to be accessed by bots." 74 | msgstr "Τα URL στα οποία δεν επιτρέπεται να έχουν πρόσβαση τα ρομπότ." 75 | 76 | #: models.py:55 77 | msgid "sites" 78 | msgstr "" 79 | 80 | #: models.py:57 81 | msgid "crawl delay" 82 | msgstr "καθυστέρηση αναζήτησης" 83 | 84 | #: models.py:59 85 | msgid "" 86 | "Between 0.1 and 99.0. This field is supported by some search engines and " 87 | "defines the delay between successive crawler accesses in seconds. If the " 88 | "crawler rate is a problem for your server, you can set the delay up to 5 or " 89 | "10 or a comfortable value for your server, but it's suggested to start with " 90 | "small values (0.5-1), and increase as needed to an acceptable value for your" 91 | " server. Larger delay values add more delay between successive crawl " 92 | "accesses and decrease the maximum crawl rate to your web server." 
93 | msgstr "" 94 | 95 | #: models.py:75 96 | msgid "rule" 97 | msgstr "κανόνας" 98 | 99 | #: models.py:76 100 | msgid "rules" 101 | msgstr "κανόνες" 102 | 103 | #: models.py:82 models.py:86 104 | msgid "and" 105 | msgstr "και" 106 | -------------------------------------------------------------------------------- /src/robots/locale/da/LC_MESSAGES/django.po: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the PACKAGE package. 4 | # 5 | # Translators: 6 | msgid "" 7 | msgstr "" 8 | "Project-Id-Version: django-robots\n" 9 | "Report-Msgid-Bugs-To: \n" 10 | "POT-Creation-Date: 2011-02-08 12:09+0100\n" 11 | "PO-Revision-Date: 2013-11-20 09:28+0000\n" 12 | "Last-Translator: Jannis Leidel \n" 13 | "Language-Team: Danish (http://www.transifex.com/projects/p/django-robots/language/da/)\n" 14 | "MIME-Version: 1.0\n" 15 | "Content-Type: text/plain; charset=UTF-8\n" 16 | "Content-Transfer-Encoding: 8bit\n" 17 | "Language: da\n" 18 | "Plural-Forms: nplurals=2; plural=(n != 1);\n" 19 | 20 | #: admin.py:11 21 | msgid "URL patterns" 22 | msgstr "URL mønstre" 23 | 24 | #: admin.py:12 25 | msgid "Advanced options" 26 | msgstr "Avancerede indstillinger" 27 | 28 | #: forms.py:12 29 | msgid "Please specify at least one allowed or dissallowed URL." 30 | msgstr "Venligst specificer mindst én tilladt eller ikke tilladt URL." 31 | 32 | #: models.py:11 33 | msgid "pattern" 34 | msgstr "mønster" 35 | 36 | #: models.py:12 37 | msgid "" 38 | "Case-sensitive. A missing trailing slash does also match to files which " 39 | "start with the name of the pattern, e.g., '/admin' matches /admin.html too. " 40 | "Some major search engines allow an asterisk (*) as a wildcard and a dollar " 41 | "sign ($) to match the end of the URL, e.g., '/*.jpg$'." 42 | msgstr "Case-sensitive. En manglende slut-skråstreg matcher også filer der starter med navnet på mønstret, f.eks. '/admin' matcher også /admin.html. Nogle store søgemaskiner tillader brugen af asterisk (*) som et wildcard og et dollar tegn ($) til at mache slutningen af URL'en, f.eks., '/*.jpg$'." 43 | 44 | #: models.py:19 models.py:20 45 | msgid "url" 46 | msgstr "url" 47 | 48 | #: models.py:37 49 | msgid "robot" 50 | msgstr "robot" 51 | 52 | #: models.py:38 53 | msgid "" 54 | "This should be a user agent string like 'Googlebot'. Enter an asterisk (*) " 55 | "for all user agents. For a full list look at the database of Web Robots." 57 | msgstr "Dette bør være en user agent streng ligesom 'Googlebot'. Skriv en asterisk (*) for at indikere alle user agents. For en komplet liste, kig på databasen af Web Robotter." 58 | 59 | #: models.py:46 models.py:83 60 | msgid "allowed" 61 | msgstr "tilladt" 62 | 63 | #: models.py:47 64 | msgid "The URLs which are allowed to be accessed by bots." 65 | msgstr "URL'erne der er tilladt at tilgå af bots." 66 | 67 | #: models.py:51 models.py:87 68 | msgid "disallowed" 69 | msgstr "ikke tilladt" 70 | 71 | #: models.py:52 72 | msgid "The URLs which are not allowed to be accessed by bots." 73 | msgstr "URL'erne der ikke er tilladt at tilgå af bots." 74 | 75 | #: models.py:55 76 | msgid "sites" 77 | msgstr "" 78 | 79 | #: models.py:57 80 | msgid "crawl delay" 81 | msgstr "crawl-delay" 82 | 83 | #: models.py:59 84 | msgid "" 85 | "Between 0.1 and 99.0. This field is supported by some search engines and " 86 | "defines the delay between successive crawler accesses in seconds. 
If the " 87 | "crawler rate is a problem for your server, you can set the delay up to 5 or " 88 | "10 or a comfortable value for your server, but it's suggested to start with " 89 | "small values (0.5-1), and increase as needed to an acceptable value for your" 90 | " server. Larger delay values add more delay between successive crawl " 91 | "accesses and decrease the maximum crawl rate to your web server." 92 | msgstr "Mellem 0.1 og 99.0. Dette felt er understøttet af nogle søgemaskiner og definerer et delay imellem crawler-adgange i sekunder. Hvis crawler-raten er et problem for din server, så kan du sætte delayet op til 5 eller 10 eller en anden behagelig værdi for din server. Men det anbefales at starte med lave værdier (0.5-1), og så hæve disse efter behov til et acceptabelt niveau for din server. Større delay-værdier tilføjer mere delay imellem crawl-adgange og mindsker den maksimale crawl-rate til din webserver." 93 | 94 | #: models.py:75 95 | msgid "rule" 96 | msgstr "regel" 97 | 98 | #: models.py:76 99 | msgid "rules" 100 | msgstr "regler" 101 | 102 | #: models.py:82 models.py:86 103 | msgid "and" 104 | msgstr "og" 105 | -------------------------------------------------------------------------------- /src/robots/locale/it/LC_MESSAGES/django.po: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the PACKAGE package. 4 | # 5 | # Translators: 6 | # yakky , 2016 7 | msgid "" 8 | msgstr "" 9 | "Project-Id-Version: django-robots\n" 10 | "Report-Msgid-Bugs-To: \n" 11 | "POT-Creation-Date: 2016-02-06 19:16+0100\n" 12 | "PO-Revision-Date: 2016-02-06 19:46+0000\n" 13 | "Last-Translator: yakky \n" 14 | "Language-Team: Italian (http://www.transifex.com/jazzband/django-robots/language/it/)\n" 15 | "MIME-Version: 1.0\n" 16 | "Content-Type: text/plain; charset=UTF-8\n" 17 | "Content-Transfer-Encoding: 8bit\n" 18 | "Language: it\n" 19 | "Plural-Forms: nplurals=2; plural=(n != 1);\n" 20 | 21 | #: admin.py:12 22 | msgid "URL patterns" 23 | msgstr "regole URL" 24 | 25 | #: admin.py:15 26 | msgid "Advanced options" 27 | msgstr "Opzioni avanzate" 28 | 29 | #: forms.py:16 30 | msgid "Please specify at least one allowed or dissallowed URL." 31 | msgstr "Specifica almeno un URL ammesso o non ammesso." 32 | 33 | #: models.py:15 34 | msgid "pattern" 35 | msgstr "regola" 36 | 37 | #: models.py:16 38 | msgid "" 39 | "Case-sensitive. A missing trailing slash does also match to files which " 40 | "start with the name of the pattern, e.g., '/admin' matches /admin.html too. " 41 | "Some major search engines allow an asterisk (*) as a wildcard and a dollar " 42 | "sign ($) to match the end of the URL, e.g., '/*.jpg$'." 43 | msgstr "Sensibile alle maiuscole. Senza '/' in fondo riconosceanche i file che cominciano con la regola, p. es: '/admin' riconosce anche '/admin.html'. Alcuni dei principali motori di ricerca permettono l'uso dell'asterisco (*) come carattere jolly e il dollaro ($) per individuare la fine dell'URL, p. es: '/*.jpg$'" 44 | 45 | #: models.py:24 models.py:25 46 | msgid "url" 47 | msgstr "url" 48 | 49 | #: models.py:44 50 | msgid "robot" 51 | msgstr "robot" 52 | 53 | #: models.py:45 54 | msgid "" 55 | "This should be a user agent string like 'Googlebot'. Enter an asterisk (*) " 56 | "for all user agents. For a full list look at the database of Web Robots." 
58 | msgstr "Questo deve essere una string con lo user agent tipo 'Googlebot'. Usa l'asterisco (*) per tutti gli user agent. Per una lista completa verifica sul database dei Robot Web." 59 | 60 | #: models.py:53 models.py:91 61 | msgid "allowed" 62 | msgstr "ammesso" 63 | 64 | #: models.py:54 65 | msgid "The URLs which are allowed to be accessed by bots." 66 | msgstr "Gli URL a cui i bot possono accedere" 67 | 68 | #: models.py:58 models.py:95 69 | msgid "disallowed" 70 | msgstr "non ammesso" 71 | 72 | #: models.py:59 73 | msgid "The URLs which are not allowed to be accessed by bots." 74 | msgstr "Gli URL a cui i bot non possono accedere" 75 | 76 | #: models.py:62 77 | msgid "sites" 78 | msgstr "siti" 79 | 80 | #: models.py:64 81 | msgid "crawl delay" 82 | msgstr "ritardo navigazione" 83 | 84 | #: models.py:66 85 | msgid "" 86 | "Between 0.1 and 99.0. This field is supported by some search engines and " 87 | "defines the delay between successive crawler accesses in seconds. If the " 88 | "crawler rate is a problem for your server, you can set the delay up to 5 or " 89 | "10 or a comfortable value for your server, but it's suggested to start with " 90 | "small values (0.5-1), and increase as needed to an acceptable value for your" 91 | " server. Larger delay values add more delay between successive crawl " 92 | "accesses and decrease the maximum crawl rate to your web server." 93 | msgstr "Fra 0.1 e 99.0. Questo campo è supportato dal alcuni motori di ricerca e definisce l'intervallo fra due accessi consecutivi del crawler. Se il flusso dei crawler è un problema per il tuo server, puoi impostare il ritardo a 5 o 10 o un valore adeguat per il tuo server, si suggerisce comunque di cominciare convalori piccoli (0.5-1), ed aumentarli a seconda delle necessità fino a valori adeguatiper il tuo sever. Intervalli molto grandi ritardano le isite del crawler e diminuiscono il flusso dei crawler al tuo web server." 94 | 95 | #: models.py:83 96 | msgid "rule" 97 | msgstr "regola" 98 | 99 | #: models.py:84 100 | msgid "rules" 101 | msgstr "regole" 102 | 103 | #: models.py:90 models.py:94 104 | msgid "and" 105 | msgstr "e" 106 | -------------------------------------------------------------------------------- /src/robots/models.py: -------------------------------------------------------------------------------- 1 | from django.contrib.sites.models import Site 2 | from django.core.validators import MaxLengthValidator 3 | from django.db import models 4 | from django.utils.text import get_text_list 5 | from django.utils.translation import gettext_lazy as _ 6 | 7 | 8 | class Url(models.Model): 9 | """ 10 | Defines a URL pattern for use with a robot exclusion rule. It's 11 | case-sensitive and exact, e.g., "/admin" and "/admin/" are different URLs. 12 | """ 13 | 14 | pattern = models.CharField( 15 | _("pattern"), 16 | max_length=255, 17 | help_text=_( 18 | "Case-sensitive. A missing trailing slash does al" 19 | "so match to files which start with the name of " 20 | "the pattern, e.g., '/admin' matches /admin.html " 21 | "too. Some major search engines allow an asterisk" 22 | " (*) as a wildcard and a dollar sign ($) to " 23 | "match the end of the URL, e.g., '/*.jpg$'." 
24 | ), 25 | validators=[MaxLengthValidator(254)], 26 | ) 27 | 28 | class Meta: 29 | verbose_name = _("url") 30 | verbose_name_plural = _("url") 31 | 32 | def __str__(self): 33 | return "%s" % self.pattern 34 | 35 | def save(self, *args, **kwargs): 36 | if not self.pattern.startswith("/"): 37 | self.pattern = "/" + self.pattern 38 | super().save(*args, **kwargs) 39 | 40 | 41 | class Rule(models.Model): 42 | """ 43 | Defines an abstract rule which is used to respond to crawling web robots, 44 | using the robot exclusion standard, a.k.a. robots.txt. It allows or 45 | disallows the robot identified by its user agent to access the given URLs. 46 | The Site contrib app is used to enable multiple robots.txt per instance. 47 | """ 48 | 49 | robot = models.CharField( 50 | _("robot"), 51 | max_length=255, 52 | help_text=_( 53 | "This should be a user agent string like " 54 | "'Googlebot'. Enter an asterisk (*) for all " 55 | "user agents. For a full list look at the " 56 | " database of Web Robots." 59 | ), 60 | ) 61 | 62 | allowed = models.ManyToManyField( 63 | Url, 64 | blank=True, 65 | related_name="allowed", 66 | verbose_name=_("allowed"), 67 | help_text=_("The URLs which are allowed " "to be accessed by bots."), 68 | ) 69 | 70 | disallowed = models.ManyToManyField( 71 | Url, 72 | blank=True, 73 | related_name="disallowed", 74 | verbose_name=_("disallowed"), 75 | help_text=_("The URLs which are not " "allowed to be accessed " "by bots."), 76 | ) 77 | sites = models.ManyToManyField(Site, verbose_name=_("sites")) 78 | 79 | crawl_delay = models.DecimalField( 80 | _("crawl delay"), 81 | blank=True, 82 | null=True, 83 | max_digits=3, 84 | decimal_places=1, 85 | help_text=_( 86 | "Between 0.1 and 99.0. This field is " 87 | "supported by some search engines and " 88 | "defines the delay between successive " 89 | "crawler accesses in seconds. If the " 90 | "crawler rate is a problem for your " 91 | "server, you can set the delay up to 5 " 92 | "or 10 or a comfortable value for your " 93 | "server, but it's suggested to start " 94 | "with small values (0.5-1), and " 95 | "increase as needed to an acceptable " 96 | "value for your server. Larger delay " 97 | "values add more delay between " 98 | "successive crawl accesses and " 99 | "decrease the maximum crawl rate to " 100 | "your web server." 101 | ), 102 | ) 103 | 104 | class Meta: 105 | verbose_name = _("rule") 106 | verbose_name_plural = _("rules") 107 | 108 | def __str__(self): 109 | return "%s" % self.robot 110 | 111 | def allowed_urls(self): 112 | return get_text_list(list(self.allowed.all()), _("and")) 113 | 114 | allowed_urls.short_description = _("allowed") 115 | 116 | def disallowed_urls(self): 117 | return get_text_list(list(self.disallowed.all()), _("and")) 118 | 119 | disallowed_urls.short_description = _("disallowed") 120 | -------------------------------------------------------------------------------- /src/robots/locale/nl/LC_MESSAGES/django.po: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the PACKAGE package. 
4 | # 5 | # Translators: 6 | # Vincent Driessen , 2011 7 | msgid "" 8 | msgstr "" 9 | "Project-Id-Version: django-robots\n" 10 | "Report-Msgid-Bugs-To: \n" 11 | "POT-Creation-Date: 2011-02-08 12:09+0100\n" 12 | "PO-Revision-Date: 2013-11-20 09:28+0000\n" 13 | "Last-Translator: Jannis Leidel \n" 14 | "Language-Team: Dutch (http://www.transifex.com/projects/p/django-robots/language/nl/)\n" 15 | "MIME-Version: 1.0\n" 16 | "Content-Type: text/plain; charset=UTF-8\n" 17 | "Content-Transfer-Encoding: 8bit\n" 18 | "Language: nl\n" 19 | "Plural-Forms: nplurals=2; plural=(n != 1);\n" 20 | 21 | #: admin.py:11 22 | msgid "URL patterns" 23 | msgstr "URL-patronen" 24 | 25 | #: admin.py:12 26 | msgid "Advanced options" 27 | msgstr "Geavanceerde opties" 28 | 29 | #: forms.py:12 30 | msgid "Please specify at least one allowed or dissallowed URL." 31 | msgstr "Specificeer minimaal een toegestane of niet-toegestane URL." 32 | 33 | #: models.py:11 34 | msgid "pattern" 35 | msgstr "patroon" 36 | 37 | #: models.py:12 38 | msgid "" 39 | "Case-sensitive. A missing trailing slash does also match to files which " 40 | "start with the name of the pattern, e.g., '/admin' matches /admin.html too. " 41 | "Some major search engines allow an asterisk (*) as a wildcard and a dollar " 42 | "sign ($) to match the end of the URL, e.g., '/*.jpg$'." 43 | msgstr "Hoofdlettergevoelig. Een ontbrekende slash aan het eind komt ook overeen met bestanden die beginnen met de naam van het patroon, bijvoorbeeld '/admin' matcht ook '/admin.html'. Sommige grote zoekmachines staan een sterretje (*) toe als wildcard en een dollarteken ($) om het einde van de URL te matchen, bijvoorbeeld '/*.jpg$'." 44 | 45 | #: models.py:19 models.py:20 46 | msgid "url" 47 | msgstr "url" 48 | 49 | #: models.py:37 50 | msgid "robot" 51 | msgstr "robot" 52 | 53 | #: models.py:38 54 | msgid "" 55 | "This should be a user agent string like 'Googlebot'. Enter an asterisk (*) " 56 | "for all user agents. For a full list look at the database of Web Robots." 58 | msgstr "Dit moet een user-agent string zijn zoals 'Googlebot'. Voer een sterretje (*) in voor alle user-agents. Voor een volledige lijst, zie de database van Web Robots." 59 | 60 | #: models.py:46 models.py:83 61 | msgid "allowed" 62 | msgstr "toegestaan" 63 | 64 | #: models.py:47 65 | msgid "The URLs which are allowed to be accessed by bots." 66 | msgstr "De URL's die mogen worden benaderd door bots." 67 | 68 | #: models.py:51 models.py:87 69 | msgid "disallowed" 70 | msgstr "niet toegestaan" 71 | 72 | #: models.py:52 73 | msgid "The URLs which are not allowed to be accessed by bots." 74 | msgstr "De URL's die niet mogen worden benaderd door bots." 75 | 76 | #: models.py:55 77 | msgid "sites" 78 | msgstr "" 79 | 80 | #: models.py:57 81 | msgid "crawl delay" 82 | msgstr "crawl-vertraging" 83 | 84 | #: models.py:59 85 | msgid "" 86 | "Between 0.1 and 99.0. This field is supported by some search engines and " 87 | "defines the delay between successive crawler accesses in seconds. If the " 88 | "crawler rate is a problem for your server, you can set the delay up to 5 or " 89 | "10 or a comfortable value for your server, but it's suggested to start with " 90 | "small values (0.5-1), and increase as needed to an acceptable value for your" 91 | " server. Larger delay values add more delay between successive crawl " 92 | "accesses and decrease the maximum crawl rate to your web server." 93 | msgstr "Tussen 0.1 en 99.0. 
Dit veld wordt ondersteund door sommige zoekmachines en definiëert de vertraging tussen opeenvolgende crawler-toegang in seconden. Als de crawler-frequentie een probleem is voor uw server, kunt u de vertraging vergroten met 5 of 10 of een comfortabele waarde voor uw server, maar het wordt geadviseerd om te beginnen met kleine waarden (0.5-1) en pas te vergroten indien nodig. Grotere vertragingswaarden voegen meer vertraging toe tussen opeenvolgende crawl-toegangen en verlagen de maximale crawlsnelheid naar uw webserver." 94 | 95 | #: models.py:75 96 | msgid "rule" 97 | msgstr "regel" 98 | 99 | #: models.py:76 100 | msgid "rules" 101 | msgstr "regels" 102 | 103 | #: models.py:82 models.py:86 104 | msgid "and" 105 | msgstr "en" 106 | -------------------------------------------------------------------------------- /src/robots/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | from django.db import migrations, models 2 | 3 | 4 | class Migration(migrations.Migration): 5 | dependencies = [ 6 | ("sites", "0001_initial"), 7 | ] 8 | 9 | operations = [ 10 | migrations.CreateModel( 11 | name="Rule", 12 | fields=[ 13 | ( 14 | "id", 15 | models.AutoField( 16 | auto_created=True, 17 | serialize=False, 18 | verbose_name="ID", 19 | primary_key=True, 20 | ), 21 | ), 22 | ( 23 | "robot", 24 | models.CharField( 25 | max_length=255, 26 | help_text="This should be a user agent string like 'Googlebot'. Enter an asterisk (*) for all user agents. For a full list look at the database of Web Robots.", 27 | verbose_name="robot", 28 | ), 29 | ), 30 | ( 31 | "crawl_delay", 32 | models.DecimalField( 33 | blank=True, 34 | help_text="Between 0.1 and 99.0. This field is supported by some search engines and defines the delay between successive crawler accesses in seconds. If the crawler rate is a problem for your server, you can set the delay up to 5 or 10 or a comfortable value for your server, but it's suggested to start with small values (0.5-1), and increase as needed to an acceptable value for your server. Larger delay values add more delay between successive crawl accesses and decrease the maximum crawl rate to your web server.", 35 | verbose_name="crawl delay", 36 | decimal_places=1, 37 | max_digits=3, 38 | null=True, 39 | ), 40 | ), 41 | ( 42 | "sites", 43 | models.ManyToManyField(to="sites.Site", verbose_name="sites"), 44 | ), 45 | ], 46 | options={ 47 | "verbose_name_plural": "rules", 48 | "verbose_name": "rule", 49 | }, 50 | bases=(models.Model,), 51 | ), 52 | migrations.CreateModel( 53 | name="Url", 54 | fields=[ 55 | ( 56 | "id", 57 | models.AutoField( 58 | auto_created=True, 59 | serialize=False, 60 | verbose_name="ID", 61 | primary_key=True, 62 | ), 63 | ), 64 | ( 65 | "pattern", 66 | models.CharField( 67 | max_length=255, 68 | help_text="Case-sensitive. A missing trailing slash does also match to files which start with the name of the pattern, e.g., '/admin' matches /admin.html too. 
Some major search engines allow an asterisk (*) as a wildcard and a dollar sign ($) to match the end of the URL, e.g., '/*.jpg$'.", 69 | verbose_name="pattern", 70 | ), 71 | ), 72 | ], 73 | options={ 74 | "verbose_name_plural": "url", 75 | "verbose_name": "url", 76 | }, 77 | bases=(models.Model,), 78 | ), 79 | migrations.AddField( 80 | model_name="rule", 81 | name="disallowed", 82 | field=models.ManyToManyField( 83 | to="robots.Url", 84 | blank=True, 85 | related_name="disallowed", 86 | verbose_name="disallowed", 87 | help_text="The URLs which are not allowed to be accessed by bots.", 88 | ), 89 | preserve_default=True, 90 | ), 91 | migrations.AddField( 92 | model_name="rule", 93 | name="allowed", 94 | field=models.ManyToManyField( 95 | to="robots.Url", 96 | blank=True, 97 | related_name="allowed", 98 | verbose_name="allowed", 99 | help_text="The URLs which are allowed to be accessed by bots.", 100 | ), 101 | preserve_default=True, 102 | ), 103 | ] 104 | -------------------------------------------------------------------------------- /src/robots/locale/de/LC_MESSAGES/django.po: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the PACKAGE package. 4 | # 5 | # Translators: 6 | # Jannis Leidel , 2011 7 | msgid "" 8 | msgstr "" 9 | "Project-Id-Version: django-robots\n" 10 | "Report-Msgid-Bugs-To: \n" 11 | "POT-Creation-Date: 2011-02-08 12:09+0100\n" 12 | "PO-Revision-Date: 2013-11-20 09:28+0000\n" 13 | "Last-Translator: Jannis Leidel \n" 14 | "Language-Team: German (http://www.transifex.com/projects/p/django-robots/language/de/)\n" 15 | "MIME-Version: 1.0\n" 16 | "Content-Type: text/plain; charset=UTF-8\n" 17 | "Content-Transfer-Encoding: 8bit\n" 18 | "Language: de\n" 19 | "Plural-Forms: nplurals=2; plural=(n != 1);\n" 20 | 21 | #: admin.py:11 22 | msgid "URL patterns" 23 | msgstr "URL-Muster" 24 | 25 | #: admin.py:12 26 | msgid "Advanced options" 27 | msgstr "Erweiterte Einstellungen" 28 | 29 | #: forms.py:12 30 | msgid "Please specify at least one allowed or dissallowed URL." 31 | msgstr "Bitte geben Sie mindestens eine erlaubte oder unerlaube URL ein." 32 | 33 | #: models.py:11 34 | msgid "pattern" 35 | msgstr "Muster" 36 | 37 | #: models.py:12 38 | msgid "" 39 | "Case-sensitive. A missing trailing slash does also match to files which " 40 | "start with the name of the pattern, e.g., '/admin' matches /admin.html too. " 41 | "Some major search engines allow an asterisk (*) as a wildcard and a dollar " 42 | "sign ($) to match the end of the URL, e.g., '/*.jpg$'." 43 | msgstr "Groß-/Kleinschreibung beachten. Wenn der abschließende Schrägstrich fehlt, werden auch die Dateien erkannt, die mit dem Muster beginnen, also zum Beispiel '/admin' trifft auch auf /admin.html zu. Einige größere Suchmaschinen erlauben eine Asterisk (*) als Wildcard und ein Dollar-Zeichen ($) um das Ende einer URL zu beschreiben, z.B. '/*.jpg$'." 44 | 45 | #: models.py:19 models.py:20 46 | msgid "url" 47 | msgstr "URL" 48 | 49 | #: models.py:37 50 | msgid "robot" 51 | msgstr "Robot" 52 | 53 | #: models.py:38 54 | msgid "" 55 | "This should be a user agent string like 'Googlebot'. Enter an asterisk (*) " 56 | "for all user agents. For a full list look at the database of Web Robots." 58 | msgstr "Das sollte ein User Agent sein wie 'Googlebot'. Ein Asterisk (*) sollte für alle User Agents eingegeben werden. 
Eine lange Liste mit User Agents ist die Database of Web Robots." 59 | 60 | #: models.py:46 models.py:83 61 | msgid "allowed" 62 | msgstr "erlaubt" 63 | 64 | #: models.py:47 65 | msgid "The URLs which are allowed to be accessed by bots." 66 | msgstr "Die URLs die von Robots abgefragt werden dürfen." 67 | 68 | #: models.py:51 models.py:87 69 | msgid "disallowed" 70 | msgstr "nicht erlaubt" 71 | 72 | #: models.py:52 73 | msgid "The URLs which are not allowed to be accessed by bots." 74 | msgstr "Die URLs die von Robots nicht abgefragt werden dürfen." 75 | 76 | #: models.py:55 77 | msgid "sites" 78 | msgstr "Seiten" 79 | 80 | #: models.py:57 81 | msgid "crawl delay" 82 | msgstr "Abfrageabstand" 83 | 84 | #: models.py:59 85 | msgid "" 86 | "Between 0.1 and 99.0. This field is supported by some search engines and " 87 | "defines the delay between successive crawler accesses in seconds. If the " 88 | "crawler rate is a problem for your server, you can set the delay up to 5 or " 89 | "10 or a comfortable value for your server, but it's suggested to start with " 90 | "small values (0.5-1), and increase as needed to an acceptable value for your" 91 | " server. Larger delay values add more delay between successive crawl " 92 | "accesses and decrease the maximum crawl rate to your web server." 93 | msgstr "Zwischen 0.1 und 99.0. Dieses Feld wird von einigen Suchmaschinen unterstützt und definiert die Dauer der Verzögerung zwischen zwei aufeinanderfolgenden Anfragen des Crawlers. Sollte die Anzahl der Zugriffe durch Crawler ein Problem für Ihren Server darstellen, können Sie den Wert auf 5 oder 10, oder jeden anderen sinnvollen Wert heraufsetzen. Es ist allerdings empfohlen, mit kleinen Werten (0.5-1) anzufangen und dann langsam auf einen akzeptablen Wert zu erhöhen. Größere Abstände zwischen den Anfragen verringern die Anzahl der möglichen erfolgreichen Anfragen und damit die Anzahl aller Anfragen." 94 | 95 | #: models.py:75 96 | msgid "rule" 97 | msgstr "Regel" 98 | 99 | #: models.py:76 100 | msgid "rules" 101 | msgstr "Regeln" 102 | 103 | #: models.py:82 models.py:86 104 | msgid "and" 105 | msgstr "und" 106 | -------------------------------------------------------------------------------- /src/robots/locale/pt_BR/LC_MESSAGES/django.po: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the PACKAGE package. 4 | # 5 | # Translators: 6 | # Herson Hersonls , 2011 7 | # viniciuscainelli , 2011 8 | msgid "" 9 | msgstr "" 10 | "Project-Id-Version: django-robots\n" 11 | "Report-Msgid-Bugs-To: \n" 12 | "POT-Creation-Date: 2011-02-08 12:09+0100\n" 13 | "PO-Revision-Date: 2013-11-20 09:28+0000\n" 14 | "Last-Translator: Jannis Leidel \n" 15 | "Language-Team: Portuguese (Brazil) (http://www.transifex.com/projects/p/django-robots/language/pt_BR/)\n" 16 | "MIME-Version: 1.0\n" 17 | "Content-Type: text/plain; charset=UTF-8\n" 18 | "Content-Transfer-Encoding: 8bit\n" 19 | "Language: pt_BR\n" 20 | "Plural-Forms: nplurals=2; plural=(n > 1);\n" 21 | 22 | #: admin.py:11 23 | msgid "URL patterns" 24 | msgstr "Padrões de URL" 25 | 26 | #: admin.py:12 27 | msgid "Advanced options" 28 | msgstr "Opções avançadas" 29 | 30 | #: forms.py:12 31 | msgid "Please specify at least one allowed or dissallowed URL." 32 | msgstr "Por favor, especifique pelo menos uma URL permitida ou não permitida." 
33 | 34 | #: models.py:11 35 | msgid "pattern" 36 | msgstr "padrão" 37 | 38 | #: models.py:12 39 | msgid "" 40 | "Case-sensitive. A missing trailing slash does also match to files which " 41 | "start with the name of the pattern, e.g., '/admin' matches /admin.html too. " 42 | "Some major search engines allow an asterisk (*) as a wildcard and a dollar " 43 | "sign ($) to match the end of the URL, e.g., '/*.jpg$'." 44 | msgstr "Case-sensitive. A falta de uma barra também corresponde a arquivos que começam com o nome do padrão, por exemplo, '/admin' corresponde também a /admin.html. Alguns dos grandes sites de buscas permitem um asterisco (*) como curinga e um símbolo dólar ($) corresponder ao fim da URL, por exemplo, '/*.jpg$'." 45 | 46 | #: models.py:19 models.py:20 47 | msgid "url" 48 | msgstr "url" 49 | 50 | #: models.py:37 51 | msgid "robot" 52 | msgstr "robô" 53 | 54 | #: models.py:38 55 | msgid "" 56 | "This should be a user agent string like 'Googlebot'. Enter an asterisk (*) " 57 | "for all user agents. For a full list look at the database of Web Robots." 59 | msgstr "Este deve ser uma string com o agente de usuário, assim como 'Googlebot'. Insira um asterisco (*) para todos os agentes. Para uma lista completa consulte no banco de dados de Robôs Web." 60 | 61 | #: models.py:46 models.py:83 62 | msgid "allowed" 63 | msgstr "permitido" 64 | 65 | #: models.py:47 66 | msgid "The URLs which are allowed to be accessed by bots." 67 | msgstr "As URLs que são permitidos para serem acessados ​​por robôs." 68 | 69 | #: models.py:51 models.py:87 70 | msgid "disallowed" 71 | msgstr "não permitido" 72 | 73 | #: models.py:52 74 | msgid "The URLs which are not allowed to be accessed by bots." 75 | msgstr "As URLs que não são permitidos para serem acessados ​​por robôs." 76 | 77 | #: models.py:55 78 | msgid "sites" 79 | msgstr "sites" 80 | 81 | #: models.py:57 82 | msgid "crawl delay" 83 | msgstr "atraso de rastreio" 84 | 85 | #: models.py:59 86 | msgid "" 87 | "Between 0.1 and 99.0. This field is supported by some search engines and " 88 | "defines the delay between successive crawler accesses in seconds. If the " 89 | "crawler rate is a problem for your server, you can set the delay up to 5 or " 90 | "10 or a comfortable value for your server, but it's suggested to start with " 91 | "small values (0.5-1), and increase as needed to an acceptable value for your" 92 | " server. Larger delay values add more delay between successive crawl " 93 | "accesses and decrease the maximum crawl rate to your web server." 94 | msgstr "Entre 0.1 e 99.0. Este campo é suportado por alguns motores de busca e define o intervalo entre sucessivos acessos pelo rastreador em segundos. Se a taxa de rastreador é um problema para o seu servidor, você pode definir o atraso de até 5 ou 10 ou um valor confortável para o seu servidor, mas é sugerido começar com valores pequenos (0,5-1), e aumentar conforme necessário, para um valor aceitável para o seu servidor. Maiores valores de atraso adicionam mais demora entre sucessivos acessos de rastreamento e diminui a taxa máxima de rastreamento para o seu servidor web." 
95 | 96 | #: models.py:75 97 | msgid "rule" 98 | msgstr "regra" 99 | 100 | #: models.py:76 101 | msgid "rules" 102 | msgstr "regras" 103 | 104 | #: models.py:82 models.py:86 105 | msgid "and" 106 | msgstr "e" 107 | -------------------------------------------------------------------------------- /src/robots/locale/fr/LC_MESSAGES/django.po: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the PACKAGE package. 4 | # 5 | # Translators: 6 | # tim , 2011 7 | # tim , 2012 8 | msgid "" 9 | msgstr "" 10 | "Project-Id-Version: django-robots\n" 11 | "Report-Msgid-Bugs-To: \n" 12 | "POT-Creation-Date: 2011-02-08 12:09+0100\n" 13 | "PO-Revision-Date: 2013-11-20 09:28+0000\n" 14 | "Last-Translator: Jannis Leidel \n" 15 | "Language-Team: French (http://www.transifex.com/projects/p/django-robots/language/fr/)\n" 16 | "MIME-Version: 1.0\n" 17 | "Content-Type: text/plain; charset=UTF-8\n" 18 | "Content-Transfer-Encoding: 8bit\n" 19 | "Language: fr\n" 20 | "Plural-Forms: nplurals=2; plural=(n > 1);\n" 21 | 22 | #: admin.py:11 23 | msgid "URL patterns" 24 | msgstr "formats d'URL" 25 | 26 | #: admin.py:12 27 | msgid "Advanced options" 28 | msgstr "Options avancées" 29 | 30 | #: forms.py:12 31 | msgid "Please specify at least one allowed or dissallowed URL." 32 | msgstr "Veuillez spécifier au moins une URL autorisée ou non." 33 | 34 | #: models.py:11 35 | msgid "pattern" 36 | msgstr "pattern" 37 | 38 | #: models.py:12 39 | msgid "" 40 | "Case-sensitive. A missing trailing slash does also match to files which " 41 | "start with the name of the pattern, e.g., '/admin' matches /admin.html too. " 42 | "Some major search engines allow an asterisk (*) as a wildcard and a dollar " 43 | "sign ($) to match the end of the URL, e.g., '/*.jpg$'." 44 | msgstr "La casse est significative. Une barre oblique manquant en fin de chaine fera aussi correspondre les fichiers qui commencent avec le nom de la pattern, par exemple '/admin' correspondra aussi à /admin.html. Certains moteurs de recherche important permettent aussi d'utiliser l'astérisque (*) en temps que joker et un signe dollar ($) pour correspondre à la fin de l'URL, par exemple, '/*.jpg$'." 45 | 46 | #: models.py:19 models.py:20 47 | msgid "url" 48 | msgstr "url" 49 | 50 | #: models.py:37 51 | msgid "robot" 52 | msgstr "robot" 53 | 54 | #: models.py:38 55 | msgid "" 56 | "This should be a user agent string like 'Googlebot'. Enter an asterisk (*) " 57 | "for all user agents. For a full list look at the database of Web Robots." 59 | msgstr "Ceci doit être une chaine user agent comme par exemple 'Googlebot'. Entrez une astérisque (*) pour tout les user agents. Pour une liste complète réfèrez-vous à la base de données des Web Robots." 60 | 61 | #: models.py:46 models.py:83 62 | msgid "allowed" 63 | msgstr "permis" 64 | 65 | #: models.py:47 66 | msgid "The URLs which are allowed to be accessed by bots." 67 | msgstr "Il est permis aux robots web d'accéder à ces URLs." 68 | 69 | #: models.py:51 models.py:87 70 | msgid "disallowed" 71 | msgstr "interdit" 72 | 73 | #: models.py:52 74 | msgid "The URLs which are not allowed to be accessed by bots." 75 | msgstr "Il n'est pas permis aux robots web d'accéder à ces URLs." 
76 | 77 | #: models.py:55 78 | msgid "sites" 79 | msgstr "sites" 80 | 81 | #: models.py:57 82 | msgid "crawl delay" 83 | msgstr "délai de parcours du site" 84 | 85 | #: models.py:59 86 | msgid "" 87 | "Between 0.1 and 99.0. This field is supported by some search engines and " 88 | "defines the delay between successive crawler accesses in seconds. If the " 89 | "crawler rate is a problem for your server, you can set the delay up to 5 or " 90 | "10 or a comfortable value for your server, but it's suggested to start with " 91 | "small values (0.5-1), and increase as needed to an acceptable value for your" 92 | " server. Larger delay values add more delay between successive crawl " 93 | "accesses and decrease the maximum crawl rate to your web server." 94 | msgstr "Entre 0.1 et 99.0. Ce champ est reconnu par certains moteur de recherche comme le délai entre 2 accès du robot. Si la vitesse d'exploration du robot est un problème pour vous, vous pouvez configurer cette valeur entre 5 ou 10, ou toute autre valeur acceptable par votre serveur, mais il est suggéré que vous commenciez par une petite valeur (0.5 ou 1) et de l'augmenter si besoin. Un long délai entre chaque passage du robot augmente le délai entre chaque passage et diminue la vitesse d'exploration maximale du robot sur votre serveur web." 95 | 96 | #: models.py:75 97 | msgid "rule" 98 | msgstr "règle" 99 | 100 | #: models.py:76 101 | msgid "rules" 102 | msgstr "règles" 103 | 104 | #: models.py:82 models.py:86 105 | msgid "and" 106 | msgstr "et" 107 | -------------------------------------------------------------------------------- /src/robots/locale/es_ES/LC_MESSAGES/django.po: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the PACKAGE package. 4 | # 5 | # Translators: 6 | # javisantana , 2011 7 | # strel, 2013 8 | msgid "" 9 | msgstr "" 10 | "Project-Id-Version: django-robots\n" 11 | "Report-Msgid-Bugs-To: \n" 12 | "POT-Creation-Date: 2011-02-08 12:09+0100\n" 13 | "PO-Revision-Date: 2013-11-20 09:28+0000\n" 14 | "Last-Translator: Jannis Leidel \n" 15 | "Language-Team: Spanish (Spain) (http://www.transifex.com/projects/p/django-robots/language/es_ES/)\n" 16 | "MIME-Version: 1.0\n" 17 | "Content-Type: text/plain; charset=UTF-8\n" 18 | "Content-Transfer-Encoding: 8bit\n" 19 | "Language: es_ES\n" 20 | "Plural-Forms: nplurals=2; plural=(n != 1);\n" 21 | 22 | #: admin.py:11 23 | msgid "URL patterns" 24 | msgstr "Patrones de URL" 25 | 26 | #: admin.py:12 27 | msgid "Advanced options" 28 | msgstr "Opciones avanzadas" 29 | 30 | #: forms.py:12 31 | msgid "Please specify at least one allowed or dissallowed URL." 32 | msgstr "Por favor, especifique al menos una URL permitida o denegada" 33 | 34 | #: models.py:11 35 | msgid "pattern" 36 | msgstr "patrón" 37 | 38 | #: models.py:12 39 | msgid "" 40 | "Case-sensitive. A missing trailing slash does also match to files which " 41 | "start with the name of the pattern, e.g., '/admin' matches /admin.html too. " 42 | "Some major search engines allow an asterisk (*) as a wildcard and a dollar " 43 | "sign ($) to match the end of the URL, e.g., '/*.jpg$'." 44 | msgstr "Distingue mayúsculas y minúsculas. Sin una barra al final del patrón, los archivos que comiencen con la cadena del patrón también encajarán con este, ej.: '/admin' también encaja con '/admin.html'. 
Algunos de los grandes motores de búsqueda permiten usar un asterisco (*) como comodín, y un signo de dolar ($) para marcar el final de la URL, ej.: '/*.jpg$'." 45 | 46 | #: models.py:19 models.py:20 47 | msgid "url" 48 | msgstr "url" 49 | 50 | #: models.py:37 51 | msgid "robot" 52 | msgstr "robot" 53 | 54 | #: models.py:38 55 | msgid "" 56 | "This should be a user agent string like 'Googlebot'. Enter an asterisk (*) " 57 | "for all user agents. For a full list look at the database of Web Robots." 59 | msgstr "Aquí debe ir una cadena de agente de usuario (user agent) de un robot/araña web como 'Googlebot'. Introduzca un asterisco (*) para todos los agentes de usuario de robots. Para una lista completa mire en la base de datos de robots web" 60 | 61 | #: models.py:46 models.py:83 62 | msgid "allowed" 63 | msgstr "permitido" 64 | 65 | #: models.py:47 66 | msgid "The URLs which are allowed to be accessed by bots." 67 | msgstr "Las URLs a las que se permite que accedan las arañas web." 68 | 69 | #: models.py:51 models.py:87 70 | msgid "disallowed" 71 | msgstr "denegado" 72 | 73 | #: models.py:52 74 | msgid "The URLs which are not allowed to be accessed by bots." 75 | msgstr "Las URL a las que no se permite que accedan las arañas web." 76 | 77 | #: models.py:55 78 | msgid "sites" 79 | msgstr "sitios" 80 | 81 | #: models.py:57 82 | msgid "crawl delay" 83 | msgstr "intervalo entre chequeos de los robots" 84 | 85 | #: models.py:59 86 | msgid "" 87 | "Between 0.1 and 99.0. This field is supported by some search engines and " 88 | "defines the delay between successive crawler accesses in seconds. If the " 89 | "crawler rate is a problem for your server, you can set the delay up to 5 or " 90 | "10 or a comfortable value for your server, but it's suggested to start with " 91 | "small values (0.5-1), and increase as needed to an acceptable value for your" 92 | " server. Larger delay values add more delay between successive crawl " 93 | "accesses and decrease the maximum crawl rate to your web server." 94 | msgstr "Valor entre 0.1 y 99.0. Este campo lo soportan algunos motores de búsqueda y define el tiempo en segundos, entre los sucesivos accesos de las arañas web. Si el ritmo de acceso es un problema para el servidor, puede poner un valor mayor de 5 ó 10 o cualquiera que sea adecuado, pero se recomienda empezar con un valor pequeño, (0.5-1) para después incrementarlo hasta un valor aceptable para el servidor. Un tiempo mayor añade retraso entre las peticiones sucesivas de la araña y decrementa el valor máximo de peticiones por segundo al servidor web." 95 | 96 | #: models.py:75 97 | msgid "rule" 98 | msgstr "regla" 99 | 100 | #: models.py:76 101 | msgid "rules" 102 | msgstr "reglas" 103 | 104 | #: models.py:82 models.py:86 105 | msgid "and" 106 | msgstr "y" 107 | -------------------------------------------------------------------------------- /src/robots/locale/ru/LC_MESSAGES/django.po: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the PACKAGE package. 
4 | # 5 | # Translators: 6 | # alekam , 2011 7 | # Jannis Leidel , 2011 8 | msgid "" 9 | msgstr "" 10 | "Project-Id-Version: django-robots\n" 11 | "Report-Msgid-Bugs-To: \n" 12 | "POT-Creation-Date: 2011-02-08 12:09+0100\n" 13 | "PO-Revision-Date: 2013-11-20 09:28+0000\n" 14 | "Last-Translator: Jannis Leidel \n" 15 | "Language-Team: Russian (http://www.transifex.com/projects/p/django-robots/language/ru/)\n" 16 | "MIME-Version: 1.0\n" 17 | "Content-Type: text/plain; charset=UTF-8\n" 18 | "Content-Transfer-Encoding: 8bit\n" 19 | "Language: ru\n" 20 | "Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n" 21 | 22 | #: admin.py:11 23 | msgid "URL patterns" 24 | msgstr "Шаблоны URL" 25 | 26 | #: admin.py:12 27 | msgid "Advanced options" 28 | msgstr "Расширенные настройки" 29 | 30 | #: forms.py:12 31 | msgid "Please specify at least one allowed or dissallowed URL." 32 | msgstr "Укажите как минимум один URL-адрес (неважно разрешенный или нет)" 33 | 34 | #: models.py:11 35 | msgid "pattern" 36 | msgstr "шаблон" 37 | 38 | #: models.py:12 39 | msgid "" 40 | "Case-sensitive. A missing trailing slash does also match to files which " 41 | "start with the name of the pattern, e.g., '/admin' matches /admin.html too. " 42 | "Some major search engines allow an asterisk (*) as a wildcard and a dollar " 43 | "sign ($) to match the end of the URL, e.g., '/*.jpg$'." 44 | msgstr "Внимание: учитывается регистр!
Если в конце пропущен слэш, то под шаблон попадут все файлы, путь к которым начинается с таких же символов. Например: под шаблон \"/admin\" так же попадет \"/admin.html\".
Часть поисковых систем понимают звездочку (*) как произвольное количество любых символов и знак доллара ($) как символ конца URL. Например: \"/*.jpg$\"" 45 | 46 | #: models.py:19 models.py:20 47 | msgid "url" 48 | msgstr "URL-адрес" 49 | 50 | #: models.py:37 51 | msgid "robot" 52 | msgstr "робот" 53 | 54 | #: models.py:38 55 | msgid "" 56 | "This should be a user agent string like 'Googlebot'. Enter an asterisk (*) " 57 | "for all user agents. For a full list look at the database of Web Robots." 59 | msgstr "Название робота (User agent). Введите звездочку (*) для применения правил ко всем роботов. Полный список можно посмотреть в базе данных веб-ботов." 60 | 61 | #: models.py:46 models.py:83 62 | msgid "allowed" 63 | msgstr "разрешенные URL" 64 | 65 | #: models.py:47 66 | msgid "The URLs which are allowed to be accessed by bots." 67 | msgstr "URL адреса разрешенные для индексации поисковыми роботами." 68 | 69 | #: models.py:51 models.py:87 70 | msgid "disallowed" 71 | msgstr "запрещенные URL" 72 | 73 | #: models.py:52 74 | msgid "The URLs which are not allowed to be accessed by bots." 75 | msgstr "URL-адреса запрещенные для индексации поисковыми роботами." 76 | 77 | #: models.py:55 78 | msgid "sites" 79 | msgstr "" 80 | 81 | #: models.py:57 82 | msgid "crawl delay" 83 | msgstr "частота обновления" 84 | 85 | #: models.py:59 86 | msgid "" 87 | "Between 0.1 and 99.0. This field is supported by some search engines and " 88 | "defines the delay between successive crawler accesses in seconds. If the " 89 | "crawler rate is a problem for your server, you can set the delay up to 5 or " 90 | "10 or a comfortable value for your server, but it's suggested to start with " 91 | "small values (0.5-1), and increase as needed to an acceptable value for your" 92 | " server. Larger delay values add more delay between successive crawl " 93 | "accesses and decrease the maximum crawl rate to your web server." 94 | msgstr "Введите значение между 0.1 и 99.0. Этот параметр поддерживается некоторыми поисковыми системами и определяет задержку в секундах до следующего запроса робота. Если робот обнаруживает проблемы на вашем сервер, вы можите установить задержку от 5, 10 или более подходяще значение для вашего сервера, но лучше начинать с небольших значений (0.5-1), и постепенно увеличивать до достижения оптимальных значений для вашего сервера. Большие значения увеличивают время выгрузки роботом вашего сайта и могут отрицательно влиять на ваш рейтинг в данной системе." 95 | 96 | #: models.py:75 97 | msgid "rule" 98 | msgstr "правило индексации" 99 | 100 | #: models.py:76 101 | msgid "rules" 102 | msgstr "правила индексации" 103 | 104 | #: models.py:82 models.py:86 105 | msgid "and" 106 | msgstr "и" 107 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. 
dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. linkcheck to check all external links for integrity 37 | echo. doctest to run all doctests embedded in the documentation if enabled 38 | goto end 39 | ) 40 | 41 | if "%1" == "clean" ( 42 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 43 | del /q /s %BUILDDIR%\* 44 | goto end 45 | ) 46 | 47 | if "%1" == "html" ( 48 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 49 | if errorlevel 1 exit /b 1 50 | echo. 51 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 52 | goto end 53 | ) 54 | 55 | if "%1" == "dirhtml" ( 56 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 57 | if errorlevel 1 exit /b 1 58 | echo. 59 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 60 | goto end 61 | ) 62 | 63 | if "%1" == "singlehtml" ( 64 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 68 | goto end 69 | ) 70 | 71 | if "%1" == "pickle" ( 72 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished; now you can process the pickle files. 76 | goto end 77 | ) 78 | 79 | if "%1" == "json" ( 80 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished; now you can process the JSON files. 84 | goto end 85 | ) 86 | 87 | if "%1" == "htmlhelp" ( 88 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can run HTML Help Workshop with the ^ 92 | .hhp project file in %BUILDDIR%/htmlhelp. 93 | goto end 94 | ) 95 | 96 | if "%1" == "qthelp" ( 97 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 98 | if errorlevel 1 exit /b 1 99 | echo. 100 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 101 | .qhcp project file in %BUILDDIR%/qthelp, like this: 102 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\django-robots.qhcp 103 | echo.To view the help file: 104 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\django-robots.ghc 105 | goto end 106 | ) 107 | 108 | if "%1" == "devhelp" ( 109 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 110 | if errorlevel 1 exit /b 1 111 | echo. 112 | echo.Build finished. 113 | goto end 114 | ) 115 | 116 | if "%1" == "epub" ( 117 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 118 | if errorlevel 1 exit /b 1 119 | echo. 120 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 121 | goto end 122 | ) 123 | 124 | if "%1" == "latex" ( 125 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 
129 | goto end 130 | ) 131 | 132 | if "%1" == "text" ( 133 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The text files are in %BUILDDIR%/text. 137 | goto end 138 | ) 139 | 140 | if "%1" == "man" ( 141 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 145 | goto end 146 | ) 147 | 148 | if "%1" == "texinfo" ( 149 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 150 | if errorlevel 1 exit /b 1 151 | echo. 152 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 153 | goto end 154 | ) 155 | 156 | if "%1" == "gettext" ( 157 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 158 | if errorlevel 1 exit /b 1 159 | echo. 160 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 161 | goto end 162 | ) 163 | 164 | if "%1" == "changes" ( 165 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 166 | if errorlevel 1 exit /b 1 167 | echo. 168 | echo.The overview file is in %BUILDDIR%/changes. 169 | goto end 170 | ) 171 | 172 | if "%1" == "linkcheck" ( 173 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 174 | if errorlevel 1 exit /b 1 175 | echo. 176 | echo.Link check complete; look for any errors in the above output ^ 177 | or in %BUILDDIR%/linkcheck/output.txt. 178 | goto end 179 | ) 180 | 181 | if "%1" == "doctest" ( 182 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 183 | if errorlevel 1 exit /b 1 184 | echo. 185 | echo.Testing of doctests in the sources finished, look at the ^ 186 | results in %BUILDDIR%/doctest/output.txt. 187 | goto end 188 | ) 189 | 190 | :end 191 | -------------------------------------------------------------------------------- /CHANGES.rst: -------------------------------------------------------------------------------- 1 | CHANGES 2 | ======= 3 | 4 | master (unreleased) 5 | ------------------- 6 | *This is where upcoming changes will be.* 7 | 8 | Breaking change: ``Url.pattern`` now uses ``MaxLengthValidator`` 9 | """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" 10 | 11 | - ``Url.pattern`` now uses ``MaxLengthValidator`` to account for trailing 12 | slash. Thank you @PetrDlouhy in #133 for #132. 13 | 14 | See also: https://docs.djangoproject.com/en/4.2/ref/validators/#maxlengthvalidator 15 | 16 | 6.1 (2023-09-07) 17 | ---------------- 18 | 19 | Compatibility 20 | """"""""""""" 21 | 22 | - Fix warning make in regards to ``pkg_resources`` in Python 3.11+, (#146, 23 | thank you @blag) 24 | 25 | Maintenance 26 | """"""""""" 27 | 28 | - Fix typos via `typos`_ 29 | 30 | - Special thanks to typo fix from @apoorvaeternity (Apoorva Pandey) in 31 | #145 32 | 33 | .. _typos: https://github.com/crate-ci/typos 34 | 35 | CI updates: 36 | 37 | - Python: Support Python 3.11 and 3.12 in test matrix (#146, thank you @blag) 38 | - Django: Support Django 4.1 and 4.2 in test matrix (#146, thank you @blag) 39 | 40 | 6.0 (2023-09-07) 41 | ---------------- 42 | 43 | Breaking change: Missing ``DEFAULT_AUTO_FIELD`` 44 | """"""""""""""""""""""""""""""""""""""""""""""" 45 | This sets a ``DEFAULT_AUTO_FIELD`` to ``BigAutoField`` for robots in ``AppConfig``. Previously, 46 | there was no app configuration or setting for this. 
(#134, thank you @jan-szejko-steelseries) 47 | 48 | Robots 5.0 included a ``BigAutoField`` migration file for django 3.2 support (see #112), 49 | however, this caused an extra migration to be created to set ``django.db.models.AutoField`` 50 | for certain users (see #124). 51 | 52 | **Breaking: Migration issues** 53 | 54 | New django sites should work fine. 55 | 56 | *Existing sites that manually created migrations for robots 5.0:* 57 | 58 | If you ran ``./manage.py makemigrations`` to bypass the #112 issue, you may need to resolve 59 | this manually if a custom migration you made for robots was applied in ``django_migrations``. 60 | 61 | Resources: 62 | 63 | - https://docs.djangoproject.com/en/4.1/releases/3.2/#customizing-type-of-auto-created-primary-keys 64 | - https://docs.djangoproject.com/en/3.2/ref/settings/#std-setting-DEFAULT_AUTO_FIELD 65 | 66 | Maintenance 67 | """"""""""" 68 | - Remove django-south migrations folder (#138) 69 | 70 | 5.0 (2022-01-08) 71 | ---------------- 72 | - Dropped support for Python 2.7, 3.5, and 3.6 73 | - Added support for Python 3.9 74 | - Added support for Python 3.10 (#113), thank you @Andrew-Chen-Wang 75 | - Dropped support for Django versions 2.1 and below 76 | - Dropped support for Django 3.0 77 | - Added support for Django 3.1 78 | - Added support for Django 3.2 79 | - Added support for Django 4.0 (#112), thank you @umarmughal824 80 | - Improve documentation for Django 4.0 (#119), thank you @sergioisidoro 81 | - Sync files with @jazzband/.github 82 | - Add *.pre-commit-config.yaml* and run autofixes (#122) 83 | 84 | 4.0 (2020-01-04) 85 | ----------------- 86 | 87 | - Support for Django 2.1, 2.2, 3.0 and Python 3.7 and 3.8. 88 | - Updated the requirements for dropped support of six in Django 3. 89 | - Restructure test setup to use setuptools-scm and more modern Python 90 | patterns. 91 | 92 | 3.1.0 (2017-12-11) 93 | ------------------ 94 | 95 | - Add this changelog file 96 | - Support for Django 2.0 via GH-83, fixes GH-81, GH-79 97 | - Drop support for Django 1.10 and below. For older Django versions, 98 | use django-robots 3.0.0 and below 99 | - Fix docs to include README in index instead of duplicating 100 | 101 | 3.0 (2017-02-28) 102 | ---------------- 103 | 104 | - Dropped support for Django < 1.8 105 | - Added support for Django 1.10 / 1.11 106 | - Improved admin changeform 107 | - Added support for protocol prefix to Host directive 108 | - Added support for sitemap named views (for non standard sitemap views) 109 | - Fixed an error which resulted in doubling the scheme for sitemap 110 | - Fixed support for cached sitemaps 111 | 112 | 2.0 (2016-02-28) 113 | ---------------- 114 | 115 | - Dropped support for Django 1.5 116 | - Added support for Django 1.9 117 | - Improved code / metadata quality 118 | - Added Host directive 119 | - Added support to detect current site via http host var 120 | - Added filter_horizontal for for allowed and disallowed 121 | - Fixed error in which get_sitemap_urls modifies SITEMAP_URLS 122 | - Url patterns marked as safe in template 123 | - disabled localization of decimal fields in template 124 | 125 | 1.1 (2015-05-12) 126 | ---------------- 127 | 128 | - Fixed compatibility to Django 1.7 and 1.8. 129 | 130 | - Moved South migrations into different subdirectory so South>=1.0 is needed. 131 | 132 | 1.0 (2014-01-16) 133 | ---------------- 134 | 135 | - *BACKWARDS-INCOMPATIBLE* change: The default behaviour of this app has 136 | changed to **allow all bots** from the previous opposite behavior. 
137 | 138 | - Fixed some backward compatibility issues. 139 | 140 | - Updated existing translations (Danish, German, French, 141 | Portuguese (Brasil), Russian). 142 | 143 | - Added Greek, Spanish (Spain), Japanese, Dutch, Slovak and Ukrainian 144 | translations. 145 | 146 | 0.9.2 (2013-03-24) 147 | ------------------ 148 | 149 | - Fixed compatibility with Django 1.5. Thanks, Russell Keith-Magee. 150 | 151 | 0.9.1 (2012-11-23) 152 | ------------------ 153 | 154 | - Fixed argument signature in new class based view. Thanks, mkai. 155 | 156 | 0.9 (2012-11-21) 157 | ---------------- 158 | 159 | - Deprecated ``ROBOTS_SITEMAP_URL`` setting. Use ``ROBOTS_SITEMAP_URLS`` 160 | instead. 161 | 162 | - Refactored ``rule_list`` view to be class based. django-robots now 163 | requires Django >= 1.3. 164 | 165 | - Stop returning 404 pages if there are no Rules setup on the site. Instead 166 | disallow access for all robots. 167 | 168 | - Added an initial South migration. If you're using South you have to "fake" 169 | the initial database migration:: 170 | 171 | python manage.py migrate --fake robots 0001 172 | 173 | - Added initial Sphinx docs. 174 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 16 | 17 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 18 | 19 | help: 20 | @echo "Please use \`make ' where is one of" 21 | @echo " html to make standalone HTML files" 22 | @echo " dirhtml to make HTML files named index.html in directories" 23 | @echo " singlehtml to make a single large HTML file" 24 | @echo " pickle to make pickle files" 25 | @echo " json to make JSON files" 26 | @echo " htmlhelp to make HTML files and a HTML help project" 27 | @echo " qthelp to make HTML files and a qthelp project" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 31 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 32 | @echo " text to make text files" 33 | @echo " man to make manual pages" 34 | @echo " texinfo to make Texinfo files" 35 | @echo " info to make Texinfo files and run them through makeinfo" 36 | @echo " gettext to make PO message catalogs" 37 | @echo " changes to make an overview of all changed/added/deprecated items" 38 | @echo " linkcheck to check all external links for integrity" 39 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 40 | 41 | clean: 42 | -rm -rf $(BUILDDIR)/* 43 | 44 | html: 45 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 46 | @echo 47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 48 | 49 | dirhtml: 50 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 51 | @echo 52 | @echo "Build finished. 
The HTML pages are in $(BUILDDIR)/dirhtml." 53 | 54 | singlehtml: 55 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 56 | @echo 57 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 58 | 59 | pickle: 60 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 61 | @echo 62 | @echo "Build finished; now you can process the pickle files." 63 | 64 | json: 65 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 66 | @echo 67 | @echo "Build finished; now you can process the JSON files." 68 | 69 | htmlhelp: 70 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 71 | @echo 72 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 73 | ".hhp project file in $(BUILDDIR)/htmlhelp." 74 | 75 | qthelp: 76 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 77 | @echo 78 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 79 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 80 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/django-robots.qhcp" 81 | @echo "To view the help file:" 82 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/django-robots.qhc" 83 | 84 | devhelp: 85 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 86 | @echo 87 | @echo "Build finished." 88 | @echo "To view the help file:" 89 | @echo "# mkdir -p $$HOME/.local/share/devhelp/django-robots" 90 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/django-robots" 91 | @echo "# devhelp" 92 | 93 | epub: 94 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 95 | @echo 96 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 97 | 98 | latex: 99 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 100 | @echo 101 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 102 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 103 | "(use \`make latexpdf' here to do that automatically)." 104 | 105 | latexpdf: 106 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 107 | @echo "Running LaTeX files through pdflatex..." 108 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 109 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 110 | 111 | text: 112 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 113 | @echo 114 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 115 | 116 | man: 117 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 118 | @echo 119 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 120 | 121 | texinfo: 122 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 123 | @echo 124 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 125 | @echo "Run \`make' in that directory to run these through makeinfo" \ 126 | "(use \`make info' here to do that automatically)." 127 | 128 | info: 129 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 130 | @echo "Running Texinfo files through makeinfo..." 131 | make -C $(BUILDDIR)/texinfo info 132 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 133 | 134 | gettext: 135 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 136 | @echo 137 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 138 | 139 | changes: 140 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 141 | @echo 142 | @echo "The overview file is in $(BUILDDIR)/changes." 
143 | 144 | linkcheck: 145 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 146 | @echo 147 | @echo "Link check complete; look for any errors in the above output " \ 148 | "or in $(BUILDDIR)/linkcheck/output.txt." 149 | 150 | doctest: 151 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 152 | @echo "Testing of doctests in the sources finished, look at the " \ 153 | "results in $(BUILDDIR)/doctest/output.txt." 154 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | ======================================= 2 | Robots exclusion application for Django 3 | ======================================= 4 | 5 | .. include:: ../README.rst 6 | 7 | Contents: 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | 12 | history 13 | 14 | 15 | Installation 16 | ============ 17 | 18 | Use your favorite Python installer to install it from PyPI:: 19 | 20 | pip install django-robots 21 | 22 | Or get the source from the application site at:: 23 | 24 | http://github.com/jazzband/django-robots/ 25 | 26 | To install the application, follow these steps: 27 | 28 | 1. Add ``'robots'`` to your INSTALLED_APPS_ setting. 29 | 2. Make sure ``'django.template.loaders.app_directories.Loader'`` 30 | is in your TEMPLATES_ setting. It's in there by default, so 31 | you'll only need to change this if you've changed that setting. 32 | 3. Make sure you've installed the `sites framework`_. 33 | 4. Run the ``migrate`` management command. 34 | 35 | .. _INSTALLED_APPS: http://docs.djangoproject.com/en/dev/ref/settings/#installed-apps 36 | .. _TEMPLATES: https://docs.djangoproject.com/en/dev/ref/settings/#templates 37 | .. _sites framework: http://docs.djangoproject.com/en/dev/ref/contrib/sites/ 38 | 39 | Sitemaps 40 | -------- 41 | 42 | By default, a ``Sitemap`` statement is automatically added to the resulting 43 | robots.txt by reverse matching the URL of the installed `Sitemap contrib app`_. 44 | This is especially useful if you allow every robot to access your whole site, 45 | since robots then get the URLs explicitly instead of having to follow every link. 46 | 47 | To omit the sitemap link from the output, change the ``ROBOTS_USE_SITEMAP`` 48 | setting in your Django settings file to:: 49 | 50 | ROBOTS_USE_SITEMAP = False 51 | 52 | If you want to use specific sitemap URLs instead of the one that is 53 | automatically discovered, change the ``ROBOTS_SITEMAP_URLS`` setting to:: 54 | 55 | ROBOTS_SITEMAP_URLS = [ 56 | 'http://www.example.com/sitemap.xml', 57 | ] 58 | 59 | If the sitemap is wrapped in a decorator, reversing the dotted path to 60 | discover the sitemap URL does not work. 61 | To overcome this, provide a name for the sitemap instance in ``urls.py``:: 62 | 63 | urlpatterns = [ 64 | ... 65 | re_path(r'^sitemap.xml$', cache_page(60)(sitemap_view), {'sitemaps': [...]}, name='cached-sitemap'), 66 | ... 67 | ] 68 | 69 | and inform django-robots about the view name by adding the following setting:: 70 | 71 | ROBOTS_SITEMAP_VIEW_NAME = 'cached-sitemap' 72 | 73 | 74 | .. _Sitemap contrib app: http://docs.djangoproject.com/en/dev/ref/contrib/sitemaps/ 75 | 76 | Also use ``ROBOTS_SITEMAP_VIEW_NAME`` if you use custom sitemap views (e.g. Wagtail custom sitemaps).
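Putting the sitemap-related settings side by side, a minimal ``settings.py`` sketch for the cached-sitemap case above could look like this (the view name ``'cached-sitemap'`` and the example URL are placeholders; adjust them to your project)::

    # Sketch of the sitemap-related django-robots settings.
    ROBOTS_USE_SITEMAP = True                    # keep the Sitemap: line (the default)
    ROBOTS_SITEMAP_VIEW_NAME = 'cached-sitemap'  # name given to the sitemap URL pattern
    # Or skip URL reversing entirely and list the sitemap(s) explicitly:
    # ROBOTS_SITEMAP_URLS = ['http://www.example.com/sitemap.xml']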
77 | 78 | Initialization 79 | ============== 80 | 81 | To activate robots.txt generation on your Django site, add this line to your 82 | URLconf_:: 83 | 84 | re_path(r'^robots\.txt', include('robots.urls')), 85 | 86 | This tells Django to build a robots.txt when a robot accesses ``/robots.txt``. 87 | Then, `sync your database`_ to create the necessary tables and create 88 | ``Rule`` objects in the admin interface or via the shell. 89 | 90 | .. _URLconf: http://docs.djangoproject.com/en/dev/topics/http/urls/ 91 | .. _sync your database: http://docs.djangoproject.com/en/dev/ref/django-admin/#syncdb 92 | 93 | Rules 94 | ===== 95 | 96 | ``Rule`` - defines an abstract rule which is used to respond to crawling web 97 | robots, using the `robots exclusion protocol`_, a.k.a. robots.txt. 98 | 99 | You can link multiple URL patterns to a rule to allow or disallow the robot 100 | identified by its user agent to access the given URLs. 101 | 102 | The crawl delay field is supported by some search engines and defines the 103 | delay between successive crawler accesses in seconds. If the crawl rate is a 104 | problem for your server, you can set the delay to 5 or 10 seconds, or whatever 105 | value suits your server; it's suggested to start with small values (0.5-1) 106 | and increase them only as needed. Larger delay values add more delay between 107 | successive crawl accesses and decrease the maximum crawl rate to your 108 | web server. 109 | 110 | The `sites framework`_ is used to enable multiple robots.txt per Django instance. 111 | If no rule exists, it automatically allows every web robot access to every URL. 112 | 113 | Please have a look at the `database of web robots`_ for a full list of 114 | existing web robots' user agent strings. 115 | 116 | .. _robots exclusion protocol: http://en.wikipedia.org/wiki/Robots_exclusion_standard 117 | .. _'sites' framework: http://www.djangoproject.com/documentation/sites/ 118 | .. _database of web robots: http://www.robotstxt.org/db.html 119 | 120 | Host directive 121 | ============== 122 | By default, a ``Host`` statement is automatically added to the resulting 123 | robots.txt to avoid mirrors and select the main website properly. 124 | 125 | To change the default behaviour and omit the host directive, 126 | change the ``ROBOTS_USE_HOST`` setting in your Django settings file to:: 127 | 128 | ROBOTS_USE_HOST = False 129 | 130 | If you want to prefix the domain with the current request protocol 131 | (**http** or **https**, as in ``Host: https://www.mysite.com``), add this setting:: 132 | 133 | ROBOTS_USE_SCHEME_IN_HOST = True 134 | 135 | URLs 136 | ==== 137 | 138 | ``Url`` - defines a case-sensitive and exact URL pattern which is used to 139 | allow or disallow access for web robots. 140 | 141 | A pattern without a trailing slash also matches files which start with the 142 | name of the given pattern, e.g., ``'/admin'`` matches ``/admin.html`` too. 143 | 144 | Some major search engines allow an asterisk (``*``) as a wildcard to match any 145 | sequence of characters and a dollar sign (``$``) to match the end of the URL, 146 | e.g., ``'/*.jpg$'`` can be used to match all jpeg files. 147 | 148 | Caching 149 | ======= 150 | 151 | You can optionally cache the generation of the ``robots.txt``.
Add or change 152 | the ``ROBOTS_CACHE_TIMEOUT`` setting with a value in seconds in your Django 153 | settings file:: 154 | 155 | ROBOTS_CACHE_TIMEOUT = 60*60*24 156 | 157 | This tells Django to cache the ``robots.txt`` for 24 hours (86400 seconds). 158 | The default value is ``None`` (no caching). 159 | 160 | Developing 161 | ========== 162 | 163 | To run tests, create a virtualenv: 164 | 165 | virtualenv --python 3.10 .venv 166 | 167 | Source it: 168 | 169 | . .venv/bin/activate 170 | 171 | # or if you use source 172 | source .venv/bin/activate 173 | 174 | Install: 175 | 176 | pip install -e . 177 | pip install -r tests/requirements.txt 178 | pip install django 179 | 180 | Run the tests: 181 | 182 | env PYTHONPATH=. DJANGO_SETTINGS_MODULE=tests.settings django-admin test robots -v2 183 | 184 | Bugs and feature requests 185 | ========================= 186 | 187 | As always your mileage may vary, so please don't hesitate to send feature 188 | requests and bug reports: 189 | 190 | https://github.com/jazzband/django-robots/issues 191 | -------------------------------------------------------------------------------- /src/robots/tests.py: -------------------------------------------------------------------------------- 1 | from io import StringIO 2 | 3 | from django.contrib.auth import SESSION_KEY 4 | from django.contrib.auth.models import AnonymousUser 5 | from django.contrib.sites.models import Site 6 | from django.core.exceptions import ValidationError 7 | from django.http import SimpleCookie 8 | from django.test import RequestFactory, TestCase 9 | from django.utils.encoding import force_str 10 | 11 | from robots.models import Rule, Url 12 | from robots.views import RuleList 13 | 14 | 15 | class ViewTest(TestCase): 16 | @classmethod 17 | def setUpClass(cls): 18 | super().setUpClass() 19 | cls.request_factory = RequestFactory() 20 | 21 | def get_request(self, path, user, lang, secure=False): 22 | from django.contrib.auth.models import AnonymousUser 23 | 24 | request = self.request_factory.get(path, secure=secure) 25 | 26 | if not user: 27 | user = AnonymousUser() 28 | request.user = user 29 | request._cached_user = user 30 | request.session = {} 31 | if secure: 32 | request.environ["SERVER_PORT"] = "443" 33 | request.environ["wsgi.url_scheme"] = "https" 34 | if user.is_authenticated: 35 | request.session[SESSION_KEY] = user._meta.pk.value_to_string(user) 36 | request.cookies = SimpleCookie() 37 | request.errors = StringIO() 38 | request.LANGUAGE_CODE = lang 39 | if request.method == "POST": 40 | request._dont_enforce_csrf_checks = True 41 | return request 42 | 43 | def setUp(self): 44 | super().setUp() 45 | site_1 = Site.objects.get(domain="example.com") 46 | site_2 = Site.objects.create(domain="https://sub.example.com") 47 | 48 | url_admin = Url.objects.create(pattern="/admin") 49 | url_root = Url.objects.create(pattern="/") 50 | url_media = Url.objects.create(pattern="/media") 51 | 52 | rule_all = Rule.objects.create(robot="*", crawl_delay=10) 53 | rule_1 = Rule.objects.create(robot="Bing", crawl_delay=20) 54 | rule_2 = Rule.objects.create(robot="Googlebot") 55 | 56 | rule_all.allowed.add(url_root) 57 | for url in [url_admin, url_media]: 58 | rule_all.disallowed.add(url) 59 | for site in [site_1, site_2]: 60 | rule_all.sites.add(site) 61 | 62 | rule_1.allowed.add(url_root) 63 | rule_1.disallowed.add(url_admin) 64 | rule_1.sites.add(site_1) 65 | 66 | rule_2.disallowed.add(url_media) 67 | rule_2.sites.add(site_2) 68 | 69 | def _test_stanzas(self, stanzas): 70 | for stanza in stanzas: 71 | if 
stanza.startswith("User-agent: *"): 72 | self.assertTrue("Allow: /" in stanza) 73 | self.assertTrue("Disallow: /admin" in stanza) 74 | self.assertTrue("Disallow: /media" in stanza) 75 | self.assertTrue("Crawl-delay: 10" in stanza) 76 | elif stanza.startswith("User-agent: Bing"): 77 | self.assertTrue("Allow: /" in stanza) 78 | self.assertTrue("Disallow: /admin" in stanza) 79 | self.assertFalse("Disallow: /media" in stanza) 80 | self.assertFalse("Crawl-delay: 10" in stanza) 81 | self.assertTrue("Crawl-delay: 20" in stanza) 82 | elif stanza.startswith("User-agent: Googlebot"): 83 | self.assertFalse("Allow: /" in stanza) 84 | self.assertFalse("Disallow: /admin" in stanza) 85 | self.assertTrue("Disallow: /media" in stanza) 86 | self.assertFalse("Crawl-delay: 10" in stanza) 87 | self.assertFalse("Crawl-delay: 20" in stanza) 88 | self.assertFalse("Crawl-delay" in stanza) 89 | 90 | def test_view_site_1(self): 91 | request = self.get_request(path="/", user=AnonymousUser(), lang="en") 92 | 93 | view_obj = RuleList() 94 | view_obj.request = request 95 | view_obj.current_site = view_obj.get_current_site(request) 96 | view_obj.object_list = view_obj.get_queryset() 97 | context = view_obj.get_context_data(object_list=view_obj.object_list) 98 | self.assertEqual(context["object_list"].count(), 2) 99 | self.assertTrue(context["object_list"].filter(robot="*").exists()) 100 | self.assertTrue(context["object_list"].filter(robot="Bing").exists()) 101 | 102 | response = view_obj.render_to_response(context) 103 | response.render() 104 | content = force_str(response.content) 105 | self.assertTrue("Sitemap: http://example.com/sitemap.xml" in content) 106 | stanzas = content.split("\n\n") 107 | self._test_stanzas(stanzas) 108 | 109 | def test_view_site_2(self): 110 | request = self.get_request(path="/", user=AnonymousUser(), lang="en") 111 | 112 | view_obj = RuleList() 113 | view_obj.request = request 114 | view_obj.current_site = Site.objects.get(pk=2) 115 | view_obj.object_list = view_obj.get_queryset() 116 | context = view_obj.get_context_data(object_list=view_obj.object_list) 117 | self.assertEqual(context["object_list"].count(), 2) 118 | self.assertTrue(context["object_list"].filter(robot="*").exists()) 119 | self.assertTrue(context["object_list"].filter(robot="Googlebot").exists()) 120 | 121 | response = view_obj.render_to_response(context) 122 | response.render() 123 | content = force_str(response.content) 124 | self.assertTrue("Sitemap: https://sub.example.com/sitemap.xml" in content) 125 | self.assertTrue("Host: https://sub.example.com" in content) 126 | stanzas = content.split("\n\n") 127 | self._test_stanzas(stanzas) 128 | 129 | def test_use_scheme_in_host_setting(self): 130 | request = self.get_request(path="/", user=AnonymousUser(), lang="en") 131 | 132 | view_obj = RuleList() 133 | view_obj.request = request 134 | view_obj.current_site = Site.objects.get(pk=1) 135 | view_obj.object_list = view_obj.get_queryset() 136 | 137 | with self.settings(ROBOTS_USE_HOST=True): 138 | with self.settings(ROBOTS_USE_SCHEME_IN_HOST=True): 139 | context = view_obj.get_context_data(object_list=view_obj.object_list) 140 | response = view_obj.render_to_response(context) 141 | response.render() 142 | content = force_str(response.content) 143 | self.assertTrue("Host: http://example.com" in content) 144 | with self.settings(ROBOTS_USE_SCHEME_IN_HOST=False): 145 | context = view_obj.get_context_data(object_list=view_obj.object_list) 146 | response = view_obj.render_to_response(context) 147 | response.render() 148 | content = 
force_str(response.content) 149 | self.assertTrue("Host: example.com" in content) 150 | 151 | with self.settings(ROBOTS_USE_HOST=False): 152 | context = view_obj.get_context_data(object_list=view_obj.object_list) 153 | response = view_obj.render_to_response(context) 154 | response.render() 155 | content = force_str(response.content) 156 | self.assertFalse("Host: example.com" in content) 157 | 158 | def test_cached_sitemap(self): 159 | request = self.get_request(path="/", user=AnonymousUser(), lang="en") 160 | 161 | view_obj = RuleList() 162 | view_obj.request = request 163 | view_obj.current_site = Site.objects.get(pk=1) 164 | view_obj.object_list = view_obj.get_queryset() 165 | context = view_obj.get_context_data(object_list=view_obj.object_list) 166 | response = view_obj.render_to_response(context) 167 | response.render() 168 | content = force_str(response.content) 169 | self.assertTrue("Sitemap: http://example.com/sitemap.xml" in content) 170 | 171 | with self.settings(ROBOTS_SITEMAP_VIEW_NAME="cached-sitemap"): 172 | context = view_obj.get_context_data(object_list=view_obj.object_list) 173 | response = view_obj.render_to_response(context) 174 | response.render() 175 | content = force_str(response.content) 176 | self.assertTrue("Sitemap: http://example.com/other/sitemap.xml" in content) 177 | 178 | 179 | class UrlModelTests(TestCase): 180 | def test_str(self): 181 | url = Url.objects.create(pattern="/") 182 | self.assertEqual(str(url), "/") 183 | 184 | def test_pattern(self): 185 | url = Url.objects.create(pattern="foo") 186 | self.assertEqual(url.pattern, "/foo") 187 | 188 | def test_pattern_slash(self): 189 | url = Url.objects.create(pattern="/foo") 190 | self.assertEqual(url.pattern, "/foo") 191 | 192 | def test_pattern_too_long(self): 193 | """Patern 256 chars long should not throw field too long error""" 194 | with self.assertRaises(ValidationError): 195 | r = Url(pattern="a" * 255) 196 | r.full_clean() 197 | r.save() 198 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # django-robots documentation build configuration file, created by 4 | # sphinx-quickstart on Wed Nov 21 11:54:26 2012. 5 | # 6 | # This file is execfile()d with the current directory set to its containing dir. 7 | # 8 | # Note that not all possible configuration values are present in this 9 | # autogenerated file. 10 | # 11 | # All configuration values have a default; values that are commented out 12 | # serve to show the default. 13 | import os 14 | import sys 15 | 16 | from pkg_resources import get_distribution 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | sys.path.insert(0, os.path.abspath(os.path.join("..", "src"))) 22 | 23 | # -- General configuration ----------------------------------------------------- 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | # needs_sphinx = '1.0' 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be extensions 29 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 30 | extensions = [] 31 | 32 | # Add any paths that contain templates here, relative to this directory. 
33 | templates_path = ["_templates"] 34 | 35 | # The suffix of source filenames. 36 | source_suffix = ".rst" 37 | 38 | # The encoding of source files. 39 | # source_encoding = 'utf-8-sig' 40 | 41 | # The master toctree document. 42 | master_doc = "index" 43 | 44 | # General information about the project. 45 | project = "django-robots" 46 | copyright = "2008-, Jannis Leidel" 47 | 48 | # The version info for the project you're documenting, acts as replacement for 49 | # |version| and |release|, also used in various other places throughout the 50 | # built documents. 51 | # 52 | # The full version, including alpha/beta/rc tags. 53 | release = get_distribution("django-robots").version 54 | 55 | # The short X.Y version. 56 | version = ".".join(release.split(".")[:2]) 57 | 58 | # The language for content autogenerated by Sphinx. Refer to documentation 59 | # for a list of supported languages. 60 | # language = None 61 | 62 | # There are two options for replacing |today|: either, you set today to some 63 | # non-false value, then it is used: 64 | # today = '' 65 | # Else, today_fmt is used as the format for a strftime call. 66 | # today_fmt = '%B %d, %Y' 67 | 68 | # List of patterns, relative to source directory, that match files and 69 | # directories to ignore when looking for source files. 70 | exclude_patterns = ["_build"] 71 | 72 | # The reST default role (used for this markup: `text`) to use for all documents. 73 | # default_role = None 74 | 75 | # If true, '()' will be appended to :func: etc. cross-reference text. 76 | # add_function_parentheses = True 77 | 78 | # If true, the current module name will be prepended to all description 79 | # unit titles (such as .. function::). 80 | # add_module_names = True 81 | 82 | # If true, sectionauthor and moduleauthor directives will be shown in the 83 | # output. They are ignored by default. 84 | # show_authors = False 85 | 86 | # The name of the Pygments (syntax highlighting) style to use. 87 | pygments_style = "sphinx" 88 | 89 | # A list of ignored prefixes for module index sorting. 90 | # modindex_common_prefix = [] 91 | 92 | 93 | # -- Options for HTML output --------------------------------------------------- 94 | 95 | # The theme to use for HTML and HTML Help pages. See the documentation for 96 | # a list of builtin themes. 97 | # html_theme = 'sphinx_rtd_theme' 98 | 99 | # Theme options are theme-specific and customize the look and feel of a theme 100 | # further. For a list of options available for each theme, see the 101 | # documentation. 102 | # html_theme_options = {} 103 | 104 | # Add any paths that contain custom themes here, relative to this directory. 105 | # html_theme_path = [] 106 | 107 | # The name for this set of Sphinx documents. If None, it defaults to 108 | # " v documentation". 109 | # html_title = None 110 | 111 | # A shorter title for the navigation bar. Default is the same as html_title. 112 | # html_short_title = None 113 | 114 | # The name of an image file (relative to this directory) to place at the top 115 | # of the sidebar. 116 | # html_logo = None 117 | 118 | # The name of an image file (within the static path) to use as favicon of the 119 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 120 | # pixels large. 121 | # html_favicon = None 122 | 123 | # Add any paths that contain custom static files (such as style sheets) here, 124 | # relative to this directory. They are copied after the builtin static files, 125 | # so a file named "default.css" will overwrite the builtin "default.css". 
126 | html_static_path = ["_static"] 127 | 128 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 129 | # using the given strftime format. 130 | # html_last_updated_fmt = '%b %d, %Y' 131 | 132 | # If true, SmartyPants will be used to convert quotes and dashes to 133 | # typographically correct entities. 134 | # html_use_smartypants = True 135 | 136 | # Custom sidebar templates, maps document names to template names. 137 | # html_sidebars = {} 138 | 139 | # Additional templates that should be rendered to pages, maps page names to 140 | # template names. 141 | # html_additional_pages = {} 142 | 143 | # If false, no module index is generated. 144 | # html_domain_indices = True 145 | 146 | # If false, no index is generated. 147 | # html_use_index = True 148 | 149 | # If true, the index is split into individual pages for each letter. 150 | # html_split_index = False 151 | 152 | # If true, links to the reST sources are added to the pages. 153 | # html_show_sourcelink = True 154 | 155 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 156 | # html_show_sphinx = True 157 | 158 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 159 | # html_show_copyright = True 160 | 161 | # If true, an OpenSearch description file will be output, and all pages will 162 | # contain a tag referring to it. The value of this option must be the 163 | # base URL from which the finished HTML is served. 164 | # html_use_opensearch = '' 165 | 166 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 167 | # html_file_suffix = None 168 | 169 | # Output file base name for HTML help builder. 170 | htmlhelp_basename = "django-robotsdoc" 171 | 172 | 173 | # -- Options for LaTeX output -------------------------------------------------- 174 | 175 | latex_elements = { 176 | # The paper size ('letterpaper' or 'a4paper'). 177 | #'papersize': 'letterpaper', 178 | # The font size ('10pt', '11pt' or '12pt'). 179 | #'pointsize': '10pt', 180 | # Additional stuff for the LaTeX preamble. 181 | #'preamble': '', 182 | } 183 | 184 | # Grouping the document tree into LaTeX files. List of tuples 185 | # (source start file, target name, title, author, documentclass [howto/manual]). 186 | latex_documents = [ 187 | ( 188 | "index", 189 | "django-robots.tex", 190 | "django-robots Documentation", 191 | "Jannis Leidel", 192 | "manual", 193 | ), 194 | ] 195 | 196 | # The name of an image file (relative to this directory) to place at the top of 197 | # the title page. 198 | # latex_logo = None 199 | 200 | # For "manual" documents, if this is true, then toplevel headings are parts, 201 | # not chapters. 202 | # latex_use_parts = False 203 | 204 | # If true, show page references after internal links. 205 | # latex_show_pagerefs = False 206 | 207 | # If true, show URL addresses after external links. 208 | # latex_show_urls = False 209 | 210 | # Documents to append as an appendix to all manuals. 211 | # latex_appendices = [] 212 | 213 | # If false, no module index is generated. 214 | # latex_domain_indices = True 215 | 216 | 217 | # -- Options for manual page output -------------------------------------------- 218 | 219 | # One entry per manual page. List of tuples 220 | # (source start file, name, description, authors, manual section). 221 | man_pages = [ 222 | ("index", "django-robots", "django-robots Documentation", ["Jannis Leidel"], 1) 223 | ] 224 | 225 | # If true, show URL addresses after external links. 
226 | # man_show_urls = False 227 | 228 | 229 | # -- Options for Texinfo output ------------------------------------------------ 230 | 231 | # Grouping the document tree into Texinfo files. List of tuples 232 | # (source start file, target name, title, author, 233 | # dir menu entry, description, category) 234 | texinfo_documents = [ 235 | ( 236 | "index", 237 | "django-robots", 238 | "django-robots Documentation", 239 | "Jannis Leidel", 240 | "django-robots", 241 | "One line description of project.", 242 | "Miscellaneous", 243 | ), 244 | ] 245 | 246 | # Documents to append as an appendix to all manuals. 247 | # texinfo_appendices = [] 248 | 249 | # If false, no module index is generated. 250 | # texinfo_domain_indices = True 251 | 252 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 253 | # texinfo_show_urls = 'footnote' 254 | --------------------------------------------------------------------------------