├── .coveragerc ├── .env ├── .github ├── ISSUE_TEMPLATE │ ├── config.yml │ ├── new-feature.yml │ └── report-a-bug.yml └── workflows │ ├── html-checks.yml │ ├── publish.yml │ └── python-app.yml ├── .gitignore ├── .htmlvalidate.json ├── .ruby-version ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Dockerfile ├── Gemfile ├── Gemfile.lock ├── LICENSE ├── README.md ├── api.yml ├── at ├── .api.py.swp ├── __init__.py ├── api.py └── utils │ ├── .validation.py.swp │ ├── __init__.py │ ├── abnf.py │ ├── authentication.py │ ├── file.py │ ├── iddiff.py │ ├── logs.py │ ├── net.py │ ├── processor.py │ ├── runner.py │ ├── text.py │ └── validation.py ├── constraints.txt ├── dev.Dockerfile ├── docker-compose.yml ├── docker ├── gunicorn.py ├── nginx-default-site.conf ├── supervisord.conf └── version.py ├── k8s ├── author-tools.yaml ├── kustomization.yaml └── secrets.yaml ├── package-lock.json ├── package.json ├── requirements.dev.txt ├── requirements.txt ├── static ├── abnf.html ├── about.html ├── clean-svg-ids.html ├── doc │ ├── LICENSE │ ├── favicon-16x16.png │ ├── favicon-32x32.png │ ├── index.html │ ├── oauth2-redirect.html │ ├── swagger-ui-bundle.js │ ├── swagger-ui-bundle.js.map │ ├── swagger-ui-es-bundle-core.js │ ├── swagger-ui-es-bundle-core.js.map │ ├── swagger-ui-es-bundle.js │ ├── swagger-ui-es-bundle.js.map │ ├── swagger-ui-standalone-preset.js │ ├── swagger-ui-standalone-preset.js.map │ ├── swagger-ui.css │ ├── swagger-ui.css.map │ ├── swagger-ui.js │ └── swagger-ui.js.map ├── favicon.ico ├── iddiff.html ├── idnits.html ├── index.html ├── rfcdiff.html ├── robots.txt ├── scripts │ ├── abnf.js │ ├── about.js │ ├── clean-svg-ids.js │ ├── iddiff.js │ ├── idnits.js │ ├── main.js │ ├── rfcdiff.js │ └── svgcheck.js ├── sitemap.xml ├── styles │ └── custom.css └── svgcheck.html └── tests ├── data ├── draft-doe-smoke-signals-00.rst ├── draft-iab-xml2rfc-02.txt ├── draft-smoke-signals-00.error.md ├── draft-smoke-signals-00.error.txt ├── draft-smoke-signals-00.error.xml ├── draft-smoke-signals-00.invalid.xml ├── draft-smoke-signals-00.md ├── draft-smoke-signals-00.mmark.md ├── draft-smoke-signals-00.odt ├── draft-smoke-signals-00.txt ├── draft-smoke-signals-00.v2.xml ├── draft-smoke-signals-00.xml ├── draft-smoke-signals-01.txt ├── draft-smoke-signals-01.xml ├── draft-smoke-signals-02.xml ├── ietf.svg ├── invalid.svg ├── name-error.abnf ├── name.abnf └── rfc8855.txt ├── test_api_abnf_extract.py ├── test_api_abnf_parse.py ├── test_api_clean_svg_ids.py ├── test_api_iddiff.py ├── test_api_idnits.py ├── test_api_render.py ├── test_api_svgcheck.py ├── test_api_validate.py ├── test_api_version.py ├── test_utils_abnf.py ├── test_utils_authentication.py ├── test_utils_file.py ├── test_utils_iddiff.py ├── test_utils_logs.py ├── test_utils_net.py ├── test_utils_processor.py ├── test_utils_runner.py ├── test_utils_text.py ├── test_utils_validation.py └── test_utils_version.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source=at 3 | omit = tests/*, at/__init__.py, at/config.py 4 | 5 | [report] 6 | show_missing = true 7 | -------------------------------------------------------------------------------- /.env: -------------------------------------------------------------------------------- 1 | AT_PORT=8888 2 | SENTRY_DSN='' 3 | GUNICORN_WORKERS=2 4 | SITE_URL=http://localhost:8888 5 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: 
-------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/new-feature.yml: -------------------------------------------------------------------------------- 1 | name: New Feature / Enhancement 2 | description: Propose a new idea to be implemented 3 | labels: ["enhancement"] 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Thanks for taking the time to propose a new feature / enhancement idea. 9 | - type: textarea 10 | id: description 11 | attributes: 12 | label: Description 13 | description: Include as much info as possible, including mockups / screenshots if available. 14 | placeholder: Description 15 | validations: 16 | required: true 17 | - type: checkboxes 18 | id: terms 19 | attributes: 20 | label: Code of Conduct 21 | description: By submitting this request, you agree to follow our [Code of Conduct](https://github.com/ietf-tools/.github/blob/main/CODE_OF_CONDUCT.md). 22 | options: 23 | - label: I agree to follow the [IETF's Code of Conduct](https://github.com/ietf-tools/.github/blob/main/CODE_OF_CONDUCT.md) 24 | required: true 25 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/report-a-bug.yml: -------------------------------------------------------------------------------- 1 | name: Report a Bug 2 | description: Something isn't right? File a bug report 3 | labels: ["bug"] 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Thanks for taking the time to fill out this bug report. 9 | - type: textarea 10 | id: description 11 | attributes: 12 | label: Describe the issue 13 | description: Include as much info as possible, including the current behavior, expected behavior, screenshots, etc. If this is a display / UX issue, make sure to list the browser(s) you're experiencing the issue on. 
14 | placeholder: Description 15 | validations: 16 | required: true 17 | - type: checkboxes 18 | id: terms 19 | attributes: 20 | label: Code of Conduct 21 | description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/ietf-tools/.github/blob/main/CODE_OF_CONDUCT.md) 22 | options: 23 | - label: I agree to follow the [IETF's Code of Conduct](https://github.com/ietf-tools/.github/blob/main/CODE_OF_CONDUCT.md) 24 | required: true 25 | -------------------------------------------------------------------------------- /.github/workflows/html-checks.yml: -------------------------------------------------------------------------------- 1 | name: HTML Tests 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | paths: 7 | - 'static/**' 8 | pull_request: 9 | branches: [ main ] 10 | paths: 11 | - 'static/**' 12 | 13 | jobs: 14 | validate-html: 15 | name: Validate HTML 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - name: Checkout repository 20 | uses: actions/checkout@v4 21 | 22 | - name: Install HTML validator 23 | run: 24 | npm install html-validate 25 | 26 | - name: Validate HTML 27 | run: npm exec html-validate static/*.html static/doc/*.html 28 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish 2 | run-name: Publish build ${{ github.run_number }} of branch ${{ github.ref_name }} by @${{ github.actor }} 3 | 4 | on: 5 | workflow_dispatch: 6 | 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | permissions: 11 | contents: write 12 | packages: write 13 | id-token: write 14 | 15 | steps: 16 | - name: Checkout repository 17 | uses: actions/checkout@v4 18 | with: 19 | fetch-depth: 0 20 | 21 | - name: Get Next Version 22 | id: semver 23 | uses: ietf-tools/semver-action@v1 24 | with: 25 | token: ${{ github.token }} 26 | patchList: fix, bugfix, perf, refactor, test, tests, chore, ci, build 27 | branch: main 28 | skipInvalidTags: true 29 | 30 | - name: Create Draft Release 31 | uses: ncipollo/release-action@v1.13.0 32 | with: 33 | prerelease: true 34 | draft: false 35 | commit: ${{ github.sha }} 36 | tag: ${{ steps.semver.outputs.nextStrict }} 37 | name: ${{ steps.semver.outputs.nextStrict }} 38 | body: '*pending*' 39 | token: ${{ secrets.GITHUB_TOKEN }} 40 | 41 | - name: Set up QEMU 42 | uses: docker/setup-qemu-action@v3 43 | 44 | - name: Setup Docker buildx 45 | uses: docker/setup-buildx-action@v3 46 | 47 | - name: Login to GitHub Container Registry 48 | uses: docker/login-action@v3 49 | with: 50 | registry: ghcr.io 51 | username: ${{ github.actor }} 52 | password: ${{ secrets.GITHUB_TOKEN }} 53 | 54 | - name: Build and push Docker image 55 | uses: docker/build-push-action@v5 56 | with: 57 | context: . 
58 | file: Dockerfile 59 | push: true 60 | build-args: | 61 | VERSION=${{ steps.semver.outputs.nextStrict }} 62 | platforms: linux/amd64,linux/arm64 63 | tags: ghcr.io/${{ github.repository }}:${{ steps.semver.outputs.nextStrict }}, ghcr.io/${{ github.repository }}:latest 64 | 65 | - name: Update CHANGELOG 66 | id: changelog 67 | uses: requarks/changelog-action@v1 68 | with: 69 | token: ${{ github.token }} 70 | fromTag: ${{ steps.semver.outputs.nextStrict }} 71 | toTag: ${{ steps.semver.outputs.current }} 72 | excludeTypes: '' 73 | writeToFile: false 74 | 75 | - name: Create Release 76 | uses: ncipollo/release-action@v1.13.0 77 | with: 78 | allowUpdates: true 79 | makeLatest: true 80 | draft: false 81 | tag: ${{ steps.semver.outputs.nextStrict }} 82 | name: ${{ steps.semver.outputs.nextStrict }} 83 | body: ${{ steps.changelog.outputs.changes }} 84 | token: ${{ secrets.GITHUB_TOKEN }} 85 | -------------------------------------------------------------------------------- /.github/workflows/python-app.yml: -------------------------------------------------------------------------------- 1 | name: Author Tools Tests 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | paths-ignore: 7 | - 'static/**' 8 | - 'helm/**' 9 | - 'docker/**' 10 | - '**.md' 11 | - '**.yml' 12 | pull_request: 13 | branches: [ main ] 14 | paths-ignore: 15 | - 'static/**' 16 | - 'helm/**' 17 | - 'docker/**' 18 | - '**.md' 19 | - '**.yml' 20 | 21 | jobs: 22 | build: 23 | 24 | runs-on: ubuntu-latest 25 | 26 | steps: 27 | - uses: actions/checkout@v4 28 | # Required OS dependencies 29 | - name: Install OS dependencies 30 | run: | 31 | sudo apt-get update 32 | sudo apt-get install -y software-properties-common gcc 33 | sudo apt-get install -y python3-cffi python3-brotli libpango-1.0-0 libharfbuzz0b libpangoft2-1.0-0 libcairo2-dev libpangocairo-1.0-0 34 | sudo apt-get install -y wdiff gawk 35 | sudo wget https://raw.githubusercontent.com/ietf-tools/idnits-mirror/main/idnits 36 | sudo cp idnits /bin 37 | sudo chmod +x /bin/idnits 38 | wget https://github.com/ietf-tools/rfcdiff/archive/refs/tags/1.49.tar.gz 39 | sudo tar zxf 1.49.tar.gz -C /tmp/ 40 | sudo mv /tmp/rfcdiff-1.49/rfcdiff /bin/ 41 | sudo chmod +x /bin/rfcdiff 42 | sudo cp idnits /bin 43 | arch=$(arch | sed s/aarch64/arm64/ | sed s/x86_64/amd64/) 44 | wget "https://github.com/mmarkdown/mmark/releases/download/v2.2.47/mmark_2.2.47_linux_$arch.tgz" 45 | sudo tar zxf mmark_*.tgz -C /bin/ 46 | wget https://github.com/ietf-tools/bap/archive/refs/heads/master.zip 47 | unzip -q master.zip -d /tmp/bap 48 | wget "https://github.com/dthaler/rst2rfcxml/releases/download/v1.6.0/Ubuntu.Release.rst2rfcxml.zip" 49 | sudo unzip Ubuntu.Release.rst2rfcxml.zip -d /bin/ 50 | sudo chmod +x /bin/rst2rfcxml 51 | 52 | - name: Build bap 53 | working-directory: /tmp/bap/bap-master/ 54 | run: | 55 | sudo apt-get install -y bison flex libfl-dev 56 | sudo ./configure 57 | sudo make 58 | sudo cp aex bap /bin 59 | 60 | # Python 61 | - name: Set up Python 3.12 62 | uses: actions/setup-python@v5 63 | with: 64 | python-version: "3.12" 65 | - name: Install dependencies 66 | run: | 67 | python -m pip install --upgrade pip 68 | pip install -r requirements.txt -c constraints.txt 69 | pip install -r requirements.dev.txt 70 | # Ruby 71 | - name: Set up Ruby 72 | uses: ruby/setup-ruby@v1 73 | with: 74 | ruby-version: '3.2' 75 | - name: Install dependencies 76 | run: bundle install 77 | # Node 78 | - name: Set up Node 79 | uses: actions/setup-node@v4 80 | with: 81 | node-version: '18' 82 | - name: Install 
dependencies 83 | run: | 84 | npm install 85 | # Tests 86 | - name: Run Black 87 | run: black --check . 88 | - name: Run Pyflakes 89 | run: pyflakes at tests 90 | - name: Run tests and collect coverage 91 | env: 92 | PATH: $PATH:/bin:./node_modules/.bin/ 93 | run: | 94 | coverage run -m unittest discover tests 95 | coverage xml 96 | - name: Upload coverage to Codecov 97 | env: 98 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 99 | uses: codecov/codecov-action@v3 100 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .venv 106 | env/ 107 | venv/ 108 | ENV/ 109 | env.bak/ 110 | venv.*/ 111 | 112 | # Spyder project settings 113 | .spyderproject 114 | .spyproject 115 | 116 | # Rope project settings 117 | .ropeproject 118 | 119 | # mkdocs documentation 120 | /site 121 | 122 | # mypy 123 | .mypy_cache/ 124 | .dmypy.json 125 | dmypy.json 126 | 127 | # Pyre type checker 128 | .pyre/ 129 | 130 | # at specific 131 | /tmp 132 | at/config.py 133 | 134 | # JavaScript 135 | /node_modules 136 | -------------------------------------------------------------------------------- /.htmlvalidate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "html-validate:recommended" 4 | ], 5 | "rules": { 6 | "wcag/h32": "off" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /.ruby-version: -------------------------------------------------------------------------------- 1 | ruby-3.0 2 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | This is a reminder of IETF policies in effect on various topics such as patents 4 | or code of conduct. It is only meant to point you in the right direction. 5 | Exceptions may apply. The IETF's patent policy and the definition of an IETF 6 | "contribution" and "participation" are set forth in 7 | [BCP 79](https://www.rfc-editor.org/info/bcp79); please read it carefully. 8 | 9 | As a reminder: 10 | * By participating in the IETF, you agree to follow IETF processes and 11 | policies. 12 | * If you are aware that any IETF contribution is covered by patents or patent 13 | applications that are owned or controlled by you or your sponsor, you must 14 | disclose that fact, or not participate in the discussion. 15 | * As a participant in or attendee to any IETF activity you acknowledge that 16 | written, audio, video, and photographic records of meetings may be made public. 17 | * Personal information that you provide to IETF will be handled in accordance 18 | with the IETF Privacy Statement. 19 | * As a participant or attendee, you agree to work respectfully with other 20 | participants; please contact the 21 | [ombudsteam](https://www.ietf.org/contact/ombudsteam/) if you have questions 22 | or concerns about this. 23 | 24 | Definitive information is in the documents listed below and other IETF BCPs. 
25 | For advice, please talk to WG chairs or ADs: 26 | 27 | * [BCP 9 (Internet Standards Process)](https://www.rfc-editor.org/info/bcp9) 28 | * [BCP 25 (Working Group processes)](https://www.rfc-editor.org/info/bcp25) 29 | * [BCP 25 (Anti-Harassment Procedures)](https://www.rfc-editor.org/info/bcp25) 30 | * [BCP 54 (Code of Conduct)](https://www.rfc-editor.org/info/bcp54) 31 | * [BCP 78 (Copyright)](https://www.rfc-editor.org/info/bcp78) 32 | * [BCP 79 (Patents, Participation)](https://www.rfc-editor.org/info/bcp79) 33 | * [Privacy Policy](https://www.ietf.org/privacy-policy/) 34 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | This repository relates to activities in the Internet Engineering Task Force 4 | ([IETF](https://www.ietf.org/)). All material in this repository is considered 5 | Contributions to the IETF Standards Process, as defined in the intellectual 6 | property policies of IETF currently designated as 7 | [BCP 78](https://www.rfc-editor.org/info/bcp78), 8 | [BCP 79](https://www.rfc-editor.org/info/bcp79) and the 9 | [IETF Trust Legal Provisions (TLP) Relating to IETF Documents](http://trustee.ietf.org/trust-legal-provisions.html). 10 | 11 | Any edit, commit, pull request, issue, comment or other change made to this 12 | repository constitutes Contributions to the IETF Standards Process 13 | (https://www.ietf.org/). 14 | 15 | You agree to comply with all applicable IETF policies and procedures, including, 16 | BCP 78, 79, the TLP, and the TLP rules regarding code components (e.g. being 17 | subject to a Simplified BSD License) in Contributions. 18 | 19 | #### Table Of Contents 20 | 21 | [Code of Conduct](#code-of-conduct) 22 | 23 | [Setting up development environment](#setting-up-development-environment) 24 | 25 | [Running tests](#running-tests) 26 | 27 | ## Code of Conduct 28 | 29 | See [Code of Conduct](CODE_OF_CONDUCT.md). 30 | 31 | ## Setting up development environment 32 | 33 | * Build docker image 34 | ```sh 35 | docker build -f dev.Dockerfile -t author-tools:dev . 36 | ``` 37 | 38 | * Run the docker image 39 | ```sh 40 | docker run -it author-tools:dev 41 | ``` 42 | 43 | * Run unit tests. 
44 | ```sh 45 | python3 -m unittest discover tests 46 | ``` 47 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:noble 2 | LABEL maintainer="Kesara Rathnayake " 3 | 4 | ARG VERSION=6.6.6 5 | 6 | ENV DEBIAN_FRONTEND=noninteractive 7 | ENV PATH=$PATH:./node_modules/.bin 8 | # Disable local file read for kramdown-rfc 9 | ENV KRAMDOWN_SAFE=1 10 | 11 | WORKDIR /usr/src/app 12 | 13 | SHELL ["/bin/bash", "-o", "pipefail", "-c"] 14 | 15 | # Add nodejs 18.x 16 | RUN apt-get update && \ 17 | apt-get install -y curl gpg && \ 18 | curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && \ 19 | echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_18.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list 20 | 21 | RUN apt-get update && \ 22 | apt-get install -y \ 23 | software-properties-common \ 24 | gcc \ 25 | wget \ 26 | ruby \ 27 | python3 \ 28 | python3-venv \ 29 | libpango-1.0-0 \ 30 | libpango1.0-dev \ 31 | wdiff \ 32 | nodejs \ 33 | gawk \ 34 | bison \ 35 | flex \ 36 | make \ 37 | git \ 38 | build-essential \ 39 | cmake \ 40 | nginx \ 41 | supervisor && \ 42 | rm -rf /var/lib/apt/lists/* /var/log/dpkg.log && \ 43 | apt-get autoremove -y && \ 44 | apt-get clean -y 45 | 46 | # Install required fonts 47 | RUN mkdir -p /var/www/.fonts/opentype /tmp/fonts && \ 48 | wget -q -O /tmp/fonts.tar.gz https://github.com/ietf-tools/xml2rfc-fonts/archive/refs/tags/3.22.0.tar.gz && \ 49 | tar zxf /tmp/fonts.tar.gz -C /tmp/fonts && \ 50 | mv /tmp/fonts/*/noto/* /var/www/.fonts/opentype/ && \ 51 | mv /tmp/fonts/*/roboto_mono/* /var/www/.fonts/opentype/ && \ 52 | chown -R www-data:0 /var/www/.fonts && \ 53 | rm -rf /tmp/fonts.tar.gz /tmp/fonts/ && \ 54 | fc-cache -f 55 | 56 | # Install rfcdiff 57 | RUN wget https://github.com/ietf-tools/rfcdiff/archive/refs/tags/1.49.tar.gz && \ 58 | tar zxf 1.49.tar.gz -C /tmp/ && \ 59 | mv /tmp/rfcdiff-1.49/rfcdiff /bin && \ 60 | chmod +x /bin/rfcdiff && \ 61 | rm -rf 1.49.tar.gz /tmp/rfcdiff-1.49 62 | 63 | # Install bap 64 | RUN wget https://github.com/ietf-tools/bap/archive/refs/heads/master.zip && \ 65 | unzip -q master.zip -d /tmp/bap && \ 66 | cd /tmp/bap/bap-master/ && \ 67 | ./configure && \ 68 | make && \ 69 | cp aex bap /bin && \ 70 | cd && \ 71 | rm -rf /tmp/bap master.zip 72 | 73 | # Install idnits 74 | RUN wget https://github.com/ietf-tools/idnits/archive/refs/tags/2.17.1.zip && \ 75 | unzip -q 2.17.1.zip -d ~/idnits && \ 76 | cp ~/idnits/idnits-2.17.1/idnits /bin && \ 77 | chmod +x /bin/idnits && \ 78 | rm -rf ~/idnits/idnits-2.17.1/idnits idnit 2.17.1.zip 79 | 80 | # Install mmark 81 | RUN arch=$(arch | sed s/aarch64/arm64/ | sed s/x86_64/amd64/) && \ 82 | wget "https://github.com/mmarkdown/mmark/releases/download/v2.2.47/mmark_2.2.47_linux_$arch.tgz" && \ 83 | tar zxf mmark_*.tgz -C /bin/ && \ 84 | rm mmark_*.tgz 85 | 86 | # Build & install rst2rfcxml 87 | RUN git clone --branch v1.6.0 --recurse-submodules https://github.com/dthaler/rst2rfcxml.git && \ 88 | cd rst2rfcxml && \ 89 | cmake -B build -DCMAKE_BUILD_TYPE=Release && \ 90 | cmake --build build && \ 91 | mv ./build/rst2rfcxml/rst2rfcxml /bin && \ 92 | chmod +x /bin/rst2rfcxml && \ 93 | cd .. 
&& \ 94 | rm -rf rst2rfcxml 95 | 96 | COPY Gemfile Gemfile.lock LICENSE README.md api.yml constraints.txt package-lock.json package.json requirements.txt docker/version.py ./ 97 | COPY at ./at 98 | 99 | # Install JavaScript dependencies 100 | RUN npm install 101 | 102 | # Install Python dependencies 103 | ENV PYTHONUNBUFFERED=1 104 | ENV VENV_DIR=/usr/src/app/venv 105 | RUN python3 -m venv $VENV_DIR 106 | ENV PATH="$VENV_DIR/bin:$PATH" 107 | RUN python -m pip install --upgrade pip 108 | RUN python -m pip install -r requirements.txt -c constraints.txt 109 | 110 | # Install Ruby dependencies 111 | RUN gem install bundler && bundle install 112 | 113 | # nginx unprivileged setup 114 | RUN ln -sf /dev/stdout /var/log/nginx/access.log && \ 115 | ln -sf /dev/stderr /var/log/nginx/error.log && \ 116 | sed -i '/user www-data;/d' /etc/nginx/nginx.conf && \ 117 | sed -i 's,/run/nginx.pid,/tmp/nginx.pid,' /etc/nginx/nginx.conf && \ 118 | sed -i "/^http {/a \ proxy_temp_path /tmp/proxy_temp;\n client_body_temp_path /tmp/client_temp;\n fastcgi_temp_path /tmp/fastcgi_temp;\n uwsgi_temp_path /tmp/uwsgi_temp;\n scgi_temp_path /tmp/scgi_temp;\n" /etc/nginx/nginx.conf && \ 119 | mkdir -p /var/cache/nginx && \ 120 | chown -R www-data:0 /var/cache/nginx && \ 121 | chmod -R g+w /var/cache/nginx 122 | 123 | RUN mkdir -p tmp && \ 124 | echo "UPLOAD_DIR = '$PWD/tmp'" > at/config.py && \ 125 | echo "VERSION = '${VERSION}'" >> at/config.py && \ 126 | echo "REQUIRE_AUTH = False" >> at/config.py && \ 127 | echo "DT_LATEST_DRAFT_URL = 'https://datatracker.ietf.org/api/rfcdiff-latest-json'" >> at/config.py && \ 128 | echo "ALLOWED_DOMAINS = ['ietf.org', 'rfc-editor.org', 'github.com', 'githubusercontent.com', 'github.io', 'gitlab.com', 'gitlab.io', 'codeberg.page', 'httpwg.org', 'quicwg.org']" >> at/config.py && \ 129 | python3 version.py >> at/config.py && \ 130 | chown -R www-data:0 /usr/src/app/tmp 131 | 132 | # cache configuration 133 | RUN mkdir -p /tmp/cache/xml2rfc && \ 134 | mkdir -p /tmp/cache/refcache && \ 135 | mkdir /var/www/.cache && \ 136 | ln -sf /tmp/cache/xml2rfc /var/cache/xml2rfc && \ 137 | chown -R www-data:0 /tmp/cache /var/www/.cache 138 | ENV KRAMDOWN_REFCACHEDIR=/tmp/cache/refcache 139 | 140 | 141 | # COPY required files 142 | COPY static /usr/share/nginx/html/ 143 | COPY api.yml /usr/share/nginx/html/ 144 | COPY docker/gunicorn.py /usr/src/app/ 145 | COPY docker/nginx-default-site.conf /etc/nginx/sites-available/default 146 | COPY docker/supervisord.conf /etc/supervisor/ 147 | 148 | USER www-data 149 | 150 | EXPOSE 8080 151 | WORKDIR /usr/src/app/ 152 | 153 | CMD ["supervisord"] 154 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | gem 'kramdown-rfc' 3 | -------------------------------------------------------------------------------- /Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | specs: 4 | base64 (0.2.0) 5 | certified (1.0.0) 6 | connection_pool (2.5.0) 7 | differ (0.1.2) 8 | json_pure (2.8.1) 9 | kramdown (2.4.0) 10 | rexml 11 | kramdown-parser-gfm (1.1.0) 12 | kramdown (~> 2.0) 13 | kramdown-rfc (1.7.29) 14 | kramdown-rfc2629 (= 1.7.29) 15 | kramdown-rfc2629 (1.7.29) 16 | base64 (>= 0.1) 17 | certified (~> 1.0) 18 | differ (~> 0.1) 19 | json_pure (~> 2.0) 20 | kramdown (~> 2.4.0) 21 | kramdown-parser-gfm (~> 1.1) 22 | net-http-persistent 
(~> 4.0) 23 | ostruct (~> 0.6) 24 | unicode-blocks (~> 1.0) 25 | unicode-name (~> 1.0) 26 | unicode-scripts (~> 1.0) 27 | net-http-persistent (4.0.5) 28 | connection_pool (~> 2.2) 29 | ostruct (0.6.1) 30 | rexml (3.4.1) 31 | unicode-blocks (1.10.0) 32 | unicode-name (1.13.5) 33 | unicode-types (~> 1.10) 34 | unicode-scripts (1.11.0) 35 | unicode-types (1.10.0) 36 | 37 | PLATFORMS 38 | arm64-darwin-20 39 | arm64-darwin-23 40 | arm64-darwin-24 41 | x86_64-linux 42 | 43 | DEPENDENCIES 44 | kramdown-rfc 45 | 46 | BUNDLED WITH 47 | 2.6.3 48 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2021-2024, The IETF Trust 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 | IETF Author Tools 4 | 5 | [![Release](https://img.shields.io/github/release/ietf-tools/author-tools.svg?style=flat&maxAge=300)](https://github.com/ietf-tools/author-tools/releases) 6 | [![License](https://img.shields.io/github/license/ietf-tools/author-tools?20240110)](https://github.com/ietf-tools/author-tools/blob/main/LICENSE) 7 | 8 | ##### IETF Author Tools 9 | 10 |
11 | 12 | - [**Production Website**](https://author-tools.ietf.org) 13 | - [Changelog](https://github.com/ietf-tools/author-tools/releases) 14 | - [Contributing](https://github.com/ietf-tools/author-tools/blob/main/CONTRIBUTING.md) 15 | - [License](https://github.com/ietf-tools/author-tools/blob/main/LICENSE) 16 | - [Documentation](#documentation) 17 | - [Running Author Tools service](#running-author-tools-service) 18 | - [Testing Web UI](#testing-web-ui) 19 | - [Testing API](#testing-api) 20 | 21 | --- 22 | 23 | ## Documentation 24 | 25 | * [API documentation](https://author-tools.ietf.org/doc/) 26 | * [OpenAPI specification](https://author-tools.ietf.org/api.yml) 27 | 28 | ## Running Author Tools service 29 | 30 | ``` 31 | docker compose up --build -d 32 | ``` 33 | 34 | ## Testing Web UI 35 | 36 | * Visit http://localhost:8888 37 | 38 | ## Testing API 39 | 40 | * Test XML RFC generation 41 | ``` 42 | curl localhost:8888/api/render/xml -X POST -F "file=@<file>" 43 | ``` 44 | 45 | * Test HTML RFC generation 46 | ``` 47 | curl localhost:8888/api/render/html -X POST -F "file=@<file>" 48 | ``` 49 | 50 | * Test text RFC generation 51 | ``` 52 | curl localhost:8888/api/render/text -X POST -F "file=@<file>" 53 | ``` 54 | 55 | * Test PDF RFC generation 56 | ``` 57 | curl localhost:8888/api/render/pdf -X POST -F "file=@<file>" -o draft-output.pdf 58 | ``` 59 | 60 | * Test validation 61 | ``` 62 | curl localhost:8888/api/validate -X POST -F "file=@<file>" 63 | ``` 64 | 65 | ## Contributing 66 | 67 | See [contributing guide](CONTRIBUTING.md). 68 | 69 | ## License 70 | 71 | * [IETF Author Tools License](LICENSE) 72 | -------------------------------------------------------------------------------- /at/.api.py.swp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ietf-tools/author-tools/a50c3997464b1a32005eab6ad410c83e616f8ab3/at/.api.py.swp -------------------------------------------------------------------------------- /at/__init__.py: -------------------------------------------------------------------------------- 1 | from logging import ERROR as LOG_ERROR 2 | from os import getenv 3 | 4 | from flask import Flask 5 | from flask_cors import CORS 6 | from sentry_sdk import init as sentry_init 7 | from sentry_sdk.integrations.flask import FlaskIntegration 8 | from sentry_sdk.integrations.logging import LoggingIntegration 9 | 10 | 11 | def create_app(config=None): 12 | app = Flask(__name__) 13 | CORS(app) 14 | 15 | if config is None: 16 | app.logger.info("Using configuration settings from at/config.py") 17 | app.config.from_object("at.config") 18 | else: 19 | app.logger.info(f"Using configuration settings from {str(config)}") 20 | app.config.from_mapping(config) 21 | 22 | from . import api 23 | 24 | app.register_blueprint(api.bp) 25 | 26 | if site_url := getenv("SITE_URL"): 27 | app.logger.info("Using SITE_URL from ENV.") 28 | app.config["SITE_URL"] = site_url 29 | elif "SITE_URL" not in app.config.keys(): 30 | app.logger.info("SITE_URL not set.
Using default.") 31 | app.config["SITE_URL"] = "http://localhost" 32 | app.logger.info(f"SITE_URL: {app.config['SITE_URL']}") 33 | 34 | if dt_latest_draft_url := getenv("DT_LATEST_DRAFT_URL"): 35 | app.logger.info(f"Using DT_LATEST_DRAFT_URL from ENV: {dt_latest_draft_url}") 36 | app.config["DT_LATEST_DRAFT_URL"] = dt_latest_draft_url 37 | 38 | if sentry_dsn := getenv("SENTRY_DSN"): 39 | sentry_init( 40 | dsn=sentry_dsn, 41 | integrations=[ 42 | FlaskIntegration(), 43 | LoggingIntegration(level=LOG_ERROR, event_level=LOG_ERROR), 44 | ], 45 | traces_sample_rate=1.0, 46 | ) 47 | app.logger.info("Sentry is enabled.") 48 | else: 49 | app.logger.info("Sentry is disabled.") 50 | 51 | return app 52 | -------------------------------------------------------------------------------- /at/utils/.validation.py.swp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ietf-tools/author-tools/a50c3997464b1a32005eab6ad410c83e616f8ab3/at/utils/.validation.py.swp -------------------------------------------------------------------------------- /at/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ietf-tools/author-tools/a50c3997464b1a32005eab6ad410c83e616f8ab3/at/utils/__init__.py -------------------------------------------------------------------------------- /at/utils/abnf.py: -------------------------------------------------------------------------------- 1 | from logging import getLogger 2 | 3 | from at.utils.runner import proc_run, RunnerError 4 | 5 | 6 | def extract_abnf(filename, logger=getLogger()): 7 | """Extract ABNF using BAP aex""" 8 | logger.debug("running bap aex") 9 | output = None 10 | 11 | try: 12 | output = proc_run(args=["aex", filename], capture_output=True) 13 | except RunnerError as e: # pragma: no cover 14 | logger.info(f"process error: {str(e)}") 15 | result = "" 16 | 17 | if output and output.stderr: 18 | error = output.stderr.decode("utf-8") 19 | result += error 20 | logger.info("bap aex error: {}".format(error)) 21 | 22 | if output and output.stdout: 23 | result += output.stdout.decode("utf-8", errors="ignore") 24 | 25 | if result == "": 26 | result = "No output from BAP aex." 27 | 28 | return result 29 | 30 | 31 | def parse_abnf(filename, logger=getLogger()): 32 | """Parse ABNF using BAP""" 33 | logger.debug("running bap") 34 | output = None 35 | 36 | try: 37 | output = proc_run(args=["bap", filename], capture_output=True) 38 | except RunnerError as e: # pragma: no cover 39 | logger.info(f"process error: {str(e)}") 40 | 41 | errors = "" 42 | abnf = "" 43 | 44 | if output and output.stderr: 45 | errors = output.stderr.decode("utf-8").replace(filename, "") 46 | 47 | if output and output.stdout: 48 | abnf = output.stdout.decode("utf-8", errors="ignore") 49 | 50 | return (errors, abnf) 51 | -------------------------------------------------------------------------------- /at/utils/authentication.py: -------------------------------------------------------------------------------- 1 | from decorator import decorator 2 | from flask import current_app, jsonify, request 3 | from requests import post 4 | 5 | UNAUTHORIZED = 401 6 | OK = 200 7 | 8 | 9 | @decorator 10 | def require_api_key(f, *args, **kwargs): 11 | """Returns the function if api authentication passes. 
12 | Else returns JSON response with an error.""" 13 | logger = current_app.logger 14 | config = current_app.config 15 | 16 | if not config["REQUIRE_AUTH"]: 17 | logger.debug("Datatracker authentication is not required") 18 | else: 19 | apikey = request.headers.get("X-API-KEY") 20 | if apikey is None or apikey.strip() == "": 21 | if "apikey" in request.form.keys(): 22 | apikey = request.form["apikey"] 23 | else: 24 | logger.error("missing api key") 25 | return jsonify(error="API key is missing"), UNAUTHORIZED 26 | 27 | with post( 28 | config["DT_APPAUTH_URL"], data={"apikey": apikey.strip()} 29 | ) as response: 30 | if response.status_code == OK and response.json()["success"] is True: 31 | logger.debug("valid apikey") 32 | else: 33 | logger.error("invalid api key") 34 | return jsonify(error="API key is invalid"), UNAUTHORIZED 35 | 36 | return f(*args, **kwargs) 37 | -------------------------------------------------------------------------------- /at/utils/file.py: -------------------------------------------------------------------------------- 1 | from logging import getLogger 2 | from os import mkdir, path 3 | from re import compile as re_compile 4 | from uuid import uuid4 5 | 6 | from decorator import decorator 7 | from flask import current_app, jsonify, request 8 | from requests import get 9 | from requests.exceptions import ConnectionError, Timeout 10 | from werkzeug.utils import secure_filename 11 | 12 | 13 | ALLOWED_EXTENSIONS = ( 14 | "txt", 15 | "xml", 16 | "md", 17 | "mkd", 18 | "rst", 19 | ) 20 | ALLOWED_EXTENSIONS_BY_PROCESS = { 21 | "svgcheck": ("svg",), 22 | "clean_svg_ids": ("xml",), 23 | } 24 | DIR_MODE = 0o770 25 | DRAFT_NAME = re_compile(r"(-\d+)?(\..*)?$") 26 | DRAFT_NAME_WITH_REVISION = re_compile(r"\..*$") 27 | OK = 200 28 | BAD_REQUEST = 400 29 | 30 | 31 | # Exceptions 32 | class DownloadError(Exception): 33 | """Error class for download errors""" 34 | 35 | pass 36 | 37 | 38 | def allowed_file(filename, process=None): 39 | """Return true if file extension in allowed list""" 40 | 41 | if "."
in filename: 42 | ext = filename.rsplit(".", 1)[1].lower() 43 | if process: 44 | return ext in ALLOWED_EXTENSIONS_BY_PROCESS[process] 45 | else: 46 | return ext in ALLOWED_EXTENSIONS 47 | else: 48 | return False 49 | 50 | 51 | def get_extension(filename): 52 | """Returns file extension""" 53 | _, file_ext = path.splitext(filename) 54 | return file_ext 55 | 56 | 57 | def get_filename(filename, ext): 58 | """Returns filename with given extension""" 59 | 60 | root, _ = path.splitext(filename) 61 | return ".".join([root, ext]) 62 | 63 | 64 | def get_file(filename): 65 | """Returns the filename and the parent directory""" 66 | 67 | return "/".join(filename.split("/")[-2:]) 68 | 69 | 70 | def save_file(file, upload_dir): 71 | """Save given file and returns path""" 72 | dir_path = path.join(upload_dir, str(uuid4())) 73 | mkdir(dir_path, mode=DIR_MODE) 74 | 75 | filename = path.join(dir_path, secure_filename(file.filename)) 76 | file.save(filename) 77 | 78 | return (dir_path, filename) 79 | 80 | 81 | def save_file_from_text(text, upload_dir): 82 | """Save given text to file and returns path""" 83 | dir_path = path.join(upload_dir, str(uuid4())) 84 | mkdir(dir_path, mode=DIR_MODE) 85 | 86 | filename = path.join(dir_path, secure_filename(".".join([str(uuid4()), "txt"]))) 87 | 88 | with open(filename, "w") as file: 89 | file.write(text) 90 | 91 | return (dir_path, filename) 92 | 93 | 94 | def save_file_from_url(url, upload_dir, logger=getLogger()): 95 | """Download and save the file from given URL and returns path""" 96 | dir_path = path.join(upload_dir, str(uuid4())) 97 | mkdir(dir_path, mode=DIR_MODE) 98 | save_filename = secure_filename(url.split("/")[-1]) 99 | if len(save_filename) == 0: 100 | error = "Can not determine the filename: {}".format(url) 101 | logger.error(error) 102 | raise DownloadError(error) 103 | filename = path.join(dir_path, save_filename) 104 | 105 | try: 106 | with get(url) as response: 107 | if response.status_code == OK: 108 | with open(filename, "w") as file: 109 | file.write(response.text) 110 | else: 111 | logger.error("Error downloading file: {}".format(url)) 112 | raise DownloadError("Error occurred while downloading file.") 113 | 114 | return (dir_path, filename) 115 | except (ConnectionError, Timeout) as e: 116 | logger.error("Connection error on {url}: {error}".format(url=url, error=e)) 117 | raise DownloadError("Error occurred while downloading file.") 118 | 119 | 120 | def get_name(filename): 121 | """Returns draft/rfc name""" 122 | name = None 123 | 124 | if filename.lower().startswith("draft-") or filename.lower().startswith("rfc"): 125 | name = DRAFT_NAME.sub("", filename.lower(), count=1) 126 | 127 | return name 128 | 129 | 130 | def get_name_with_revision(filename): 131 | """Returns draft/rfc name with revision""" 132 | name = None 133 | 134 | if filename.lower().startswith("draft-") or filename.lower().startswith("rfc"): 135 | name = DRAFT_NAME_WITH_REVISION.sub("", filename.lower(), count=1) 136 | 137 | return name 138 | 139 | 140 | def cleanup_output(filename, output): 141 | """Return output without directory information""" 142 | 143 | if output: 144 | return output.replace(path.dirname(filename) + "/", "").replace( 145 | path.dirname(path.relpath(filename)) + "/", "" 146 | ) 147 | else: 148 | return None 149 | 150 | 151 | @decorator 152 | def check_file(f, *args, **kwargs): 153 | """Check posted files""" 154 | logger = current_app.logger 155 | 156 | file_check_process = None 157 | if "/svgcheck" in request.path: 158 | file_check_process = "svgcheck" 159 | if
"/clean_svg_ids" in request.path: 160 | file_check_process = "clean_svg_ids" 161 | 162 | for file_entry in request.files: 163 | file = request.files[file_entry] 164 | 165 | if file.filename == "": 166 | logger.info("filename missing") 167 | return jsonify(error="Filename is missing"), BAD_REQUEST 168 | if not allowed_file(file.filename, process=file_check_process): 169 | logger.info("File format not supportted: {}".format(file.filename)) 170 | return jsonify(error="Input file format not supported"), BAD_REQUEST 171 | 172 | return f(*args, **kwargs) 173 | -------------------------------------------------------------------------------- /at/utils/iddiff.py: -------------------------------------------------------------------------------- 1 | from logging import getLogger 2 | from subprocess import CalledProcessError 3 | 4 | from at.utils.runner import proc_run, RunnerError 5 | 6 | 7 | OK = 200 8 | ALLOWED_SCHEMES = ["http", "https"] 9 | TIMEOUT = 20 # in seconds 10 | 11 | 12 | # Exceptions 13 | class IddiffError(Exception): 14 | """Error class for iddiff errors""" 15 | 16 | pass 17 | 18 | 19 | def get_id_diff( 20 | old_draft, 21 | new_draft, 22 | diff_tool="iddiff", 23 | table=False, 24 | wdiff=False, 25 | chbars=False, 26 | abdiff=False, 27 | logger=getLogger(), 28 | ): 29 | """Returns iddiff output""" 30 | 31 | if diff_tool == "rfcdiff": 32 | logger.debug("running rfcdiff") 33 | diff = [ 34 | "rfcdiff", 35 | "--stdout", 36 | ] 37 | else: 38 | logger.debug("running iddiff") 39 | diff = [ 40 | "iddiff", 41 | ] 42 | 43 | try: 44 | if wdiff: 45 | output = proc_run( 46 | args=diff + ["--hwdiff", old_draft, new_draft], 47 | timeout=TIMEOUT, 48 | capture_output=True, 49 | ) 50 | elif chbars: 51 | output = proc_run( 52 | args=diff + ["--chbars", old_draft, new_draft], 53 | timeout=TIMEOUT, 54 | capture_output=True, 55 | ) 56 | elif abdiff: 57 | output = proc_run( 58 | args=diff + ["--abdiff", old_draft, new_draft], 59 | timeout=TIMEOUT, 60 | capture_output=True, 61 | ) 62 | elif table and diff_tool == "iddiff": 63 | output = proc_run( 64 | args=diff + ["-t", old_draft, new_draft], 65 | timeout=TIMEOUT, 66 | capture_output=True, 67 | ) 68 | else: 69 | output = proc_run( 70 | args=diff + [old_draft, new_draft], timeout=TIMEOUT, capture_output=True 71 | ) 72 | output.check_returncode() 73 | except RunnerError as e: 74 | if diff_tool == "rfcdiff": 75 | error = f"iddiff error: {str(e)}" 76 | logger.info(error) 77 | raise IddiffError(error) 78 | else: 79 | # try again with rfcdiff 80 | return get_id_diff( 81 | old_draft, 82 | new_draft, 83 | diff_tool="rfcdiff", 84 | table=table, 85 | wdiff=wdiff, 86 | chbars=chbars, 87 | abdiff=abdiff, 88 | logger=logger, 89 | ) 90 | except CalledProcessError: 91 | logger.info("iddiff error: {}".format(output.stderr.decode("utf-8"))) 92 | raise IddiffError(output.stderr.decode("utf-8")) 93 | 94 | return output.stdout.decode("utf-8") 95 | -------------------------------------------------------------------------------- /at/utils/logs.py: -------------------------------------------------------------------------------- 1 | from re import compile as re_compile, IGNORECASE 2 | 3 | from at.utils.file import cleanup_output 4 | 5 | XML2RFC_ERROR_REGEX = re_compile(r"^.*?Error: (?P.*)$", IGNORECASE) 6 | XML2RFC_WARN_REGEX = re_compile(r"^.*?Warning: (?P.*)$", IGNORECASE) 7 | XML2RFC_LINE_NUMBER_REGEX = re_compile( 8 | r"^.*?\((?P.*?)\): (Error|Warning): ", IGNORECASE 9 | ) 10 | 11 | 12 | def process_xml2rfc_log(output, filename): 13 | """Process xml2rfc output and return 
dictionary of errors and warnings""" 14 | log = [] 15 | errors = [] 16 | warnings = [] 17 | unicode = [] 18 | 19 | if output.stderr: 20 | log = cleanup_output( 21 | filename, output.stderr.decode("utf-8", errors="ignore") 22 | ).split("\n") 23 | 24 | for entry in log: 25 | error = XML2RFC_ERROR_REGEX.search(entry) 26 | warning = XML2RFC_WARN_REGEX.search(entry) 27 | line = XML2RFC_LINE_NUMBER_REGEX.search(entry) 28 | if error and (message := error.group("message")): 29 | if line and (line := line.group("line")): 30 | errors.append(f"({line}) {message}") 31 | else: 32 | errors.append(message) 33 | elif warning and (message := warning.group("message")): 34 | if "Found non-ascii characters" in message: 35 | if line and (line := line.group("line")): 36 | unicode.append(f"({line}) {message}") 37 | else: 38 | warnings.append(message) 39 | else: 40 | if line and (line := line.group("line")): 41 | warnings.append(f"({line}) {message}") 42 | else: 43 | warnings.append(message) 44 | 45 | return {"errors": errors, "warnings": warnings, "bare_unicode": unicode} 46 | 47 | 48 | def get_errors(output, filename): 49 | """Returns errors as a string""" 50 | 51 | log = process_xml2rfc_log(output, filename) 52 | 53 | if len(log["errors"]) > 0: 54 | return "\n".join(log["errors"]) 55 | else: 56 | return None 57 | 58 | 59 | def update_logs(logs, new_entries): 60 | """Adds new entries to logs""" 61 | if new_entries: 62 | logs["errors"].extend(new_entries["errors"]) 63 | logs["warnings"].extend(new_entries["warnings"]) 64 | 65 | return logs 66 | -------------------------------------------------------------------------------- /at/utils/net.py: -------------------------------------------------------------------------------- 1 | from logging import getLogger 2 | from urllib.parse import urlsplit 3 | 4 | from requests import get 5 | 6 | 7 | OK = 200 8 | ALLOWED_SCHEMES = ["http", "https"] 9 | 10 | 11 | # Exceptions 12 | class DocumentNotFound(Exception): 13 | """Error class for latest draft not found error""" 14 | 15 | pass 16 | 17 | 18 | class InvalidURL(Exception): 19 | """Error class for invalid URLs""" 20 | 21 | pass 22 | 23 | 24 | def is_valid_url(url, allowed_domains=None, logger=getLogger()): 25 | """Checks if provided URL is valid and allowed URL""" 26 | 27 | try: 28 | url_parts = urlsplit(url) 29 | if url_parts.scheme not in ALLOWED_SCHEMES: 30 | logger.info(f"URL: {url_parts.scheme} scheme is not allowed.") 31 | raise InvalidURL(f"{url_parts.scheme} scheme is not allowed.") 32 | if ".".join(url_parts.netloc.split(".")[-2:]) not in allowed_domains: 33 | logger.info(f"URL: {url_parts.netloc} domain is not allowed.") 34 | raise InvalidURL(f"{url_parts.netloc} domain is not allowed.") 35 | except ValueError as e: 36 | logger.info(f"invalid URL: {url} error: {str(e)}") 37 | raise InvalidURL(f"Invalid URL: {url}") 38 | 39 | return True 40 | 41 | 42 | def get_latest(doc, dt_latest_url, logger=getLogger()): 43 | """Returns URL of the latest ID/RFC from Datatracker.""" 44 | 45 | url = "/".join([dt_latest_url, doc]) 46 | with get(url) as response: 47 | if response.status_code == OK: 48 | try: 49 | data = response.json() 50 | latest_doc = data["content_url"] 51 | except KeyError: 52 | logger.error("can not find content_url for {}".format(url)) 53 | raise DocumentNotFound( 54 | "Can not find url for the latest document on " "datatracker" 55 | ) 56 | else: 57 | logger.error("can not find doc for {}".format(url)) 58 | raise DocumentNotFound("Can not find the latest document on datatracker") 59 | 60 | return latest_doc 61 | 62 |
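# ---------------------------------------------------------------------------
# Editor's usage sketch (illustration only, not part of net.py). get_latest()
# resolves a draft or RFC name to the Datatracker "content_url" of its newest
# revision; the endpoint below is the DT_LATEST_DRAFT_URL value written to
# at/config.py in the Dockerfile.
#
#     from at.utils.net import get_latest
#
#     latest = get_latest(
#         "draft-iab-xml2rfc",
#         "https://datatracker.ietf.org/api/rfcdiff-latest-json",
#     )
#     # latest is a URL to the newest revision's text;
#     # DocumentNotFound is raised if Datatracker has no match.
# ---------------------------------------------------------------------------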
63 | def get_previous(doc, dt_latest_url, logger=getLogger()): 64 | """Returns previous ID/RFC from datatracker""" 65 | url = "/".join([dt_latest_url, doc]) 66 | with get(url) as response: 67 | if response.status_code == OK: 68 | try: 69 | data = response.json() 70 | previous_doc = data["previous"] 71 | except KeyError: 72 | logger.error("can not find previous for {}".format(url)) 73 | raise DocumentNotFound( 74 | "Can not find url for the previous document on " "datatracker" 75 | ) 76 | else: 77 | logger.error("can not find doc for {}".format(url)) 78 | raise DocumentNotFound("Can not find the previous document on datatracker") 79 | 80 | return get_latest(previous_doc, dt_latest_url, logger) 81 | 82 | 83 | def get_both(doc, dt_latest_url, logger=getLogger()): 84 | """Returns urls of given doc and previous ID/RFC from Datatracker.""" 85 | 86 | url = "/".join([dt_latest_url, doc]) 87 | with get(url) as response: 88 | if response.status_code == OK: 89 | try: 90 | data = response.json() 91 | latest_doc = data["content_url"] 92 | try: 93 | previous_doc = data["previous_url"] 94 | except KeyError: 95 | logger.error("Can not find previous_url for {}".format(url)) 96 | raise DocumentNotFound( 97 | "Can not find url for previous document on " "datatracker" 98 | ) 99 | except KeyError: 100 | logger.error("can not find content_url for {}".format(url)) 101 | raise DocumentNotFound( 102 | "Can not find url for the latest document on " "datatracker" 103 | ) 104 | else: 105 | logger.error("can not find doc for {}".format(url)) 106 | raise DocumentNotFound("Can not find the latest document on datatracker") 107 | 108 | return (previous_doc, latest_doc) 109 | 110 | 111 | def is_url(string): 112 | """Returns True if string is a URL""" 113 | try: 114 | url_parts = urlsplit(string) 115 | return all([url_parts.scheme, url_parts.netloc]) 116 | except ValueError: 117 | return False 118 | -------------------------------------------------------------------------------- /at/utils/runner.py: -------------------------------------------------------------------------------- 1 | from subprocess import run 2 | 3 | 4 | TIMEOUT = 120 # in seconds 5 | 6 | 7 | # Exception 8 | class RunnerError(Exception): 9 | """Error class for process runner""" 10 | 11 | pass 12 | 13 | 14 | def proc_run(args, timeout=TIMEOUT, capture_output=True): 15 | """Return subprocess.run()""" 16 | try: 17 | return run(args, timeout=timeout, capture_output=capture_output) 18 | except Exception: 19 | raise RunnerError(f"Error running {args[0]}.") 20 | -------------------------------------------------------------------------------- /at/utils/text.py: -------------------------------------------------------------------------------- 1 | from logging import getLogger 2 | 3 | from at.utils.file import get_extension, save_file, save_file_from_url 4 | from at.utils.processor import ( 5 | get_text, 6 | get_xml, 7 | md2xml, 8 | rst2xml, 9 | ProcessingError, 10 | ) 11 | 12 | 13 | # Exceptions 14 | class TextProcessingError(Exception): 15 | """Error class for text errors""" 16 | 17 | pass 18 | 19 | 20 | def get_text_id_from_file( 21 | file, upload_dir, raw=False, text_or_xml=False, logger=getLogger() 22 | ): 23 | """Save file and returns text draft""" 24 | 25 | (dir_path, filename) = save_file(file, upload_dir) 26 | 27 | if not raw: 28 | file_ext = get_extension(filename).lower() 29 | if not text_or_xml or (text_or_xml and file_ext not in [".txt", ".xml"]): 30 | (dir_path, filename) = get_text_id(dir_path, filename, logger) 31 | 32 | return (dir_path, filename) 33 | 34 |
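# ---------------------------------------------------------------------------
# Editor's usage sketch (illustration only, not part of text.py). Unless
# raw=True, get_text_id_from_file() converts an uploaded draft to plain text:
# .md/.mkd via md2xml(), .rst via rst2xml(), then XML to text with xml2rfc.
# UPLOAD_DIR matches the key written to at/config.py in the Dockerfile.
#
#     from flask import current_app, request
#     from at.utils.text import get_text_id_from_file
#
#     file = request.files["file"]
#     dir_path, text_draft = get_text_id_from_file(
#         file, current_app.config["UPLOAD_DIR"]
#     )
#     # text_draft is the path of the xml2rfc-rendered .txt draft;
#     # TextProcessingError is raised if a conversion step fails.
# ---------------------------------------------------------------------------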
35 | def get_text_id_from_url( 36 | url, upload_dir, raw=False, text_or_xml=False, logger=getLogger() 37 | ): 38 | """Save file from URL and returns text draft""" 39 | 40 | (dir_path, filename) = save_file_from_url(url, upload_dir) 41 | 42 | if not raw: 43 | file_ext = get_extension(filename).lower() 44 | if not text_or_xml or (text_or_xml and file_ext not in [".txt", ".xml"]): 45 | (dir_path, filename) = get_text_id(dir_path, filename, logger) 46 | 47 | return (dir_path, filename) 48 | 49 | 50 | def get_text_id(dir_path, filename, logger=getLogger()): 51 | """Returns text draft""" 52 | file_ext = get_extension(filename) 53 | 54 | if file_ext.lower() != ".txt": 55 | logger.debug("processing non text file") 56 | 57 | try: 58 | if file_ext.lower() in [".md", ".mkd"]: 59 | filename = md2xml(filename, logger) 60 | elif file_ext.lower() in [".rst"]: 61 | filename = rst2xml(filename, logger) 62 | xml_file, _ = get_xml(filename, logger=logger) 63 | filename, _ = get_text(xml_file, logger=logger) 64 | except ProcessingError as e: 65 | logger.error("error processing non text file: {}".format(filename)) 66 | raise TextProcessingError(str(e)) 67 | 68 | return (dir_path, filename) 69 | -------------------------------------------------------------------------------- /at/utils/validation.py: -------------------------------------------------------------------------------- 1 | from logging import getLogger 2 | from subprocess import CalledProcessError 3 | 4 | from xml2rfc import XmlRfcParser 5 | from lxml.etree import XMLSyntaxError 6 | 7 | from at.utils.file import cleanup_output, get_extension, get_filename 8 | from at.utils.logs import process_xml2rfc_log 9 | from at.utils.processor import process_file, ProcessingError 10 | from at.utils.text import get_text_id_from_file 11 | from at.utils.runner import proc_run, RunnerError 12 | 13 | 14 | def validate_draft(file, upload_dir, logger=getLogger()): 15 | """Validate uploaded file.""" 16 | 17 | file_ext = get_extension(file.filename) 18 | 19 | if file_ext.lower() == ".txt": 20 | # don't try to convert text files to XML 21 | _, filename = get_text_id_from_file(file=file, upload_dir=upload_dir) 22 | log = {"idnits": idnits(filename, logger)} 23 | else: 24 | _, filename = process_file(file=file, upload_dir=upload_dir, logger=logger) 25 | log = validate_xml(filename, logger=logger) 26 | 27 | # get list of non ASCII chars 28 | log["non_ascii"] = get_non_ascii_chars(filename=filename, logger=logger) 29 | 30 | return log 31 | 32 | 33 | def validate_xml(filename, logger=getLogger()): 34 | """Validate XML2RFC 35 | NOTE: if file is XML2RFC v2 that will get converted to v3""" 36 | 37 | try: 38 | log = None 39 | 40 | logger.debug("invoking xml2rfc parser") 41 | 42 | parser = XmlRfcParser(filename, quiet=True) 43 | xmltree = parser.parse(remove_comments=False, quiet=True) 44 | xmlroot = xmltree.getroot() 45 | xml2rfc_version = xmlroot.get("version", "2") 46 | v2_processed_log = None 47 | 48 | if xml2rfc_version == "2": 49 | filename, output = convert_v2v3(filename, logger) 50 | v2_processed_log = process_xml2rfc_log(output, filename) 51 | 52 | except XMLSyntaxError as e: 53 | logger.info("xml2rfc error: {}".format(str(e))) 54 | raise ProcessingError(e) 55 | 56 | logger.info("new file saved at {}".format(filename)) 57 | 58 | log, text_file = xml2rfc_validation(filename, logger) 59 | processed_log = process_xml2rfc_log(log, filename) 60 | 61 | idnits_log = idnits(text_file, logger) 62 | 63 | if v2_processed_log: 64 | processed_log = {k: v2_processed_log[k] + v for k, v 
in processed_log.items()} 65 | 66 | processed_log["idnits"] = idnits_log 67 | 68 | return processed_log 69 | 70 | 71 | def xml2rfc_validation(filename, logger=getLogger()): 72 | """Run xml2rfc to validate the document and return output and text file""" 73 | 74 | logger.debug("running xml2rfc") 75 | 76 | text_file = get_filename(filename, "txt") 77 | 78 | try: 79 | output = proc_run( 80 | args=["xml2rfc", "--warn-bare-unicode", "--out", text_file, filename], 81 | capture_output=True, 82 | ) 83 | output.check_returncode() 84 | except RunnerError as e: # pragma: no cover 85 | logger.info(f"process error: {str(e)}") 86 | except CalledProcessError: 87 | if output.stderr: 88 | logger.info("xml2rfc error: {}".format(output.stderr)) 89 | else: 90 | logger.info("xml2rfc error: no stderr output") 91 | 92 | return (output, text_file) 93 | 94 | 95 | def convert_v2v3(filename, logger=getLogger()): 96 | """Convert XML2RFC v2 file to v3 and return file name output""" 97 | 98 | logger.debug("converting v2 XML to v3 XML") 99 | 100 | xml_file = get_filename(filename, "xml") 101 | 102 | try: 103 | output = proc_run( 104 | args=["xml2rfc", "--v2v3", "--out", xml_file, filename], capture_output=True 105 | ) 106 | output.check_returncode() 107 | except RunnerError as e: # pragma: no cover 108 | logger.info(f"process error: {str(e)}") 109 | raise ProcessingError(str(e)) 110 | except CalledProcessError: 111 | if output.stderr: 112 | error = output.stderr.decode("utf-8") 113 | logger.info("xml2rfc v2v3 error: {}".format(error)) 114 | else: 115 | error = "v2v3 conversion error" 116 | logger.info("xml2rfc v2v3 error: no stderr output") 117 | raise ProcessingError(error) 118 | 119 | logger.info("new file saved at {}".format(xml_file)) 120 | return xml_file, output 121 | 122 | 123 | def idnits( 124 | filename, 125 | logger=getLogger(), 126 | verbose="0", 127 | show_text=False, 128 | year=False, 129 | submit_check=False, 130 | ): 131 | """Run idnits and return output""" 132 | 133 | logger.debug("running idnits") 134 | 135 | args = ["idnits"] 136 | if verbose == "1": 137 | args.append("--verbose") 138 | elif verbose == "2": 139 | args.append("--verbose") 140 | args.append("--verbose") # add --verbose twice 141 | if show_text: 142 | args.append("--showtext") 143 | if year: 144 | args.append("--year") 145 | args.append(str(year)) 146 | if submit_check: 147 | args.append("--submitcheck") 148 | args.append(filename) 149 | 150 | error = None 151 | output = None 152 | try: 153 | output = proc_run(args=args, capture_output=True) 154 | output.check_returncode() 155 | except RunnerError as e: # pragma: no cover 156 | error = str(e) 157 | logger.info(f"process error: {error}") 158 | except CalledProcessError: 159 | if output.stderr: 160 | error = output.stderr.decode("utf-8") 161 | logger.info("idnits error: {}".format(error)) 162 | else: 163 | error = "Error occured while running idnits" 164 | logger.info("idnits error: no stderr output") 165 | 166 | if output and output.stdout: 167 | stdout = output.stdout.decode("utf-8", errors="ignore") 168 | return cleanup_output(filename, stdout) 169 | else: 170 | return error 171 | 172 | 173 | def svgcheck(filename, logger=getLogger()): 174 | """Run svgcheck and return output""" 175 | 176 | logger.debug("running svgcheck") 177 | 178 | parsed_svg_file = get_filename(filename, "parsed.svg") 179 | args = [ 180 | "svgcheck", 181 | "--no-network", 182 | "--always-emit", 183 | "--repair", 184 | "--out", 185 | parsed_svg_file, 186 | filename, 187 | ] 188 | result = None 189 | errors = None 190 | 
190 |     parsed_svg = None
191 |     try:
192 |         output = proc_run(args=args, capture_output=True)
193 |         output.check_returncode()
194 |     except RunnerError as e:  # pragma: no cover
195 |         errors = str(e)
196 |         logger.info(f"process error: {errors}")
197 |     except CalledProcessError:
198 |         if output.stderr:
199 |             errors = output.stderr.decode("utf-8")
200 |             logger.info("svgcheck error: {}".format(errors))
201 |         else:
202 |             errors = "Error occurred while running svgcheck"
203 |             logger.info("svgcheck error: no stderr output")
204 | 
205 |     if not errors:
206 |         if output.stderr:
207 |             # svgcheck writes to stderr
208 |             result = output.stderr.decode("utf-8", errors="ignore")
209 | 
210 |         with open(parsed_svg_file) as file:
211 |             parsed_svg = file.read()
212 | 
213 |     return (
214 |         parsed_svg,
215 |         cleanup_output(filename, result),
216 |         cleanup_output(filename, errors),
217 |     )
218 | 
219 | 
220 | def get_non_ascii_chars(filename, logger=getLogger()):
221 |     """Run kramdown-rfc echars and return output"""
222 | 
223 |     logger.debug("running echars")
224 |     output = None
225 |     error = None
226 | 
227 |     try:
228 |         output = proc_run(["echars", filename], capture_output=True)
229 |     except RunnerError as e:  # pragma: no cover
230 |         error = str(e)
231 |         logger.info(f"process error: {error}")
232 | 
233 |     if output:
234 |         return output.stdout.decode("utf-8")
235 |     else:
236 |         return error
237 | 
--------------------------------------------------------------------------------
/constraints.txt:
--------------------------------------------------------------------------------
1 | blinker==1.9.0
2 | Brotli==1.1.0
3 | certifi==2025.4.26
4 | cffi==1.17.1
5 | charset-normalizer==3.4.2
6 | click==8.2.1
7 | ConfigArgParse==1.7.1
8 | cssselect2==0.8.0
9 | decorator==5.2.1
10 | dnspython==2.7.0
11 | eventlet==0.40.0
12 | Flask==3.1.1
13 | flask-cors==6.0.0
14 | fonttools==4.58.0
15 | google-i18n-address==3.1.1
16 | greenlet==3.2.2
17 | gunicorn==23.0.0
18 | id2xml==1.5.2
19 | iddiff==0.4.3
20 | idna==3.10
21 | intervaltree==3.1.0
22 | itsdangerous==2.2.0
23 | Jinja2==3.1.6
24 | lxml==5.4.0
25 | MarkupSafe==3.0.2
26 | packaging==25.0
27 | pathlib2==2.3.7.post1
28 | pillow==11.2.1
29 | platformdirs==4.3.8
30 | pycountry==24.6.1
31 | pycparser==2.22
32 | pydyf==0.11.0
33 | pyphen==0.17.2
34 | PyYAML==6.0.2
35 | requests==2.32.3
36 | sentry-sdk==2.29.1
37 | setuptools==80.8.0
38 | six==1.17.0
39 | sortedcontainers==2.4.0
40 | svgcheck==0.10.0
41 | tinycss2==1.4.0
42 | tinyhtml5==2.0.0
43 | urllib3==2.4.0
44 | wcwidth==0.2.13
45 | weasyprint==65.0
46 | webencodings==0.5.1
47 | Werkzeug==3.1.3
48 | xml2rfc==3.28.1
49 | zopfli==0.2.3.post1
50 | 
--------------------------------------------------------------------------------
/dev.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:noble
2 | LABEL maintainer="Kesara Rathnayake "
3 | 
4 | ARG VERSION=6.6.6
5 | 
6 | ENV DEBIAN_FRONTEND=noninteractive
7 | ENV PATH=$PATH:./node_modules/.bin
8 | # Disable local file read for kramdown-rfc
9 | ENV KRAMDOWN_SAFE=1
10 | 
11 | WORKDIR /usr/src/app
12 | 
13 | SHELL ["/bin/bash", "-o", "pipefail", "-c"]
14 | 
15 | # Add nodejs 18.x
16 | RUN apt-get update && \
17 |     apt-get install -y curl gpg && \
18 |     curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && \
19 |     echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_18.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
20 | 
21 | RUN
apt-get update && \ 22 | apt-get install -y \ 23 | software-properties-common \ 24 | gcc \ 25 | wget \ 26 | ruby \ 27 | python3 \ 28 | python3-venv \ 29 | libpango-1.0-0 \ 30 | libpango1.0-dev \ 31 | wdiff \ 32 | nodejs \ 33 | gawk \ 34 | bison \ 35 | flex \ 36 | make \ 37 | git \ 38 | build-essential \ 39 | cmake \ 40 | nginx \ 41 | supervisor && \ 42 | rm -rf /var/lib/apt/lists/* /var/log/dpkg.log && \ 43 | apt-get autoremove -y && \ 44 | apt-get clean -y 45 | 46 | # Install rfcdiff 47 | RUN wget https://github.com/ietf-tools/rfcdiff/archive/refs/tags/1.49.tar.gz && \ 48 | tar zxf 1.49.tar.gz -C /tmp/ && \ 49 | mv /tmp/rfcdiff-1.49/rfcdiff /bin && \ 50 | chmod +x /bin/rfcdiff && \ 51 | rm -rf 1.49.tar.gz /tmp/rfcdiff-1.49 52 | 53 | # Install bap 54 | RUN wget https://github.com/ietf-tools/bap/archive/refs/heads/master.zip && \ 55 | unzip -q master.zip -d /tmp/bap && \ 56 | cd /tmp/bap/bap-master/ && \ 57 | ./configure && \ 58 | make && \ 59 | cp aex bap /bin && \ 60 | cd && \ 61 | rm -rf /tmp/bap master.zip 62 | 63 | # Install idnits 64 | RUN wget https://github.com/ietf-tools/idnits/archive/refs/tags/2.17.1.zip && \ 65 | unzip -q 2.17.1.zip -d ~/idnits && \ 66 | cp ~/idnits/idnits-2.17.1/idnits /bin && \ 67 | chmod +x /bin/idnits && \ 68 | rm -rf ~/idnits/idnits-2.17.1/idnits idnit 2.17.1.zip 69 | 70 | # Install mmark 71 | RUN arch=$(arch | sed s/aarch64/arm64/ | sed s/x86_64/amd64/) && \ 72 | wget "https://github.com/mmarkdown/mmark/releases/download/v2.2.46/mmark_2.2.46_linux_$arch.tgz" && \ 73 | tar zxf mmark_*.tgz -C /bin/ && \ 74 | rm mmark_*.tgz 75 | 76 | # Build & install rst2rfcxml 77 | RUN git clone --branch v1.6.0 --recurse-submodules https://github.com/dthaler/rst2rfcxml.git && \ 78 | cd rst2rfcxml && \ 79 | cmake -B build -DCMAKE_BUILD_TYPE=Release && \ 80 | cmake --build build && \ 81 | mv ./build/rst2rfcxml/rst2rfcxml /bin && \ 82 | chmod +x /bin/rst2rfcxml && \ 83 | cd .. 
&& \ 84 | rm -rf rst2rfcxml 85 | 86 | COPY Gemfile Gemfile.lock LICENSE README.md api.yml constraints.txt package-lock.json package.json requirements.txt docker/version.py ./ 87 | COPY at ./at 88 | 89 | # Install JavaScript dependencies 90 | RUN npm install 91 | 92 | # Rename idnits v3 binary 93 | RUN mv ./node_modules/.bin/idnits ./node_modules/.bin/idnits3 94 | 95 | # Install Python dependencies 96 | ENV PYTHONUNBUFFERED=1 97 | ENV VENV_DIR=/usr/src/app/venv 98 | RUN python3 -m venv $VENV_DIR 99 | ENV PATH="$VENV_DIR/bin:$PATH" 100 | RUN python -m pip install --upgrade pip 101 | RUN python -m pip install -r requirements.txt -c constraints.txt 102 | 103 | # Install Ruby dependencies 104 | RUN gem install bundler && bundle install 105 | 106 | # nginx unprivileged setup 107 | RUN ln -sf /dev/stdout /var/log/nginx/access.log && \ 108 | ln -sf /dev/stderr /var/log/nginx/error.log && \ 109 | sed -i '/user www-data;/d' /etc/nginx/nginx.conf && \ 110 | sed -i 's,/run/nginx.pid,/tmp/nginx.pid,' /etc/nginx/nginx.conf && \ 111 | sed -i "/^http {/a \ proxy_temp_path /tmp/proxy_temp;\n client_body_temp_path /tmp/client_temp;\n fastcgi_temp_path /tmp/fastcgi_temp;\n uwsgi_temp_path /tmp/uwsgi_temp;\n scgi_temp_path /tmp/scgi_temp;\n" /etc/nginx/nginx.conf && \ 112 | mkdir -p /var/cache/nginx && \ 113 | chown -R www-data:0 /var/cache/nginx && \ 114 | chmod -R g+w /var/cache/nginx 115 | 116 | RUN mkdir -p tmp && \ 117 | echo "UPLOAD_DIR = '$PWD/tmp'" > at/config.py && \ 118 | echo "VERSION = '${VERSION}'" >> at/config.py && \ 119 | echo "REQUIRE_AUTH = False" >> at/config.py && \ 120 | echo "DT_LATEST_DRAFT_URL = 'https://datatracker.ietf.org/api/rfcdiff-latest-json'" >> at/config.py && \ 121 | echo "ALLOWED_DOMAINS = ['ietf.org', 'rfc-editor.org', 'github.com', 'githubusercontent.com', 'github.io', 'gitlab.com', 'gitlab.io', 'codeberg.page', 'httpwg.org', 'quicwg.org']" >> at/config.py && \ 122 | python3 version.py >> at/config.py && \ 123 | chown -R www-data:0 /usr/src/app/tmp 124 | 125 | # cache configuration 126 | RUN mkdir -p /tmp/cache/xml2rfc && \ 127 | mkdir -p /tmp/cache/refcache && \ 128 | mkdir /var/www/.cache && \ 129 | ln -sf /tmp/cache/xml2rfc /var/cache/xml2rfc && \ 130 | chown -R www-data:0 /tmp/cache /var/www/.cache 131 | ENV KRAMDOWN_REFCACHEDIR=/tmp/cache/refcache 132 | 133 | 134 | # COPY required files 135 | COPY static /usr/share/nginx/html/ 136 | COPY api.yml /usr/share/nginx/html/ 137 | COPY docker/gunicorn.py /usr/src/app/ 138 | COPY docker/nginx-default-site.conf /etc/nginx/sites-available/default 139 | COPY docker/supervisord.conf /etc/supervisor/ 140 | COPY requirements.dev.txt /usr/src/app/ 141 | COPY tests /usr/src/app/tests 142 | COPY docker /usr/src/app/docker 143 | 144 | CMD ["bash"] 145 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | services: 3 | author-tools: 4 | build: . 
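    # A minimal local bring-up sketch, assuming AT_PORT, GUNICORN_WORKERS,
    # SENTRY_DSN and SITE_URL are supplied by the .env file that env_file
    # points at below:
    #
    #   docker compose up --build
    #   # the UI is then served on http://localhost:${AT_PORT}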
5 | container_name: author-tools 6 | environment: 7 | GUNICORN_WORKERS: ${GUNICORN_WORKERS} 8 | SENTRY_DSN: ${SENTRY_DSN} 9 | SITE_URL: ${SITE_URL} 10 | env_file: 11 | - '.env' 12 | ports: 13 | - '${AT_PORT}:8080' 14 | mem_limit: 4gb 15 | -------------------------------------------------------------------------------- /docker/gunicorn.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | accesslog = "-" 4 | errorlog = "-" 5 | capture_output = True 6 | workers = os.getenv("GUNICORN_WORKERS", 2) 7 | worker_class = "eventlet" 8 | bind = "0.0.0.0:8008" 9 | -------------------------------------------------------------------------------- /docker/nginx-default-site.conf: -------------------------------------------------------------------------------- 1 | server { 2 | listen 8080 default_server; 3 | listen [::]:8080 default_server; 4 | server_name _; 5 | gzip on; 6 | access_log /dev/stdout; 7 | error_log /dev/stdout; 8 | absolute_redirect off; 9 | keepalive_timeout 70; 10 | client_max_body_size 5m; 11 | proxy_read_timeout 300; 12 | 13 | root /usr/share/nginx/html/; 14 | 15 | location /diff { 16 | rewrite ^/diff /api/iddiff?$args break; 17 | proxy_pass http://127.0.0.1:8008; 18 | } 19 | 20 | location /api/ { 21 | proxy_pass http://127.0.0.1:8008; 22 | } 23 | 24 | location = /abnf/ { 25 | return 301 /abnf; 26 | } 27 | 28 | location = /about/ { 29 | return 301 /about; 30 | } 31 | 32 | location = /iddiff/ { 33 | return 301 /iddiff; 34 | } 35 | 36 | location = /idnits/ { 37 | return 301 /idnits; 38 | } 39 | 40 | location = /svgcheck/ { 41 | return 301 /svgcheck; 42 | } 43 | 44 | location = /rfcdiff/ { 45 | return 301 /rfcdiff; 46 | } 47 | 48 | location / { 49 | if ($request_uri ~ ^/iddiff\?(.*)) { 50 | rewrite ^ /api/iddiff?$args last; 51 | } 52 | if ($request_uri ~ ^/idnits\?(.*)) { 53 | rewrite ^ /api/idnits?$args last; 54 | } 55 | rewrite ^/iddiff$ /iddiff.html break; 56 | rewrite ^/abnf$ /abnf.html break; 57 | rewrite ^/idnits$ /idnits.html break; 58 | rewrite ^/svgcheck$ /svgcheck.html break; 59 | rewrite ^/rfcdiff$ /rfcdiff.html break; 60 | rewrite ^/clean-svg-ids$ /clean-svg-ids.html break; 61 | rewrite ^/about$ /about.html break; 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /docker/supervisord.conf: -------------------------------------------------------------------------------- 1 | [supervisord] 2 | nodaemon=true 3 | logfile=/dev/stdout 4 | logfile_maxbytes=0 5 | pidfile=/tmp/supervisord.pid 6 | 7 | [unix_http_server] 8 | file=/tmp/supervisor.sock 9 | chmod=0700 10 | 11 | [program:nginx] 12 | command=nginx -g "daemon off;" 13 | stdout_logfile=/dev/stdout 14 | stdout_logfile_maxbytes=0 15 | redirect_stderr=true 16 | 17 | [program:gunicorn] 18 | command=gunicorn --config /usr/src/app/gunicorn.py "at:create_app()" 19 | directory=/usr/src/app 20 | redirect_stderr=true 21 | stdout_logfile=/dev/stdout 22 | stdout_logfile_maxbytes=0 23 | 24 | [rpcinterface:supervisor] 25 | supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface 26 | 27 | [supervisorctl] 28 | serverurl=unix:///tmp/supervisor.sock 29 | -------------------------------------------------------------------------------- /docker/version.py: -------------------------------------------------------------------------------- 1 | from logging import getLogger 2 | from subprocess import run as proc_run, CalledProcessError 3 | 4 | from weasyprint import __version__ as weasyprint_version 5 | from xml2rfc import 
__version__ as xml2rfc_version 6 | 7 | 8 | def get_kramdown_rfc_version(logger=getLogger()): 9 | """Return kramdown-rfc version""" 10 | 11 | output = proc_run(args=["kramdown-rfc", "--version"], capture_output=True) 12 | 13 | try: 14 | output.check_returncode() 15 | return output.stdout.decode("utf-8").replace("kramdown-rfc", "").strip() 16 | except CalledProcessError: # pragma: no cover 17 | logger.info("kramdown-rfc error: {}".format(output.stderr.decode("utf-8"))) 18 | return None 19 | 20 | 21 | def get_mmark_version(logger=getLogger()): 22 | """Return mmark version""" 23 | 24 | output = proc_run(args=["mmark", "--version"], capture_output=True) 25 | 26 | try: 27 | output.check_returncode() 28 | return output.stdout.decode("utf-8").strip() 29 | except CalledProcessError: # pragma: no cover 30 | logger.info("mmark error: {}".format(output.stderr.decode("utf-8"))) 31 | return None 32 | 33 | 34 | def get_id2xml_version(logger=getLogger()): 35 | """Return id2xml version""" 36 | 37 | output = proc_run(args=["id2xml", "--version"], capture_output=True) 38 | 39 | try: 40 | output.check_returncode() 41 | return output.stdout.decode("utf-8").replace("id2xml", "").strip() 42 | except CalledProcessError: # pragma: no cover 43 | logger.info("id2xml error: {}".format(output.stderr.decode("utf-8"))) 44 | return None 45 | 46 | 47 | def get_xml2rfc_version(): 48 | """Return xml2rfc version""" 49 | 50 | return xml2rfc_version 51 | 52 | 53 | def get_weasyprint_version(): 54 | """Return Weasyprint version""" 55 | 56 | return weasyprint_version 57 | 58 | 59 | def get_idnits_version(logger=getLogger()): 60 | """Return idnits version""" 61 | 62 | output = proc_run(args=["idnits", "--version"], capture_output=True) 63 | 64 | try: 65 | output.check_returncode() 66 | return output.stdout.decode("utf-8").replace("idnits", "").strip() 67 | except CalledProcessError: # pragma: no cover 68 | logger.info("idnits error: {}".format(output.stderr.decode("utf-8"))) 69 | return None 70 | 71 | 72 | def get_aasvg_version(logger=getLogger()): 73 | """Return aasvg version""" 74 | 75 | output = proc_run(args=["aasvg", "--version"], capture_output=True) 76 | 77 | try: 78 | output.check_returncode() 79 | return output.stdout.decode("utf-8").replace("aasvg", "").strip() 80 | except CalledProcessError: # pragma: no cover 81 | logger.info("aasvg error: {}".format(output.stderr.decode("utf-8"))) 82 | return None 83 | 84 | 85 | def get_iddiff_version(logger=getLogger()): 86 | """Return iddiff version""" 87 | 88 | output = proc_run(args=["iddiff", "--version"], capture_output=True) 89 | 90 | try: 91 | output.check_returncode() 92 | return output.stdout.decode("utf-8").replace("iddiff", "").strip() 93 | except CalledProcessError: # pragma: no cover 94 | logger.info("iddiff error: {}".format(output.stderr.decode("utf-8"))) 95 | return None 96 | 97 | 98 | def get_svgcheck_version(logger=getLogger()): 99 | """Return svgcheck version""" 100 | 101 | output = proc_run(args=["svgcheck", "--version"], capture_output=True) 102 | 103 | try: 104 | output.check_returncode() 105 | return ( 106 | output.stdout.decode("utf-8") 107 | .split("\n")[0] 108 | .replace("svgcheck =", "") 109 | .strip() 110 | ) 111 | except CalledProcessError: # pragma: no cover 112 | logger.info("svgcheck error: {}".format(output.stderr.decode("utf-8"))) 113 | return None 114 | 115 | 116 | def get_rfcdiff_version(logger=getLogger()): 117 | """Return rfcdiff version""" 118 | 119 | output = proc_run(args=["rfcdiff", "--version"], capture_output=True) 120 | 121 | try: 122 | 
output.check_returncode() 123 | return ( 124 | output.stdout.decode("utf-8").split("\n")[0].replace("rfcdiff", "").strip() 125 | ) 126 | except CalledProcessError: # pragma: no cover 127 | logger.info("rfcdiff error: {}".format(output.stderr.decode("utf-8"))) 128 | return None 129 | 130 | 131 | def get_rst2rfcxml_version(logger=getLogger()): 132 | """Return rst2rfcxml version""" 133 | 134 | output = proc_run(args=["rst2rfcxml", "--version"], capture_output=True) 135 | 136 | try: 137 | output.check_returncode() 138 | return ( 139 | output.stdout.decode("utf-8") 140 | .split("\n")[0] 141 | .replace("rst2rfcxml", "") 142 | .strip() 143 | ) 144 | except CalledProcessError: # pragma: no cover 145 | logger.info("rst2rfcxml error: {}".format(output.stderr.decode("utf-8"))) 146 | return None 147 | 148 | 149 | if __name__ == "__main__": 150 | VERSION_INFORMATION = { 151 | "xml2rfc": get_xml2rfc_version(), 152 | "kramdown-rfc": get_kramdown_rfc_version(), 153 | "mmark": get_mmark_version(), 154 | "id2xml": get_id2xml_version(), 155 | "weasyprint": get_weasyprint_version(), 156 | "idnits": get_idnits_version(), 157 | "iddiff": get_iddiff_version(), 158 | "aasvg": get_aasvg_version(), 159 | "svgcheck": get_svgcheck_version(), 160 | "rfcdiff": get_rfcdiff_version(), 161 | "rst2rfcxml": get_rst2rfcxml_version(), 162 | "bap": "1.4", 163 | } # bap does not provide a switch to get version 164 | print(f"VERSION_INFORMATION = {VERSION_INFORMATION}") 165 | -------------------------------------------------------------------------------- /k8s/author-tools.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: author-tools 5 | spec: 6 | replicas: 1 7 | revisionHistoryLimit: 2 8 | selector: 9 | matchLabels: 10 | app: author-tools 11 | template: 12 | metadata: 13 | labels: 14 | app: author-tools 15 | spec: 16 | securityContext: 17 | fsGroup: 33 18 | runAsUser: 33 19 | runAsGroup: 33 20 | runAsNonRoot: true 21 | initContainers: 22 | - args: 23 | - -c 24 | - chmod 770 /tmp; 25 | - chown -R 33:33 /tmp; 26 | command: 27 | - /bin/sh 28 | image: alpine:latest 29 | name: setup-volumes 30 | volumeMounts: 31 | - name: at-tmp 32 | mountPath: /tmp 33 | securityContext: 34 | runAsUser: 0 35 | runAsNonRoot: false 36 | containers: 37 | - name: author-tools 38 | image: "ghcr.io/ietf-tools/author-tools:$APP_IMAGE_TAG" 39 | imagePullPolicy: Always 40 | command: ["supervisord"] 41 | ports: 42 | - containerPort: 8080 43 | name: http 44 | protocol: TCP 45 | volumeMounts: 46 | - name: at-tmp 47 | mountPath: /tmp 48 | - name: at-kramdown 49 | mountPath: /tmp/cache/refcache 50 | - name: at-xml2rfc 51 | mountPath: /tmp/cache/xml2rfc 52 | - name: at-idnits 53 | mountPath: /var/www/.idnits 54 | - name: at-cache 55 | mountPath: /var/www/.cache 56 | - name: at-app-tmp 57 | mountPath: /usr/src/app/tmp 58 | env: 59 | - name: "CONTAINER_ROLE" 60 | value: "author-tools" 61 | # ensures the pod gets recreated on every deploy: 62 | - name: "DEPLOY_UID" 63 | value: "$DEPLOY_UID" 64 | envFrom: 65 | - secretRef: 66 | name: author-tools-secrets-env 67 | securityContext: 68 | allowPrivilegeEscalation: false 69 | capabilities: 70 | drop: 71 | - ALL 72 | readOnlyRootFilesystem: true 73 | runAsUser: 33 74 | runAsGroup: 33 75 | resources: 76 | requests: 77 | memory: "256Mi" 78 | cpu: "100m" 79 | limits: 80 | memory: "4Gi" 81 | cpu: "2000m" 82 | volumes: 83 | - name: at-tmp 84 | emptyDir: 85 | sizeLimit: "2Gi" 86 | - name: at-kramdown 87 | emptyDir: 88 | 
sizeLimit: "1Gi" 89 | - name: at-xml2rfc 90 | emptyDir: 91 | sizeLimit: "1Gi" 92 | - name: at-idnits 93 | emptyDir: 94 | sizeLimit: "1Gi" 95 | - name: at-cache 96 | emptyDir: 97 | sizeLimit: "1Gi" 98 | - name: at-app-tmp 99 | emptyDir: 100 | sizeLimit: "4Gi" 101 | restartPolicy: Always 102 | terminationGracePeriodSeconds: 30 103 | --- 104 | apiVersion: v1 105 | kind: Service 106 | metadata: 107 | name: author-tools 108 | spec: 109 | type: ClusterIP 110 | ports: 111 | - port: 80 112 | targetPort: http 113 | protocol: TCP 114 | name: http 115 | selector: 116 | app: author-tools 117 | -------------------------------------------------------------------------------- /k8s/kustomization.yaml: -------------------------------------------------------------------------------- 1 | namespace: author-tools 2 | resources: 3 | - author-tools.yaml 4 | -------------------------------------------------------------------------------- /k8s/secrets.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: secrets-env 5 | type: Opaque 6 | stringData: 7 | GUNICORN_WORKERS: "2" 8 | SITE_URL: "https://author-tools.ietf.org" 9 | DT_LATEST_DRAFT_URL: "http://dt-datatracker.datatracker.svc/api/rfcdiff-latest-json" 10 | # SENTRY_DSN: "" secret from vault 11 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "author-tools", 3 | "version": "1.9.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "author-tools", 9 | "version": "1.9.0", 10 | "license": "SEE LICENSE IN LICENSE", 11 | "dependencies": { 12 | "aasvg": "^0.4.0" 13 | } 14 | }, 15 | "node_modules/aasvg": { 16 | "version": "0.4.2", 17 | "resolved": "https://registry.npmjs.org/aasvg/-/aasvg-0.4.2.tgz", 18 | "integrity": "sha512-J0n4FYxi6jXkUcpP1vbCLUNs5qW40fQB46fDkTXfGPjiVW5Ua9jucI0oGvRG4PLX5eiyKzh5JGNYPLgobAawWA==", 19 | "license": "BSD", 20 | "bin": { 21 | "aasvg": "main.js" 22 | }, 23 | "engines": { 24 | "node": ">=10" 25 | } 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "author-tools", 3 | "version": "1.9.0", 4 | "description": "IETF Author Tools", 5 | "main": "", 6 | "directories": {}, 7 | "scripts": {}, 8 | "repository": { 9 | "type": "git", 10 | "url": "git+https://github.com/ietf-tools/author-tools.git" 11 | }, 12 | "author": "Kesara Rathnayake", 13 | "license": "SEE LICENSE IN LICENSE", 14 | "bugs": { 15 | "url": "https://github.com/ietf-tools/author-tools/issues" 16 | }, 17 | "homepage": "https://github.com/ietf-tools/author-tools#readme", 18 | "dependencies": { 19 | "aasvg": "^0.4.0" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /requirements.dev.txt: -------------------------------------------------------------------------------- 1 | black>=25.1.0 2 | coverage>=6.1.1 3 | Faker>=8.12.1 4 | hypothesis>=6.16.0 5 | pyflakes>=2.3.1 6 | responses==0.17.0 7 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | eventlet>=0.40.0 2 | decorator>=5.2.1 3 | Flask>=3.1.1 4 | flask-cors>=6.0.0 5 | gunicorn>=23.0.0 6 | id2xml>=1.5.2 7 | 
iddiff>=0.4.3 8 | requests>=2.32.3 9 | sentry-sdk[flask]>=2.29.1 10 | svgcheck>=0.10.0 11 | xml2rfc[pdf]>=3.28.1 12 | -------------------------------------------------------------------------------- /static/doc/favicon-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ietf-tools/author-tools/a50c3997464b1a32005eab6ad410c83e616f8ab3/static/doc/favicon-16x16.png -------------------------------------------------------------------------------- /static/doc/favicon-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ietf-tools/author-tools/a50c3997464b1a32005eab6ad410c83e616f8ab3/static/doc/favicon-32x32.png -------------------------------------------------------------------------------- /static/doc/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | IETF Author Tools - API Documentation 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 44 | 45 | 46 | 47 |
48 | 92 |
93 |
94 |
95 |
96 | 110 |
111 |
112 |
113 |
114 | 137 | 141 | 142 | 143 | 144 | 165 | 166 | 179 | 180 | 181 | 182 | -------------------------------------------------------------------------------- /static/doc/oauth2-redirect.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Swagger UI: OAuth2 Redirect 5 | 6 | 7 | 74 | 75 | 76 | -------------------------------------------------------------------------------- /static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ietf-tools/author-tools/a50c3997464b1a32005eab6ad410c83e616f8ab3/static/favicon.ico -------------------------------------------------------------------------------- /static/robots.txt: -------------------------------------------------------------------------------- 1 | User-agent: * 2 | Allow: / 3 | Allow: /iddiff$ 4 | Disallow: /api/ 5 | Disallow: /diff 6 | Disallow: /iddiff?* 7 | Disallow: /idnits?* 8 | Disallow: /idnits3?* 9 | 10 | Sitemap: https://author-tools.ietf.org/sitemap.xml 11 | 12 | Host: author-tools.ietf.org 13 | -------------------------------------------------------------------------------- /static/scripts/abnf.js: -------------------------------------------------------------------------------- 1 | const formInput = document.getElementById('formInput'); 2 | const formTextAreaInput = document.getElementById('formTextAreaInput'); 3 | const buttonExtract = document.getElementById('buttonExtract'); 4 | const buttonParse = document.getElementById('buttonParse'); 5 | const accordionAbnfParse = document.getElementById('accordionAbnfParse'); 6 | const accordionItemErrors = document.getElementById('accordionItemErrors'); 7 | const preErrors = document.getElementById('preErrors'); 8 | const accordionItemAbnf = document.getElementById('accordionItemAbnf'); 9 | const preAbnf = document.getElementById('preAbnf'); 10 | 11 | // enable Bootstrap/Popper tooltips 12 | var tooltipTriggerList = [].slice.call(document.querySelectorAll('[data-bs-toggle2="tooltip"]')); 13 | let tooltipList = tooltipTriggerList.map(function (tooltipTriggerEl) { 14 | return new bootstrap.Tooltip(tooltipTriggerEl); 15 | }); 16 | 17 | reset(); 18 | 19 | formInput.addEventListener('keydown', submit); 20 | buttonExtract.addEventListener('click', extract); 21 | buttonParse.addEventListener('click', parse); 22 | 23 | function submit(event) { 24 | if (event.key == 'Enter') { 25 | event.preventDefault(); 26 | extract(); 27 | } 28 | } 29 | 30 | function reset() { 31 | for (let tooltip of tooltipList) { 32 | tooltip.hide(); 33 | } 34 | 35 | resetButtons(); 36 | accordionItemErrors.style.display = 'none'; 37 | preErrors.textContent = ''; 38 | accordionItemAbnf.style.display = 'none'; 39 | preAbnf.textContent = ''; 40 | } 41 | 42 | function resetButtons() { 43 | buttonExtract.disabled = false; 44 | buttonExtract.innerText = buttonExtract.dataset.title; 45 | buttonParse.disabled = false; 46 | buttonParse.innerText = buttonParse.dataset.title; 47 | } 48 | 49 | function disableButtons() { 50 | buttonExtract.disabled = true; 51 | buttonParse.disabled = true; 52 | } 53 | 54 | function extract() { 55 | reset(); 56 | 57 | url = '/api/abnf/extract?'; 58 | 59 | try { 60 | urlInput = new URL(formInput.value); 61 | url = url + 'url=' + urlInput.href; 62 | 63 | } catch (_) { 64 | url = url + 'doc=' + formInput.value; 65 | } 66 | 67 | window.location.href = encodeURI(url); 68 | } 69 | 70 | function parse() { 71 | reset(); 72 | 73 | buttonParse.innerHTML = '' + buttonParse.innerHTML; 74 | 
disableButtons();
75 | 
76 |     apiCall = '/api/abnf/parse'
77 | 
78 |     const formData = new FormData();
79 |     formData.append('input', formTextAreaInput.value);
80 | 
81 |     const request = new Request(apiCall, {
82 |         method: 'POST',
83 |         body: formData
84 |     });
85 | 
86 |     fetch(request)
87 |         .then(function(response) {
88 |             if (!response.ok) {
89 |                 throw new Error(`There was an issue processing your request. (HTTP Status: ${response.status})`);
90 |             }
91 |             return response.json();
92 |         })
93 |         .then(function(json) {
94 |             reset();
95 |             if (json.errors) {
96 |                 accordionItemErrors.style.display = 'block';
97 |                 preErrors.textContent = json.errors;
98 |             }
99 |             if (json.abnf) {
100 |                 accordionItemAbnf.style.display = 'block';
101 |                 preAbnf.textContent = json.abnf;
102 |             }
103 |             accordionAbnfParse.scrollIntoView();
104 |         })
105 |         .catch(error => {
106 |             accordionItemErrors.style.display = 'block';
107 |             preErrors.textContent = error;
108 |             resetButtons();
109 |         });
110 | }
111 | 
--------------------------------------------------------------------------------
/static/scripts/about.js:
--------------------------------------------------------------------------------
1 | const alertError = document.getElementById('alertError');
2 | const messageError = document.getElementById('messageError');
3 | 
4 | alertError.style.display = 'none';
5 | messageError.textContent = '';
6 | 
7 | const apiCall = '/api/version';
8 | 
9 | const request = new Request(apiCall, {
10 |     method: 'GET',
11 | });
12 | 
13 | fetch(request)
14 |     .then(function(response) { return response.json(); })
15 |     .then(function(json) {
16 |         document.getElementById('spanIetfat').textContent = json.versions.author_tools_api;
17 |         document.getElementById('spanXml2rfc').textContent = json.versions.xml2rfc;
18 |         document.getElementById('spanKramdown').textContent = json.versions['kramdown-rfc'];
19 |         document.getElementById('spanMmark').textContent = json.versions.mmark;
20 |         document.getElementById('spanId2xml').textContent = json.versions.id2xml;
21 |         document.getElementById('spanIdnits').textContent = json.versions.idnits;
22 |         document.getElementById('spanIddiff').textContent = json.versions.iddiff;
23 |         document.getElementById('spanWeasyprint').textContent = json.versions.weasyprint;
24 |         document.getElementById('spanAasvg').textContent = json.versions.aasvg;
25 |         document.getElementById('spanBap').textContent = json.versions.bap;
26 |         document.getElementById('spanSvgcheck').textContent = json.versions.svgcheck;
27 |         document.getElementById('spanRfcdiff').textContent = json.versions.rfcdiff;
28 |         document.getElementById('spanRst2rfcxml').textContent = json.versions.rst2rfcxml;
29 |     })
30 |     .catch(error => {
31 |         alertError.style.display = 'block';
32 |         messageError.textContent = 'Error occurred while retrieving version information.';
33 |     });
34 | 
--------------------------------------------------------------------------------
/static/scripts/clean-svg-ids.js:
--------------------------------------------------------------------------------
1 | const alertError = document.getElementById('alertError');
2 | const buttonDownload = document.getElementById('buttonDownload');
3 | const buttonOpen = document.getElementById('buttonOpen');
4 | const formFile = document.getElementById('formFile');
5 | const messageError = document.getElementById('messageError');
6 | const accordionItemWarnings = document.getElementById('accordionItemWarnings');
7 | const accordionItemErrors = document.getElementById('accordionItemErrors');
8 | const listWarnings =
document.getElementById('listWarnings'); 9 | const listErrors = document.getElementById('listErrors'); 10 | const buttonClean = document.getElementById('buttonClean'); 11 | 12 | // enable Bootstrap/Popper tooltips 13 | var tooltipTriggerList = [].slice.call(document.querySelectorAll('[data-bs-toggle="tooltip"]')); 14 | let tooltipList = tooltipTriggerList.map(function (tooltipTriggerEl) { 15 | return new bootstrap.Tooltip(tooltipTriggerEl); 16 | }); 17 | 18 | reset(); 19 | 20 | formFile.addEventListener('change', reset); 21 | buttonClean.addEventListener('click', clean); 22 | 23 | function reset() { 24 | for (let tooltip of tooltipList) { 25 | tooltip.hide(); 26 | } 27 | 28 | alertError.style.display = 'none'; 29 | buttonDownload.style.display = 'none'; 30 | buttonDownload.setAttribute('download', ''); 31 | buttonDownload.href = '#'; 32 | buttonOpen.style.display = 'none'; 33 | buttonOpen.href = '#'; 34 | messageError.textContent = ''; 35 | accordionValidation.style.display = 'none'; 36 | accordionItemWarnings.style.display = 'none'; 37 | accordionItemErrors.style.display = 'none'; 38 | listWarnings.textContent = ''; 39 | listErrors.textContent = ''; 40 | resetButtons(); 41 | } 42 | 43 | function resetButtons() { 44 | buttonClean.disabled = false; 45 | buttonClean.innerText = buttonClean.dataset.title; 46 | } 47 | 48 | function disableButtons() { 49 | buttonClean.disabled = true; 50 | } 51 | 52 | function clean(event) { 53 | reset(); 54 | 55 | buttonClean.innerHTML = '' + buttonClean.innerHTML; 56 | disableButtons(); 57 | 58 | const formData = new FormData(); 59 | const file = formFile.files[0]; 60 | 61 | formData.append('file', file); 62 | 63 | const apiCall = '/api/clean_svg_ids'; 64 | 65 | const request = new Request(apiCall, { 66 | method: 'POST', 67 | body: formData 68 | }); 69 | 70 | fetch(request) 71 | .then(function(response) { return response.json(); }) 72 | .then(function(json) { 73 | resetButtons(); 74 | if (json.error) { 75 | alertError.style.display = 'block'; 76 | messageError.textContent = json.error; 77 | } 78 | if (json.url && json.url.length > 0) { 79 | // file rendering is successful 80 | download_url = json.url + '?download=1' 81 | buttonDownload.style.display = 'block'; 82 | buttonDownload.setAttribute('download', download_url); 83 | buttonDownload.href = download_url; 84 | buttonOpen.style.display = 'block'; 85 | buttonOpen.setAttribute('href', json.url); 86 | buttonOpen.href = json.url; 87 | } 88 | }) 89 | .catch((error) => { 90 | resetButtons(); 91 | alertError.style.display = 'block'; 92 | messageError.textContent = error; 93 | }); 94 | } 95 | -------------------------------------------------------------------------------- /static/scripts/idnits.js: -------------------------------------------------------------------------------- 1 | const formURL = document.getElementById('formURL'); 2 | const buttonIdnits = document.getElementById('buttonIdnits'); 3 | const buttonSubmissionCheck = document.getElementById('buttonSubmissionCheck'); 4 | const switchVerbose = document.getElementById('switchVerbose'); 5 | const switchVeryVerbose = document.getElementById('switchVeryVerbose'); 6 | const switchShowText = document.getElementById('switchShowText'); 7 | const switchSubmissionCheck = document.getElementById('switchSubmissionCheck'); 8 | const tabLinks = document.getElementsByClassName('tab-link'); 9 | 10 | // enable Bootstrap/Popper tooltips 11 | var tooltipTriggerList = [].slice.call(document.querySelectorAll('[data-bs-toggle2="tooltip"]')); 12 | let tooltipList = 
tooltipTriggerList.map(function (tooltipTriggerEl) {
13 |     return new bootstrap.Tooltip(tooltipTriggerEl);
14 | });
15 | 
16 | reset();
17 | 
18 | formURL.addEventListener('keydown', submit);
19 | buttonIdnits.addEventListener('click', idnits);
20 | buttonSubmissionCheck.addEventListener('click', submissionCheck);
21 | for (let tabLink of tabLinks) {
22 |     tabLink.addEventListener('click', resetOther);
23 | }
24 | 
25 | function resetOther(event) {
26 |     const clickedItem = event.target || event.srcElement;
27 |     others = clickedItem.dataset.others.split(',');
28 |     others.forEach(resetForm);
29 | }
30 | 
31 | function resetForm(form_id) {
32 |     const form = document.getElementById(form_id);
33 |     form.reset();
34 | }
35 | 
36 | function submit(event) {
37 |     formURL.classList.remove('is-invalid');
38 |     if (event.key == 'Enter') {
39 |         event.preventDefault();
40 |         idnits();
41 |     }
42 | }
43 | 
44 | function reset() {
45 |     for (let tooltip of tooltipList) {
46 |         tooltip.hide();
47 |     }
48 | }
49 | 
50 | 
51 | function idnits() {
52 |     reset();
53 | 
54 |     if (formURL.value.length > 0) {
55 |         if (formURL.checkValidity()) {
56 | 
57 |             url = '/api/idnits?url=' + formURL.value;
58 |             if (switchVeryVerbose.checked) {
59 |                 url += '&verbose=2';
60 |             } else if (!switchVerbose.checked) {
61 |                 url += '&verbose=0';
62 |             }
63 |             if (!switchShowText.checked) {
64 |                 url += '&hidetext=True';
65 |             }
66 |             if (switchSubmissionCheck.checked) {
67 |                 url += '&submitcheck=True';
68 |             }
69 | 
70 |             window.location.href = url;
71 |         } else {
72 |             formURL.classList.add('is-invalid');
73 |             event.preventDefault();
74 |             event.stopPropagation();
75 |         }
76 |     } else {
77 |         idnitsPost(false);
78 |     }
79 | }
80 | 
81 | function submissionCheck() {
82 |     reset();
83 | 
84 |     if (formURL.value.length > 0) {
85 |         if (formURL.checkValidity()) {
86 |             url = '/api/idnits?url=' + formURL.value;
87 |             url += '&submitcheck=True&hidetext=True';
88 |             window.location.href = url;
89 |         } else {
90 |             formURL.classList.add('is-invalid');
91 |             event.preventDefault();
92 |             event.stopPropagation();
93 |         }
94 |     } else {
95 |         idnitsPost(true);
96 |     }
97 | }
98 | 
99 | function idnitsPost(submissionCheck) {
100 |     const form = document.getElementById('form-tab-file');
101 |     const formData = new FormData();
102 | 
103 |     if (submissionCheck) {
104 |         formData.append('submitcheck', 'True');
105 |     } else {
106 |         if (switchVeryVerbose.checked) {
107 |             formData.append('verbose', '2');
108 |         } else if (!switchVerbose.checked) {
109 |             formData.append('verbose', '0');
110 |         }
111 |         if (!switchShowText.checked) {
112 |             formData.append('hidetext', 'True');
113 |         }
114 |         if (switchSubmissionCheck.checked) {
115 |             formData.append('submitcheck', 'True');
116 |         }
117 |     }
118 | 
119 |     const apiCall = '/api/idnits';
120 |     form.method = 'POST';
121 |     form.action = apiCall;
122 |     form.enctype = 'multipart/form-data';
123 |     for (const [key, value] of formData) {
124 |         const hiddenField = document.createElement('input');
125 |         hiddenField.type = 'hidden';
126 |         hiddenField.id = key;
127 |         hiddenField.name = key;
128 |         hiddenField.value = value;
129 |         form.appendChild(hiddenField);
130 |     }
131 |     form.submit();
132 | }
133 | 
--------------------------------------------------------------------------------
/static/scripts/rfcdiff.js:
--------------------------------------------------------------------------------
1 | const alertError = document.getElementById('alertError');
2 | const formFile1 = document.getElementById('formFile1');
3 | const formFile2 =
document.getElementById('formFile2'); 4 | const formID1 = document.getElementById('formID1'); 5 | const formID2 = document.getElementById('formID2'); 6 | const formURL1 = document.getElementById('formURL1'); 7 | const formURL2 = document.getElementById('formURL2'); 8 | const messageError = document.getElementById('messageError'); 9 | const buttonCompare = document.getElementById('buttonCompare'); 10 | const buttonWdiff = document.getElementById('buttonWdiff'); 11 | const buttonAbdiff = document.getElementById('buttonAbdiff'); 12 | const buttonChbars = document.getElementById('buttonChbars'); 13 | const divDiff = document.getElementById('divDiff'); 14 | const buttonDownload = document.getElementById('buttonDownload'); 15 | const buttonOpen = document.getElementById('buttonOpen'); 16 | const buttonShare = document.getElementById('buttonShare'); 17 | const tabLinks = document.getElementsByClassName('tab-link'); 18 | 19 | // enable Bootstrap/Popper tooltips 20 | var tooltipTriggerList = [].slice.call(document.querySelectorAll('[data-bs-toggle2="tooltip"]')); 21 | let tooltipList = tooltipTriggerList.map(function (tooltipTriggerEl) { 22 | return new bootstrap.Tooltip(tooltipTriggerEl); 23 | }); 24 | 25 | reset(); 26 | 27 | formFile1.addEventListener('change', reset); 28 | formFile2.addEventListener('change', reset); 29 | formID1.addEventListener('keydown', submit); 30 | formID2.addEventListener('keydown', submit); 31 | formURL1.addEventListener('keydown', submit); 32 | formURL2.addEventListener('keydown', submit); 33 | buttonCompare.addEventListener('click', compare); 34 | buttonWdiff.addEventListener('click', compare); 35 | buttonAbdiff.addEventListener('click', compare); 36 | buttonChbars.addEventListener('click', compare); 37 | for (let tabLink of tabLinks) { 38 | tabLink.addEventListener('click', resetOther); 39 | } 40 | 41 | function submit(event) { 42 | if (event.key == 'Enter') { 43 | event.preventDefault(); 44 | compare(); 45 | } 46 | } 47 | 48 | function resetOther(event) { 49 | const clickedItem = event.target || event.srcElement; 50 | others = clickedItem.dataset.others.split(','); 51 | others.forEach(resetForm); 52 | } 53 | 54 | function resetForm(form_id) { 55 | const form = document.getElementById(form_id); 56 | form.reset(); 57 | } 58 | 59 | function reset() { 60 | for (let tooltip of tooltipList) { 61 | tooltip.hide(); 62 | } 63 | 64 | alertError.style.display = 'none'; 65 | messageError.innerHTML = ''; 66 | divDiff.innerHTML = ''; 67 | buttonDownload.style.display = 'none'; 68 | buttonDownload.setAttribute('download', ''); 69 | buttonDownload.href = '#'; 70 | buttonOpen.style.display = 'none'; 71 | buttonOpen.href = '#'; 72 | buttonShare.style.display = 'none'; 73 | buttonShare.href = '#'; 74 | resetButtons(); 75 | } 76 | 77 | function resetButtons() { 78 | buttonCompare.disabled = false; 79 | buttonCompare.innerText = buttonCompare.dataset.title; 80 | buttonWdiff.disabled = false; 81 | buttonWdiff.innerText = buttonWdiff.dataset.title; 82 | buttonAbdiff.disabled = false; 83 | buttonAbdiff.innerText = buttonAbdiff.dataset.title; 84 | buttonChbars.disabled = false; 85 | buttonChbars.innerText = buttonChbars.dataset.title; 86 | } 87 | 88 | function disableButtons() { 89 | buttonCompare.disabled = true; 90 | buttonWdiff.disabled = true; 91 | buttonAbdiff.disabled = true; 92 | buttonChbars.disabled = true; 93 | } 94 | 95 | function getShareableURL(button) { 96 | var url = ''; 97 | 98 | if (formID1.value.length > 0) { 99 | url = '/diff?doc_1=' + formID1.value; 100 | if 
(formID2.value.length > 0) { 101 | url += '&doc_2=' + formID2.value; 102 | } 103 | else if (formURL2.value.length > 0) { 104 | url += '&url_2=' + formURL2.value; 105 | } 106 | } 107 | else if (formURL1.value.length > 0) { 108 | url = '/diff?url_1=' + formURL1.value; 109 | if (formURL2.value.length > 0) { 110 | url += '&url_2=' + formURL2.value; 111 | } 112 | else if (formID2.value.length > 0) { 113 | url += '&doc_2=' + formID2.value; 114 | } 115 | } 116 | else if (formID2.value.length > 0) { 117 | url = '/diff?doc_2=' + formID2.value; 118 | } 119 | else if (formURL2.value.length > 0) { 120 | url = '/diff?url_2=' + formURL2.value; 121 | } 122 | 123 | if (button.value == 'wdiff') { 124 | url += '&wdiff=1'; 125 | } 126 | 127 | if (button.value == 'abdiff') { 128 | url += '&abdiff=1'; 129 | } 130 | 131 | if (button.value == 'chbars') { 132 | url += '&chbars=1'; 133 | } 134 | 135 | url +='&rfcdiff=1'; 136 | 137 | return url; 138 | } 139 | 140 | function getDownloadFilename(file1, file2, type) { 141 | filename = '' 142 | if (file1) { 143 | filename = file1.name.replace(/\.[^/.]+$/, ''); 144 | } 145 | else if (file2) { 146 | filename = file2.name.replace(/\.[^/.]+$/, ''); 147 | } 148 | else if (formID1.value.length > 0) { 149 | filename = formID1.value; 150 | } 151 | else if (formURL1.value.length > 0) { 152 | filename = formURL1.value; 153 | } 154 | else if (formID2.value.length > 0) { 155 | filename = formID2.value; 156 | } 157 | else if (formURL2.value.length > 0) { 158 | filename = formURL2.value; 159 | } 160 | 161 | if (type == 'abdiff' || type == 'chbars') { 162 | return filename + '.diff.txt'; 163 | } else { 164 | return filename + '.diff.html'; 165 | } 166 | } 167 | 168 | function compare(event) { 169 | reset(); 170 | 171 | var button = event.target || event.srcElement; 172 | 173 | button.innerHTML = '' + button.innerHTML; 174 | disableButtons(); 175 | 176 | const formData = new FormData(); 177 | const file1 = formFile1.files[0]; 178 | const file2 = formFile2.files[0]; 179 | 180 | formData.append('rfcdiff', 1); 181 | formData.append('file_1', file1); 182 | formData.append('file_2', file2); 183 | 184 | if (formID1.value.length > 0) { 185 | formData.append('doc_1', formID1.value); 186 | } 187 | if (formID2.value.length > 0) { 188 | formData.append('doc_2', formID2.value); 189 | } 190 | if (formURL1.value.length > 0) { 191 | formData.append('url_1', formURL1.value); 192 | } 193 | if (formURL2.value.length > 0) { 194 | formData.append('url_2', formURL2.value); 195 | } 196 | if (button.value == 'wdiff') { 197 | formData.append('wdiff', 1); 198 | } 199 | if (button.value == 'abdiff') { 200 | formData.append('abdiff', 1); 201 | } 202 | if (button.value == 'chbars') { 203 | formData.append('chbars', 1); 204 | } 205 | 206 | const apiCall = '/api/iddiff'; 207 | 208 | const request = new Request(apiCall, { 209 | method: 'POST', 210 | body: formData 211 | }); 212 | 213 | fetch(request) 214 | .then(response => response.blob()) 215 | .then(blob => { 216 | if (blob.type == 'application/json') { 217 | alertError.style.display = 'block'; 218 | return blob.text(); 219 | } 220 | else { 221 | data = URL.createObjectURL(blob); 222 | buttonDownload.style.display = 'block'; 223 | buttonDownload.setAttribute('download', getDownloadFilename(file1, file2, button.value)); 224 | buttonDownload.href = data; 225 | buttonOpen.style.display = 'block'; 226 | buttonOpen.href = data; 227 | 228 | if (!file1 && !file2) { 229 | buttonShare.style.display = 'block'; 230 | buttonShare.href = getShareableURL(button); 231 | } 232 
| 233 | return blob.text(); 234 | } 235 | }) 236 | .then(data => { 237 | try { 238 | resetButtons(); 239 | data = JSON.parse(data); 240 | messageError.innerHTML = data.error; 241 | } catch (error) { 242 | // diff is successful 243 | 244 | if (button.value == 'wdiff') { 245 | var html = document.createElement( 'html' ); 246 | html.innerHTML = data; 247 | divDiff.innerHTML = html.getElementsByTagName('body')[0].innerHTML; 248 | } else if (button.value == 'abdiff' || button.value == 'chbars') { 249 | var pre = document.createElement( 'pre' ); 250 | var text = document.createTextNode(data) 251 | pre.appendChild(text); 252 | divDiff.innerHTML = pre.outerHTML; 253 | } else { 254 | var html = document.createElement( 'html' ); 255 | html.innerHTML = data; 256 | divDiff.appendChild(html.getElementsByTagName('table')[0]); 257 | } 258 | } 259 | }) 260 | .catch((error) => { 261 | resetButtons(); 262 | alertError.style.display = 'block'; 263 | messageError.innerHTML = error; 264 | }); 265 | } 266 | -------------------------------------------------------------------------------- /static/scripts/svgcheck.js: -------------------------------------------------------------------------------- 1 | const alertError = document.getElementById('alertError'); 2 | const buttonDownload = document.getElementById('buttonDownload'); 3 | const buttonCheck = document.getElementById('buttonCheck'); 4 | const formFile = document.getElementById('formFile'); 5 | const messageError = document.getElementById('messageError'); 6 | const accordionSVGCheck = document.getElementById('accordionSVGCheck'); 7 | const accordionItemErrors = document.getElementById('accordionItemErrors'); 8 | const accordionItemSVGCheck = document.getElementById('accordionItemSVGCheck'); 9 | const accordionItemParsedSVG = document.getElementById('accordionItemParsedSVG'); 10 | const preErrors = document.getElementById('preErrors'); 11 | const preSVGCheck = document.getElementById('preSVGCheck'); 12 | const codeParsedSVG = document.getElementById('codeParsedSVG'); 13 | 14 | // enable Bootstrap/Popper tooltips 15 | var tooltipTriggerList = [].slice.call(document.querySelectorAll('[data-bs-toggle="tooltip"]')); 16 | var tooltipList = tooltipTriggerList.map(function (tooltipTriggerEl) { 17 | return new bootstrap.Tooltip(tooltipTriggerEl); 18 | }); 19 | 20 | reset(); 21 | 22 | buttonCheck.addEventListener('click', parse); 23 | 24 | function reset() { 25 | for (let tooltip of tooltipList) { 26 | tooltip.hide(); 27 | } 28 | 29 | alertError.style.display = 'none'; 30 | accordionItemErrors.style.display = 'none'; 31 | accordionItemSVGCheck.style.display = 'none'; 32 | accordionItemParsedSVG.style.display = 'none'; 33 | buttonCheck.disabled = false; 34 | buttonCheck.innerHTML = buttonCheck.dataset.title; 35 | preErrors.innerHTML = ''; 36 | preSVGCheck.innerHTML = ''; 37 | codeParsedSVG.innerHTML = ''; 38 | } 39 | 40 | function parse() { 41 | reset(); 42 | 43 | buttonCheck.innerHTML = '' + buttonCheck.innerHTML; 44 | buttonCheck.disabled = true; 45 | 46 | apiCall = '/api/svgcheck' 47 | 48 | const formData = new FormData(); 49 | const file = formFile.files[0]; 50 | 51 | formData.append('file', file); 52 | 53 | const request = new Request(apiCall, { 54 | method: 'POST', 55 | body: formData 56 | }); 57 | 58 | fetch(request) 59 | .then(function(response) { return response.json(); }) 60 | .then(function(json) { 61 | reset(); 62 | if (json.errors) { 63 | accordionItemErrors.style.display = 'block'; 64 | preErrors.innerText = json.errors; 65 | } 66 | if (json.svgcheck) { 
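            // Field names are inferred from this handler rather than from the
            // API spec: json.svg, json.svgcheck and json.errors appear to carry
            // the (parsed SVG, report, errors) triple returned by
            // at/utils/processor.py:svgcheck().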
67 | accordionItemSVGCheck.style.display = 'block'; 68 | preSVGCheck.innerText = json.svgcheck; 69 | } 70 | if (json.svg) { 71 | accordionItemParsedSVG.style.display = 'block'; 72 | text = document.createTextNode(json.svg); 73 | codeParsedSVG.appendChild(text); 74 | hljs.highlightAll(); 75 | } 76 | if (json.error) { 77 | alertError.style.display = 'block'; 78 | messageError.innerText = json.error; 79 | } 80 | accordionSVGCheck.scrollIntoView(); 81 | }) 82 | .catch((error) => { 83 | alertError.style.display = 'block'; 84 | messageError.innerText = error; 85 | }); 86 | } 87 | -------------------------------------------------------------------------------- /static/sitemap.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | https://author-tools.ietf.org/ 7 | 1.00 8 | 9 | 10 | https://author-tools.ietf.org/iddiff 11 | 1.00 12 | 13 | 14 | https://author-tools.ietf.org/abnf 15 | 1.00 16 | 17 | 18 | https://author-tools.ietf.org/idnits 19 | 1.00 20 | 21 | 22 | https://author-tools.ietf.org/idnits3 23 | 1.00 24 | 25 | 26 | https://author-tools.ietf.org/svgcheck 27 | 1.00 28 | 29 | 30 | https://author-tools.ietf.org/clean_svg_ids 31 | 1.00 32 | 33 | 34 | https://author-tools.ietf.org/doc/ 35 | 0.90 36 | 37 | 38 | https://author-tools.ietf.org/about 39 | 0.90 40 | 41 | 42 | -------------------------------------------------------------------------------- /static/styles/custom.css: -------------------------------------------------------------------------------- 1 | /* cyrillic-ext */ 2 | @font-face { 3 | font-family: 'PT Sans'; 4 | font-style: normal; 5 | font-weight: 400; 6 | src: url(https://fonts.gstatic.com/s/ptsans/v12/jizaRExUiTo99u79D0-ExcOPIDUg-g.woff2) format('woff2'); 7 | unicode-range: U+0460-052F, U+1C80-1C88, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F; 8 | } 9 | /* cyrillic */ 10 | @font-face { 11 | font-family: 'PT Sans'; 12 | font-style: normal; 13 | font-weight: 400; 14 | src: url(https://fonts.gstatic.com/s/ptsans/v12/jizaRExUiTo99u79D0aExcOPIDUg-g.woff2) format('woff2'); 15 | unicode-range: U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; 16 | } 17 | /* latin-ext */ 18 | @font-face { 19 | font-family: 'PT Sans'; 20 | font-style: normal; 21 | font-weight: 400; 22 | src: url(https://fonts.gstatic.com/s/ptsans/v12/jizaRExUiTo99u79D0yExcOPIDUg-g.woff2) format('woff2'); 23 | unicode-range: U+0100-024F, U+0259, U+1E00-1EFF, U+2020, U+20A0-20AB, U+20AD-20CF, U+2113, U+2C60-2C7F, U+A720-A7FF; 24 | } 25 | /* latin */ 26 | @font-face { 27 | font-family: 'PT Sans'; 28 | font-style: normal; 29 | font-weight: 400; 30 | src: url(https://fonts.gstatic.com/s/ptsans/v12/jizaRExUiTo99u79D0KExcOPIDU.woff2) format('woff2'); 31 | unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+2000-206F, U+2074, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD; 32 | } 33 | 34 | a { 35 | color: #3d22b3; 36 | } 37 | 38 | a:focus, a:hover { 39 | color: #271673; 40 | } 41 | 42 | textarea.monospace { 43 | font-family: monospace; 44 | } 45 | 46 | .error-message { 47 | white-space: pre-line; 48 | } 49 | 50 | .navbar { 51 | background-image: linear-gradient(to bottom,#40366c 0,#2c254a 100%); 52 | background-repeat: repeat-x; 53 | font-family: "PT Sans", "Trebuchet", "Seravek", sans-serif; 54 | padding-top: 0em; 55 | padding-bottom: 0em; 56 | margin-bottom: 21px; 57 | } 58 | 59 | @media (min-width: 575.98px) { 60 | .navbar { 61 | height: 50px; 62 | } 63 | } 64 | 65 | @media (max-width: 575.98px) { 66 | .navbar .container-fluid 
{ 67 | padding: 0em; 68 | } 69 | .navbar-brand { 70 | padding-left: 12px; 71 | } 72 | .navbar-nav > .nav-item > .nav-link { 73 | padding-left: 11px !important; 74 | } 75 | } 76 | 77 | .navbar-brand > img { 78 | display: inline; 79 | padding-right: 1em; 80 | margin-top: -5px; 81 | } 82 | 83 | .navbar-brand { 84 | margin-top: -2px; 85 | } 86 | 87 | .navbar-nav > li > a, .navbar-nav > li > a:hover, .navbar-nav > li > a:focus { 88 | color: #ecf0f1 !important; 89 | } 90 | 91 | .navbar-nav > .nav-item { 92 | height: 50px; 93 | } 94 | 95 | .navbar-toggler { 96 | position: relative; 97 | float: right; 98 | padding: 9px 10px; 99 | margin-right: 15px; 100 | margin-top: 8px; 101 | margin-bottom: 8px; 102 | background-color: transparent; 103 | background-image: none; 104 | border: 1px solid transparent; 105 | border-radius: 4px; 106 | } 107 | 108 | .navbar-nav > .nav-item:hover, .navbar-nav > .nav-item:focus { 109 | background-color: #1c172f; 110 | } 111 | 112 | .navbar-nav > .nav-item > .nav-link { 113 | transform: translate(0, 20%); 114 | } 115 | 116 | .tab-content { 117 | padding-bottom: 10px; 118 | } 119 | 120 | .tab-pane { 121 | padding-top: 10px; 122 | } 123 | 124 | .diff { 125 | font-family: monospace; 126 | } 127 | 128 | .diff table { 129 | border-spacing: 0; 130 | } 131 | 132 | .diff td { 133 | padding: 0; 134 | white-space: pre; 135 | vertical-align: top; 136 | font-size: 0.86em; 137 | } 138 | 139 | .diff th { 140 | padding: 0; 141 | text-align: center; 142 | } 143 | 144 | .diff .left { background-color: #EEE; } 145 | .diff .right { background-color: #FFF; } 146 | .diff .lblock { background-color: #BFB; } 147 | .diff .rblock { background-color: #FF8; } 148 | .diff .delete { background-color: #ACF; } 149 | .diff .insert { background-color: #8FF; } 150 | .diff .change { background-color: gray; } 151 | .diff .header { background-color: orange; } 152 | 153 | .diff .w-delete { 154 | color: #F00; 155 | text-decoration: line-through; 156 | } 157 | .diff .w-insert { 158 | color: #008000; 159 | font-weight: bold; 160 | } 161 | .bg-light-subtle { 162 | background-color: #fcfcfd !important; 163 | } 164 | -------------------------------------------------------------------------------- /tests/data/draft-doe-smoke-signals-00.rst: -------------------------------------------------------------------------------- 1 | .. |docName| replace:: draft-doe-smoke-signals-00 2 | .. |ipr| replace:: trust200902 3 | .. |category| replace:: std 4 | .. |titleAbbr| replace:: Standard for Data Transmission via Smoke Signals 5 | .. |abstract| replace:: This draft describes a standard to transmit information via smoke signals 6 | .. |submissionType| replace:: IETF 7 | .. |author[0].fullname| replace:: Jane Doe 8 | .. |author[0].role| replace:: editor 9 | .. |author[0].surname| replace:: Doe 10 | .. |author[0].initials| replace:: J. 11 | .. |author[0].email| replace:: doe@example.org 12 | .. |author[0].country| replace:: New Zealand 13 | .. header:: 14 | .. contents:: 15 | .. sectnum:: 16 | 17 | ================================================ 18 | Standard for Data Transmission via Smoke Signals 19 | ================================================ 20 | 21 | This draft describes a standard to transmit information via smoke signals 22 | 23 | Introduction 24 | ============ 25 | 26 | Smoke signal is a form of visual communication used over a long distance. It 27 | is one of the oldest forms of long distance communication methods that has 28 | been used by many in many different countries throughout the history. 
29 | -------------------------------------------------------------------------------- /tests/data/draft-smoke-signals-00.error.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: This will generate an error 3 | 4 | {::boilerplate bcp14} 5 | -------------------------------------------------------------------------------- /tests/data/draft-smoke-signals-00.error.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ietf-tools/author-tools/a50c3997464b1a32005eab6ad410c83e616f8ab3/tests/data/draft-smoke-signals-00.error.txt -------------------------------------------------------------------------------- /tests/data/draft-smoke-signals-00.error.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /tests/data/draft-smoke-signals-00.invalid.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Standard for Data Transmission via Smoke Signals 6 | 7 |
8 | 9 | New Zealand 10 | 11 | kesara@fq.nz 12 |
13 |
14 | 15 | 16 | 17 | This draft describes a standard to transmit information via smoke signals 18 | effectively. 19 | 20 |
21 | 22 | 23 |
24 | Smoke signal is a form of visual communication used over a long 25 | distance. It is one of the oldest forms of long distance communcation 26 | methods that has been used by many in many different countries throughout 27 | the history. 28 |
29 |
30 | 31 | 32 | 33 |
34 | 35 | -------------------------------------------------------------------------------- /tests/data/draft-smoke-signals-00.md: -------------------------------------------------------------------------------- 1 | --- 2 | coding: utf-8 3 | 4 | title: Standard for Data Transmission via Smoke Signals 5 | abbrev: smoke-signals 6 | docname: draft-smoke-signals-00 7 | category: exp 8 | ipr: trust200902 9 | stand_alone: yes 10 | pi: [toc, sortrefs, symrefs, comments] 11 | 12 | author: 13 | ins: K. Nanayakkara Rathnayake 14 | name: Kesara Nanayakkara Rathnayake 15 | country: New Zealand 16 | email: kesara@fq.nz 17 | 18 | --- abstract 19 | 20 | This draft describes a standard to transmit information via smoke signals 21 | effectively. 22 | 23 | --- middle 24 | 25 | # Introduction 26 | 27 | Smoke signal is a form of visual communication used over a long distance. It 28 | is one of the oldest forms of long distance communcation methods that has 29 | been used by many in many different countries throughout the history. 30 | 31 | ## Smoke Signals (aasvg) 32 | 33 | ~~~ aasvg 34 | (8) 35 | (8) 36 | (8) 37 | _ _(8)_ 38 | | |_| |_| | 39 | | | 40 | | __ | 41 | ___| || |___ 42 | ~~~ 43 | 44 | Figure: Smoke signals 45 | -------------------------------------------------------------------------------- /tests/data/draft-smoke-signals-00.mmark.md: -------------------------------------------------------------------------------- 1 | %%% 2 | title = "Standard for Data Transmission via Smoke Signals" 3 | abbrev = "smoke-signals" 4 | ipr= "trust200902" 5 | area = "Internet" 6 | submissiontype = "IETF" 7 | keyword = [""] 8 | 9 | [seriesInfo] 10 | name = "RFC" 11 | value = "1" 12 | stream = "IETF" 13 | status = "informational" 14 | 15 | [[author]] 16 | initials = "K." 17 | surname = "Nanayakkara Rathnayake" 18 | fullname = "Kesara Nanayakkara Rathnayake" 19 | %%% 20 | 21 | {mainmatter} 22 | 23 | # Introduction 24 | 25 | Smoke signal is a form of visual communication used over a long distance. It 26 | is one of the oldest forms of long distance communcation methods that has 27 | been used by many in many different countries throughout the history. 28 | 29 | ## Smoke Signals 30 | 31 | ~~~ 32 | (8) 33 | (8) 34 | (8) 35 | _ _(8)_ 36 | | |_| |_| | 37 | | | 38 | | __ | 39 | ___| || |___ 40 | ~~~ 41 | 42 | Figure: Smoke signals 43 | -------------------------------------------------------------------------------- /tests/data/draft-smoke-signals-00.odt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ietf-tools/author-tools/a50c3997464b1a32005eab6ad410c83e616f8ab3/tests/data/draft-smoke-signals-00.odt -------------------------------------------------------------------------------- /tests/data/draft-smoke-signals-00.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Network Working Group K. Nanayakkara Rathnayake 6 | Internet-Draft 19 June 2021 7 | Intended status: Experimental 8 | Expires: 21 December 2021 9 | 10 | 11 | Standard for Data Transmission via Smoke Signals 12 | draft-smoke-signals-00 13 | 14 | Abstract 15 | 16 | This draft describes a standard to transmit information via smoke 17 | signals effectively. 18 | 19 | Status of This Memo 20 | 21 | This Internet-Draft is submitted in full conformance with the 22 | provisions of BCP 78 and BCP 79. 23 | 24 | Internet-Drafts are working documents of the Internet Engineering 25 | Task Force (IETF). 
Note that other groups may also distribute 26 | working documents as Internet-Drafts. The list of current Internet- 27 | Drafts is at https://datatracker.ietf.org/drafts/current/. 28 | 29 | Internet-Drafts are draft documents valid for a maximum of six months 30 | and may be updated, replaced, or obsoleted by other documents at any 31 | time. It is inappropriate to use Internet-Drafts as reference 32 | material or to cite them other than as "work in progress." 33 | 34 | This Internet-Draft will expire on 21 December 2021. 35 | 36 | Copyright Notice 37 | 38 | Copyright (c) 2021 IETF Trust and the persons identified as the 39 | document authors. All rights reserved. 40 | 41 | This document is subject to BCP 78 and the IETF Trust's Legal 42 | Provisions Relating to IETF Documents (https://trustee.ietf.org/ 43 | license-info) in effect on the date of publication of this document. 44 | Please review these documents carefully, as they describe your rights 45 | and restrictions with respect to this document. Code Components 46 | extracted from this document must include Simplified BSD License text 47 | as described in Section 4.e of the Trust Legal Provisions and are 48 | provided without warranty as described in the Simplified BSD License. 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | Rathnayake Expires 21 December 2021 [Page 1] 57 | 58 | Internet-Draft smoke-signals June 2021 59 | 60 | 61 | Table of Contents 62 | 63 | 1. Introduction . . . . . . . . . . . . . . . . . . . . . . . . 2 64 | Author's Address . . . . . . . . . . . . . . . . . . . . . . . . 2 65 | 66 | 1. Introduction 67 | 68 | Smoke signal is a form of visual communication used over a long 69 | distance. It is one of the oldest forms of long distance 70 | communcation methods that has been used by many in many different 71 | countries throughout the history. 72 | 73 | Author's Address 74 | 75 | Kesara Nanayakkara Rathnayake 76 | New Zealand 77 | 78 | Email: kesara@fq.nz 79 | 80 | Additional contact information: 81 | 82 | කෙසර නානායක්කාර රත්නායක 83 | New Zealand 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | Rathnayake Expires 21 December 2021 [Page 2] 113 | -------------------------------------------------------------------------------- /tests/data/draft-smoke-signals-00.v2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Standard for Data Transmission via Smoke Signals 12 | 13 |
14 | 15 | New Zealand 16 | 17 | kesara@fq.nz 18 |
19 |
20 | 21 | 22 | 23 | This draft describes a standard to transmit information via smoke signals 24 | effectively. 25 | 26 |
27 | 28 | 29 |
30 | Smoke signal is a form of visual communication used over a long 31 | distance. It is one of the oldest forms of long distance communcation 32 | methods that has been used by many in many different countries throughout 33 | the history. 34 |
35 |
36 | 37 | 38 | 39 |
40 | -------------------------------------------------------------------------------- /tests/data/draft-smoke-signals-00.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Standard for Data Transmission via Smoke Signals 10 | 11 |
12 | 13 | New Zealand 14 | 15 | kesara@fq.nz 16 |
17 |
18 | 19 | 20 | This draft describes a standard to transmit information via smoke signals 21 | effectively. 22 | 23 |
24 | 25 | 26 |
27 | Smoke (දුම්) signal is a form of visual communication used over a long 28 | distance. It is one of the oldest forms of long distance communcation 29 | methods that has been used by many in many different countries throughout 30 | the history. 31 |
32 |
33 | 34 | 35 | 36 |
37 | -------------------------------------------------------------------------------- /tests/data/draft-smoke-signals-01.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Network Working Group K. Nanayakkara Rathnayake 6 | Internet-Draft 1 November 2021 7 | Intended status: Experimental 8 | Expires: 5 May 2022 9 | 10 | 11 | Standard for Data Transmission via Smoke Signals 12 | draft-smoke-signals-01 13 | 14 | Abstract 15 | 16 | This draft describes a standard to transmit information via smoke 17 | signals effectively. 18 | 19 | Status of This Memo 20 | 21 | This Internet-Draft is submitted in full conformance with the 22 | provisions of BCP 78 and BCP 79. 23 | 24 | Internet-Drafts are working documents of the Internet Engineering 25 | Task Force (IETF). Note that other groups may also distribute 26 | working documents as Internet-Drafts. The list of current Internet- 27 | Drafts is at https://datatracker.ietf.org/drafts/current/. 28 | 29 | Internet-Drafts are draft documents valid for a maximum of six months 30 | and may be updated, replaced, or obsoleted by other documents at any 31 | time. It is inappropriate to use Internet-Drafts as reference 32 | material or to cite them other than as "work in progress." 33 | 34 | This Internet-Draft will expire on 5 May 2022. 35 | 36 | Copyright Notice 37 | 38 | Copyright (c) 2021 IETF Trust and the persons identified as the 39 | document authors. All rights reserved. 40 | 41 | This document is subject to BCP 78 and the IETF Trust's Legal 42 | Provisions Relating to IETF Documents (https://trustee.ietf.org/ 43 | license-info) in effect on the date of publication of this document. 44 | Please review these documents carefully, as they describe your rights 45 | and restrictions with respect to this document. Code Components 46 | extracted from this document must include Revised BSD License text as 47 | described in Section 4.e of the Trust Legal Provisions and are 48 | provided without warranty as described in the Revised BSD License. 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | Rathnayake Expires 5 May 2022 [Page 1] 57 | 58 | Internet-Draft smoke-signals November 2021 59 | 60 | 61 | Table of Contents 62 | 63 | 1. Introduction . . . . . . . . . . . . . . . . . . . . . . . . 2 64 | 1.1. Smoke Signals . . . . . . . . . . . . . . . . . . . . . . 2 65 | Author's Address . . . . . . . . . . . . . . . . . . . . . . . . 2 66 | 67 | 1. Introduction 68 | 69 | Smoke signal is a form of visual communication used over a long 70 | distance. It is one of the oldest forms of long distance 71 | communcation methods that has been used by many in many different 72 | countries throughout the history. 73 | 74 | 1.1. Smoke Signals 75 | 76 | (8) 77 | (8) 78 | (8) 79 | _ _(8)_ 80 | | |_| |_| | 81 | | | 82 | | __ | 83 | ___| || |___ 84 | 85 | Figure: Smoke signals 86 | 87 | Author's Address 88 | 89 | Kesara Nanayakkara Rathnayake 90 | New Zealand 91 | 92 | Email: kesara@fq.nz 93 | 94 | Additional contact information: 95 | 96 | කෙසර නානායක්කාර රත්නායක 97 | New Zealand 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | Rathnayake Expires 5 May 2022 [Page 2] 113 | -------------------------------------------------------------------------------- /tests/data/draft-smoke-signals-01.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Standard for Data Transmission via Smoke Signals 10 | 11 |
12 | 13 | New Zealand 14 | 15 | kesara@fq.nz 16 |
17 |
18 | 19 | 20 | This draft describes a standard to transmit information via smoke signals 21 | effectively. 22 | 23 |
24 | 25 | 26 |
27 | Smoke signal is a form of visual communication used over a long 28 | distance. It is one of the oldest forms of long distance communcation 29 | methods that has been used by many in many different countries throughout 30 | the history. 31 |
32 |
33 | 34 | 35 | 36 |
37 | -------------------------------------------------------------------------------- /tests/data/draft-smoke-signals-02.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Standard for Data Transmission via Smoke Signals 10 | 11 |
12 | 13 | New Zealand 14 | 15 | kesara@fq.nz 16 |
17 |
18 | 19 | 20 | This draft describes a standard to transmit information via smoke signals 21 | effectively. 22 | 23 |
24 | 25 | 26 |
27 | Smoke signal is a form of visual communication used over a long 28 | distance. It is one of the oldest forms of long distance communcation 29 | methods that has been used by many in many different countries throughout 30 | the history. 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 |
46 |
47 | 48 | 49 | 50 |
51 | -------------------------------------------------------------------------------- /tests/data/ietf.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /tests/data/invalid.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /tests/data/name-error.abnf: -------------------------------------------------------------------------------- 1 | ; name rules that generate errors 2 | name = first-name middle-name last-name 3 | first-name = *ALPHA 4 | first-name = HEXDIG 5 | last-name = *ALPHA 6 | -------------------------------------------------------------------------------- /tests/data/name.abnf: -------------------------------------------------------------------------------- 1 | ; name rules 2 | name = first-name last-name 3 | first-name = *ALPHA 4 | last-name = *ALPHA 5 | -------------------------------------------------------------------------------- /tests/test_api_abnf_extract.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from unittest import TestCase 3 | from os.path import abspath 4 | from pathlib import Path 5 | from shutil import rmtree 6 | from urllib.parse import urlencode 7 | 8 | from at import create_app 9 | 10 | API = "/api/abnf/extract" 11 | TEMPORARY_DATA_DIR = "./tests/tmp/" 12 | DT_LATEST_DRAFT_URL = "https://datatracker.ietf.org/api/rfcdiff-latest-json" 13 | ALLOWED_DOMAINS = ["ietf.org", "rfc-editor.org"] 14 | 15 | 16 | class TestApiAbnfExtract(TestCase): 17 | """Tests for /api/abnf/extract end point""" 18 | 19 | def setUp(self): 20 | # susspress logging messages 21 | set_logger(CRITICAL) 22 | # create temporary data dir 23 | Path(TEMPORARY_DATA_DIR).mkdir(exist_ok=True) 24 | 25 | config = { 26 | "UPLOAD_DIR": abspath(TEMPORARY_DATA_DIR), 27 | "REQUIRE_AUTH": False, 28 | "DT_LATEST_DRAFT_URL": DT_LATEST_DRAFT_URL, 29 | "ALLOWED_DOMAINS": ALLOWED_DOMAINS, 30 | } 31 | 32 | self.app = create_app(config) 33 | 34 | def tearDown(self): 35 | # set logging to INFO 36 | set_logger(INFO) 37 | # remove temporary data dir 38 | rmtree(TEMPORARY_DATA_DIR, ignore_errors=True) 39 | 40 | def test_no_input(self): 41 | with self.app.test_client() as client: 42 | with self.app.app_context(): 43 | result = client.get(API) 44 | json_data = result.get_json() 45 | 46 | self.assertEqual(result.status_code, 400) 47 | self.assertEqual( 48 | json_data["error"], "URL/document name must be provided" 49 | ) 50 | 51 | def test_latest_draft_not_found_error(self): 52 | doc = "draft-smoke-signals" 53 | 54 | with self.app.test_client() as client: 55 | with self.app.app_context(): 56 | result = client.get(API + "?" + urlencode({"doc": doc})) 57 | json_data = result.get_json() 58 | 59 | self.assertEqual(result.status_code, 400) 60 | self.assertEqual( 61 | json_data["error"], 62 | "Can not find the latest document on datatracker", 63 | ) 64 | 65 | def test_download_error(self): 66 | url = "https://www.ietf.org/archives/id/draft-404.txt" 67 | 68 | with self.app.test_client() as client: 69 | with self.app.app_context(): 70 | result = client.get(API + "?" 
+ urlencode({"url": url})) 71 | json_data = result.get_json() 72 | 73 | self.assertEqual(result.status_code, 400) 74 | self.assertEqual( 75 | json_data["error"], "Error occured while downloading file." 76 | ) 77 | 78 | def test_text_processing(self): 79 | url = "https://datatracker.ietf.org/doc/pdf/draft-iab-xml2rfc-02" 80 | 81 | with self.app.test_client() as client: 82 | with self.app.app_context(): 83 | result = client.get(API + "?" + urlencode({"url": url})) 84 | json_data = result.get_json() 85 | 86 | self.assertEqual(result.status_code, 400) 87 | self.assertIn("error", json_data) 88 | self.assertGreater(len(json_data["error"]), 0) 89 | 90 | def test_invalid_url(self): 91 | url = "https://www.example.org/draft-example.txt" 92 | 93 | with self.app.test_client() as client: 94 | with self.app.app_context(): 95 | result = client.get(API + "?" + urlencode({"url": url})) 96 | json_data = result.get_json() 97 | 98 | self.assertEqual(result.status_code, 400) 99 | self.assertEqual( 100 | json_data["error"], "www.example.org domain is not allowed." 101 | ) 102 | 103 | def test_extract_abnf_empty(self): 104 | url = "https://www.rfc-editor.org/rfc/rfc9009.txt" 105 | 106 | with self.app.test_client() as client: 107 | with self.app.app_context(): 108 | result = client.get(API + "?" + urlencode({"url": url})) 109 | data = result.get_data(as_text=True) 110 | 111 | self.assertEqual(result.status_code, 200) 112 | self.assertEqual(data, "No output from BAP aex.") 113 | 114 | def test_extract_abnf_with_url(self): 115 | url = "https://www.rfc-editor.org/rfc/rfc9000.txt" 116 | 117 | with self.app.test_client() as client: 118 | with self.app.app_context(): 119 | result = client.get(API + "?" + urlencode({"url": url})) 120 | data = result.get_data(as_text=True) 121 | 122 | self.assertEqual(result.status_code, 200) 123 | self.assertIn("expected_pn = largest_pn + 1", data) 124 | 125 | def test_extract_abnf_with_docname(self): 126 | doc = "RFC 9000" 127 | 128 | with self.app.test_client() as client: 129 | with self.app.app_context(): 130 | result = client.get(API + "?" 
+ urlencode({"doc": doc})) 131 | data = result.get_data(as_text=True) 132 | 133 | self.assertEqual(result.status_code, 200) 134 | self.assertIn("expected_pn = largest_pn + 1", data) 135 | -------------------------------------------------------------------------------- /tests/test_api_abnf_parse.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from unittest import TestCase 3 | from os.path import abspath 4 | from pathlib import Path 5 | from shutil import rmtree 6 | 7 | from at import create_app 8 | 9 | API = "/api/abnf/parse" 10 | TEMPORARY_DATA_DIR = "./tests/tmp/" 11 | DT_LATEST_DRAFT_URL = "https://datatracker.ietf.org/doc/rfcdiff-latest-json" 12 | ALLOWED_DOMAINS = ["ietf.org", "rfc-editor.org"] 13 | TEST_DATA_DIR = "./tests/data/" 14 | ABNF = "name.abnf" 15 | ABNF_ERROR = "name-error.abnf" 16 | 17 | 18 | class TestApiAbnfParse(TestCase): 19 | """Tests for /api/abnf/parse end point""" 20 | 21 | def setUp(self): 22 | # susspress logging messages 23 | set_logger(CRITICAL) 24 | # create temporary data dir 25 | Path(TEMPORARY_DATA_DIR).mkdir(exist_ok=True) 26 | 27 | config = { 28 | "UPLOAD_DIR": abspath(TEMPORARY_DATA_DIR), 29 | "REQUIRE_AUTH": False, 30 | "DT_LATEST_DRAFT_URL": DT_LATEST_DRAFT_URL, 31 | "ALLOWED_DOMAINS": ALLOWED_DOMAINS, 32 | } 33 | 34 | self.app = create_app(config) 35 | 36 | def tearDown(self): 37 | # set logging to INFO 38 | set_logger(INFO) 39 | # remove temporary data dir 40 | rmtree(TEMPORARY_DATA_DIR, ignore_errors=True) 41 | 42 | def test_abnf_parse(self): 43 | with self.app.test_client() as client: 44 | with self.app.app_context(): 45 | file_path = "".join([TEST_DATA_DIR, ABNF]) 46 | with open(file_path, "r", newline="") as file: 47 | abnf = "".join(file.readlines()) 48 | 49 | result = client.post(API, data={"input": abnf}) 50 | json_data = result.get_json() 51 | 52 | self.assertEqual(result.status_code, 200) 53 | self.assertEqual(json_data["errors"], "") 54 | self.assertIn("first-name last-name", json_data["abnf"]) 55 | 56 | def test_abnf_parse_with_errors(self): 57 | with self.app.test_client() as client: 58 | with self.app.app_context(): 59 | file_path = "".join([TEST_DATA_DIR, ABNF_ERROR]) 60 | with open(file_path, "r", newline="") as file: 61 | abnf = "".join(file.readlines()) 62 | 63 | result = client.post(API, data={"input": abnf}) 64 | json_data = result.get_json() 65 | 66 | self.assertEqual(result.status_code, 200) 67 | self.assertIn( 68 | "Rule first-name was already defined", json_data["errors"] 69 | ) 70 | self.assertIn("; middle-name UNDEFINED", json_data["abnf"]) 71 | -------------------------------------------------------------------------------- /tests/test_api_clean_svg_ids.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from os.path import abspath 3 | from pathlib import Path 4 | from shutil import rmtree 5 | from unittest import TestCase 6 | 7 | from at import create_app 8 | 9 | TEST_DATA_DIR = "./tests/data/" 10 | TEST_XML_DRAFT = "draft-smoke-signals-02.xml" 11 | TEST_UNSUPPORTED_FORMAT = "draft-smoke-signals-00.odt" 12 | TEMPORARY_DATA_DIR = "./tests/tmp/" 13 | VALID_API_KEY = "foobar" 14 | SITE_URL = "https://example.org" 15 | 16 | 17 | def get_path(filename): 18 | """Returns file path""" 19 | return "".join([TEST_DATA_DIR, filename]) 20 | 21 | 22 | class TestApiCleanSvgIds(TestCase): 23 | """Tests for /api/clean_svg_ids end point""" 24 | 
25 | def setUp(self): 26 | # susspress logging messages 27 | set_logger(CRITICAL) 28 | # create temporary data dir 29 | Path(TEMPORARY_DATA_DIR).mkdir(exist_ok=True) 30 | 31 | config = { 32 | "UPLOAD_DIR": abspath(TEMPORARY_DATA_DIR), 33 | "REQUIRE_AUTH": False, 34 | "SITE_URL": SITE_URL, 35 | } 36 | 37 | self.app = create_app(config) 38 | 39 | def tearDown(self): 40 | # set logging to INFO 41 | set_logger(INFO) 42 | # remove temporary data dir 43 | rmtree(TEMPORARY_DATA_DIR, ignore_errors=True) 44 | 45 | def test_no_file(self): 46 | with self.app.test_client() as client: 47 | with self.app.app_context(): 48 | result = client.post( 49 | "/api/clean_svg_ids", data={"apikey": VALID_API_KEY} 50 | ) 51 | json_data = result.get_json() 52 | 53 | self.assertEqual(result.status_code, 400) 54 | self.assertEqual(json_data["error"], "No file") 55 | 56 | def test_missing_file_name(self): 57 | with self.app.test_client() as client: 58 | with self.app.app_context(): 59 | result = client.post( 60 | "/api/clean_svg_ids", 61 | data={ 62 | "file": (open(get_path(TEST_XML_DRAFT), "rb"), ""), 63 | "apikey": VALID_API_KEY, 64 | }, 65 | ) 66 | json_data = result.get_json() 67 | 68 | self.assertEqual(result.status_code, 400) 69 | self.assertEqual(json_data["error"], "Filename is missing") 70 | 71 | def test_unsupported_file_format(self): 72 | with self.app.test_client() as client: 73 | with self.app.app_context(): 74 | result = client.post( 75 | "/api/clean_svg_ids", 76 | data={ 77 | "file": ( 78 | open(get_path(TEST_UNSUPPORTED_FORMAT), "rb"), 79 | TEST_UNSUPPORTED_FORMAT, 80 | ), 81 | "apikey": VALID_API_KEY, 82 | }, 83 | ) 84 | json_data = result.get_json() 85 | 86 | self.assertEqual(result.status_code, 400) 87 | self.assertEqual(json_data["error"], "Input file format not supported") 88 | 89 | def test_clean_svg_ids(self): 90 | with self.app.test_client() as client: 91 | with self.app.app_context(): 92 | result = client.post( 93 | "/api/clean_svg_ids", 94 | data={ 95 | "file": (open(get_path(TEST_XML_DRAFT), "rb"), TEST_XML_DRAFT), 96 | "apikey": VALID_API_KEY, 97 | }, 98 | ) 99 | json_data = result.get_json() 100 | 101 | self.assertEqual(result.status_code, 200) 102 | self.assertTrue(json_data["url"].startswith("{}/".format(SITE_URL))) 103 | 104 | # test export 105 | export_url = json_data["url"].replace(SITE_URL, "") 106 | export = client.get(export_url) 107 | self.assertEqual(export.status_code, 200) 108 | self.assertIsNotNone(export.data) 109 | -------------------------------------------------------------------------------- /tests/test_api_svgcheck.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from unittest import TestCase 3 | from os.path import abspath 4 | from pathlib import Path 5 | from shutil import rmtree 6 | 7 | from at import create_app 8 | 9 | API = "/api/svgcheck" 10 | TEMPORARY_DATA_DIR = "./tests/tmp/" 11 | TEST_DATA_DIR = "./tests/data/" 12 | TEST_SVG = "ietf.svg" 13 | TEST_INVALID_SVG = "invalid.svg" 14 | TEST_UNSUPPORTED_FORMAT = "draft-smoke-signals-00.md" 15 | 16 | 17 | def get_path(filename): 18 | """Returns file path""" 19 | return "".join([TEST_DATA_DIR, filename]) 20 | 21 | 22 | class TestApiSvgcheck(TestCase): 23 | """Tests for /api/svgcheck end point""" 24 | 25 | def setUp(self): 26 | # susspress logging messages 27 | set_logger(CRITICAL) 28 | # create temporary data dir 29 | Path(TEMPORARY_DATA_DIR).mkdir(exist_ok=True) 30 | 31 | config = {"UPLOAD_DIR": 
abspath(TEMPORARY_DATA_DIR), "REQUIRE_AUTH": False} 32 | 33 | self.app = create_app(config) 34 | 35 | def tearDown(self): 36 | # set logging to INFO 37 | set_logger(INFO) 38 | # remove temporary data dir 39 | rmtree(TEMPORARY_DATA_DIR, ignore_errors=True) 40 | 41 | def test_no_file(self): 42 | with self.app.test_client() as client: 43 | with self.app.app_context(): 44 | result = client.post(API) 45 | json_data = result.get_json() 46 | 47 | self.assertEqual(result.status_code, 400) 48 | self.assertEqual(json_data["error"], "No file") 49 | 50 | def test_missing_file_name(self): 51 | with self.app.test_client() as client: 52 | with self.app.app_context(): 53 | result = client.post( 54 | API, data={"file": (open(get_path(TEST_SVG), "rb"), "")} 55 | ) 56 | json_data = result.get_json() 57 | 58 | self.assertEqual(result.status_code, 400) 59 | self.assertEqual(json_data["error"], "Filename is missing") 60 | 61 | def test_unsupported_file_format(self): 62 | with self.app.test_client() as client: 63 | with self.app.app_context(): 64 | result = client.post( 65 | API, 66 | data={ 67 | "file": ( 68 | open(get_path(TEST_UNSUPPORTED_FORMAT), "rb"), 69 | TEST_UNSUPPORTED_FORMAT, 70 | ) 71 | }, 72 | ) 73 | json_data = result.get_json() 74 | 75 | self.assertEqual(result.status_code, 400) 76 | self.assertEqual(json_data["error"], "Input file format not supported") 77 | 78 | def test_svgcheck(self): 79 | with self.app.test_client() as client: 80 | with self.app.app_context(): 81 | result = client.post( 82 | API, data={"file": (open(get_path(TEST_SVG), "rb"), TEST_SVG)} 83 | ) 84 | json_data = result.get_json() 85 | 86 | self.assertEqual(result.status_code, 200) 87 | self.assertIn("", json_data["svg"]) 88 | self.assertIn( 89 | "File conforms to SVG requirements.", json_data["svgcheck"] 90 | ) 91 | self.assertIsNone(json_data["errors"]) 92 | 93 | def test_svgcheck_error(self): 94 | with self.app.test_client() as client: 95 | with self.app.app_context(): 96 | result = client.post( 97 | API, 98 | data={ 99 | "file": ( 100 | open(get_path(TEST_INVALID_SVG), "rb"), 101 | TEST_INVALID_SVG, 102 | ) 103 | }, 104 | ) 105 | json_data = result.get_json() 106 | 107 | self.assertEqual(result.status_code, 200) 108 | self.assertIsNone(json_data["svg"]) 109 | self.assertIsNone(json_data["svgcheck"]) 110 | self.assertIn( 111 | "ERROR: File does not conform to SVG requirements", 112 | json_data["errors"], 113 | ) 114 | -------------------------------------------------------------------------------- /tests/test_api_validate.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from os.path import abspath 3 | from pathlib import Path 4 | from shutil import rmtree 5 | from unittest import TestCase 6 | 7 | from at import create_app 8 | 9 | TEST_DATA_DIR = "./tests/data/" 10 | TEST_XML_DRAFT = "draft-smoke-signals-00.xml" 11 | TEST_XML_V2_DRAFT = "draft-smoke-signals-00.v2.xml" 12 | TEST_XML_INVALID = "draft-smoke-signals-00.invalid.xml" 13 | TEST_TEXT_DRAFT = "draft-smoke-signals-00.txt" 14 | TEST_KRAMDOWN_DRAFT = "draft-smoke-signals-00.md" 15 | TEST_MMARK_DRAFT = "draft-smoke-signals-00.mmark.md" 16 | TEST_UNSUPPORTED_FORMAT = "draft-smoke-signals-00.odt" 17 | TEST_XML_ERROR = "draft-smoke-signals-00.error.xml" 18 | TEST_KRAMDOWN_ERROR = "draft-smoke-signals-00.error.md" 19 | TEST_DATA = [TEST_XML_DRAFT, TEST_XML_V2_DRAFT, TEST_KRAMDOWN_DRAFT, TEST_MMARK_DRAFT] 20 | TEMPORARY_DATA_DIR = "./tests/tmp/" 21 | VALID_API_KEY = "foobar" 22 
| 23 | 24 | def get_path(filename): 25 | """Returns file path""" 26 | return "".join([TEST_DATA_DIR, filename]) 27 | 28 | 29 | class TestApiValidate(TestCase): 30 | """Tests for /api/validate end point""" 31 | 32 | def setUp(self): 33 | # susspress logging messages 34 | set_logger(CRITICAL) 35 | # create temporary data dir 36 | Path(TEMPORARY_DATA_DIR).mkdir(exist_ok=True) 37 | 38 | config = {"UPLOAD_DIR": abspath(TEMPORARY_DATA_DIR), "REQUIRE_AUTH": False} 39 | 40 | self.app = create_app(config) 41 | 42 | def tearDown(self): 43 | # set logging to INFO 44 | set_logger(INFO) 45 | # remove temporary data dir 46 | rmtree(TEMPORARY_DATA_DIR, ignore_errors=True) 47 | 48 | def test_no_file(self): 49 | with self.app.test_client() as client: 50 | with self.app.app_context(): 51 | result = client.post("/api/validate", data={"apikey": VALID_API_KEY}) 52 | json_data = result.get_json() 53 | 54 | self.assertEqual(result.status_code, 400) 55 | self.assertEqual(json_data["error"], "No file") 56 | 57 | def test_missing_file_name(self): 58 | with self.app.test_client() as client: 59 | with self.app.app_context(): 60 | result = client.post( 61 | "/api/validate", 62 | data={ 63 | "file": (open(get_path(TEST_XML_DRAFT), "rb"), ""), 64 | "apikey": VALID_API_KEY, 65 | }, 66 | ) 67 | json_data = result.get_json() 68 | 69 | self.assertEqual(result.status_code, 400) 70 | self.assertEqual(json_data["error"], "Filename is missing") 71 | 72 | def test_unsupported_file_format(self): 73 | with self.app.test_client() as client: 74 | with self.app.app_context(): 75 | result = client.post( 76 | "/api/validate", 77 | data={ 78 | "file": ( 79 | open(get_path(TEST_UNSUPPORTED_FORMAT), "rb"), 80 | TEST_UNSUPPORTED_FORMAT, 81 | ), 82 | "apikey": VALID_API_KEY, 83 | }, 84 | ) 85 | json_data = result.get_json() 86 | 87 | self.assertEqual(result.status_code, 400) 88 | self.assertEqual(json_data["error"], "Input file format not supported") 89 | 90 | def test_validate(self): 91 | with self.app.test_client() as client: 92 | with self.app.app_context(): 93 | for filename in TEST_DATA: 94 | result = client.post( 95 | "/api/validate", 96 | data={ 97 | "file": (open(get_path(filename), "rb"), filename), 98 | "apikey": VALID_API_KEY, 99 | }, 100 | ) 101 | json_data = result.get_json() 102 | 103 | self.assertEqual(result.status_code, 200) 104 | self.assertIn("errors", json_data) 105 | self.assertIn("warnings", json_data) 106 | self.assertIn("idnits", json_data) 107 | self.assertIn("bare_unicode", json_data) 108 | self.assertIn("non_ascii", json_data) 109 | self.assertEqual(len(json_data["errors"]), 0) 110 | self.assertGreater(len(json_data["idnits"]), 0) 111 | self.assertGreaterEqual(len(json_data["warnings"]), 0) 112 | self.assertGreaterEqual(len(json_data["bare_unicode"]), 0) 113 | self.assertGreaterEqual(len(json_data["non_ascii"]), 0) 114 | 115 | def test_validate_text(self): 116 | with self.app.test_client() as client: 117 | with self.app.app_context(): 118 | result = client.post( 119 | "/api/validate", 120 | data={ 121 | "file": ( 122 | open(get_path(TEST_TEXT_DRAFT), "rb"), 123 | TEST_TEXT_DRAFT, 124 | ), 125 | "apikey": VALID_API_KEY, 126 | }, 127 | ) 128 | json_data = result.get_json() 129 | 130 | self.assertEqual(result.status_code, 200) 131 | self.assertNotIn("errors", json_data) 132 | self.assertNotIn("warnings", json_data) 133 | self.assertNotIn("bare_unicode", json_data) 134 | self.assertIn("non_ascii", json_data) 135 | self.assertIn("idnits", json_data) 136 | self.assertGreater(len(json_data["idnits"]), 0) 137 | 138 | def 
test_validate_invalid_id(self): 139 | with self.app.test_client() as client: 140 | with self.app.app_context(): 141 | result = client.post( 142 | "/api/validate", 143 | data={ 144 | "file": ( 145 | open(get_path(TEST_XML_INVALID), "rb"), 146 | TEST_XML_INVALID, 147 | ), 148 | "apikey": VALID_API_KEY, 149 | }, 150 | ) 151 | json_data = result.get_json() 152 | 153 | self.assertEqual(result.status_code, 200) 154 | self.assertIn("errors", json_data) 155 | self.assertIn("warnings", json_data) 156 | self.assertIn("idnits", json_data) 157 | self.assertGreater(len(json_data["errors"]), 0) 158 | self.assertGreater(len(json_data["warnings"]), 0) 159 | self.assertGreater(len(json_data["idnits"]), 0) 160 | 161 | def test_kramdown_error(self): 162 | with self.app.test_client() as client: 163 | with self.app.app_context(): 164 | result = client.post( 165 | "/api/validate", 166 | data={ 167 | "file": ( 168 | open(get_path(TEST_KRAMDOWN_ERROR), "rb"), 169 | TEST_KRAMDOWN_ERROR, 170 | ), 171 | "apikey": VALID_API_KEY, 172 | }, 173 | ) 174 | json_data = result.get_json() 175 | 176 | self.assertEqual(result.status_code, 400) 177 | self.assertTrue(json_data["error"].startswith("processing error:")) 178 | 179 | def test_xml_error(self): 180 | with self.app.test_client() as client: 181 | with self.app.app_context(): 182 | result = client.post( 183 | "/api/validate", 184 | data={ 185 | "file": (open(get_path(TEST_XML_ERROR), "rb"), TEST_XML_ERROR), 186 | "apikey": VALID_API_KEY, 187 | }, 188 | ) 189 | json_data = result.get_json() 190 | 191 | self.assertEqual(result.status_code, 400) 192 | self.assertTrue(json_data["error"].startswith("processing error:")) 193 | -------------------------------------------------------------------------------- /tests/test_api_version.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from unittest import TestCase 3 | 4 | from at import create_app 5 | 6 | AUTHOR_TOOLS_API_TEST_VERSION = "0.0.1" 7 | VERSION_INFORMATION = { 8 | "xml2rfc": "3.19.1", 9 | "kramdown-rfc": "1.7.5", 10 | "mmark": "2.2.25", 11 | "id2xml": "1.5.2", 12 | "weasyprint": "60.2", 13 | "idnits": "2.17.00", 14 | "iddiff": "0.4.3", 15 | "aasvg": "0.3.6", 16 | "svgcheck": "0.7.1", 17 | "rfcdiff": "1.48", 18 | "bap": "1.4", 19 | "rst2rfcxml": "1.5.0", 20 | } 21 | VERSION_LABELS = ( 22 | "author_tools_api", 23 | "xml2rfc", 24 | "kramdown-rfc", 25 | "mmark", 26 | "id2xml", 27 | "weasyprint", 28 | "idnits", 29 | "iddiff", 30 | "aasvg", 31 | "svgcheck", 32 | "rfcdiff", 33 | "bap", 34 | "rst2rfcxml", 35 | ) 36 | 37 | 38 | class TestApiVersion(TestCase): 39 | """Tests for /api/version end point""" 40 | 41 | def setUp(self): 42 | # susspress logging messages 43 | set_logger(CRITICAL) 44 | 45 | config = { 46 | "REQUIRE_AUTH": False, 47 | "VERSION_INFORMATION": VERSION_INFORMATION, 48 | "VERSION": AUTHOR_TOOLS_API_TEST_VERSION, 49 | } 50 | 51 | self.app = create_app(config) 52 | 53 | def tearDown(self): 54 | # set logging to INFO 55 | set_logger(INFO) 56 | 57 | def test_version(self): 58 | with self.app.test_client() as client: 59 | with self.app.app_context(): 60 | result = client.get("/api/version") 61 | json_data = result.get_json() 62 | 63 | self.assertEqual(result.status_code, 200) 64 | 65 | for label in VERSION_LABELS: 66 | self.assertIn(label, json_data["versions"]) 67 | 68 | self.assertEqual( 69 | json_data["versions"]["author_tools_api"], 70 | AUTHOR_TOOLS_API_TEST_VERSION, 71 | ) 72 | 
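73 | # Minimal ad-hoc sketch (illustrative only; it simply mirrors the test
74 | # above) for querying the version endpoint directly when debugging
75 | # outside the unittest runner:
76 | if __name__ == "__main__":
77 |     app = create_app(
78 |         {
79 |             "REQUIRE_AUTH": False,
80 |             "VERSION": AUTHOR_TOOLS_API_TEST_VERSION,
81 |             "VERSION_INFORMATION": VERSION_INFORMATION,
82 |         }
83 |     )
84 |     with app.test_client() as client:
85 |         # prints e.g. {"versions": {"author_tools_api": "0.0.1", ...}}
86 |         print(client.get("/api/version").get_json())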
-------------------------------------------------------------------------------- /tests/test_utils_abnf.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from unittest import TestCase 3 | 4 | from at.utils.abnf import extract_abnf, parse_abnf 5 | 6 | TEST_DATA_DIR = "./tests/data/" 7 | RFC = "rfc8855.txt" 8 | DRAFT = "draft-smoke-signals-00.txt" 9 | ABNF = "name.abnf" 10 | ABNF_ERROR = "name-error.abnf" 11 | 12 | 13 | class TestUtilsAbnf(TestCase): 14 | """Tests for at.utils.abnf""" 15 | 16 | def setUp(self): 17 | # susspress logging messages 18 | set_logger(CRITICAL) 19 | 20 | def tearDown(self): 21 | # set logging to INFO 22 | set_logger(INFO) 23 | 24 | def test_extract_abnf(self): 25 | result = extract_abnf("".join([TEST_DATA_DIR, RFC])) 26 | 27 | self.assertIn("FLOOR-REQUEST-STATUS", result) 28 | 29 | def test_extract_abnf_empty(self): 30 | result = extract_abnf("".join([TEST_DATA_DIR, DRAFT])) 31 | 32 | self.assertEqual(result, "No output from BAP aex.") 33 | 34 | def test_extract_abnf_error(self): 35 | result = extract_abnf("foobar") 36 | 37 | self.assertIn("Can't open", result) 38 | 39 | def test_parse_abnf(self): 40 | errors, abnf = parse_abnf("".join([TEST_DATA_DIR, ABNF])) 41 | 42 | self.assertIn("", errors) 43 | self.assertIn("first-name last-name", abnf) 44 | 45 | def test_parse_abnf_error(self): 46 | errors, abnf = parse_abnf("".join([TEST_DATA_DIR, ABNF_ERROR])) 47 | 48 | self.assertIn("Rule first-name was already defined", errors) 49 | self.assertIn("; middle-name UNDEFINED", abnf) 50 | -------------------------------------------------------------------------------- /tests/test_utils_authentication.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from os.path import abspath 3 | from pathlib import Path 4 | from shutil import rmtree 5 | from unittest import TestCase 6 | 7 | from hypothesis import given 8 | from hypothesis.strategies import text 9 | import responses 10 | 11 | from at import create_app 12 | 13 | TEST_DATA_DIR = "./tests/data/" 14 | TEST_XML_DRAFT = "draft-smoke-signals-00.xml" 15 | TEMPORARY_DATA_DIR = "./tests/tmp/" 16 | AUTHOR_TOOLS_API_TEST_VERSION = "0.0.1" 17 | DT_APPAUTH_URL = "https://example.com/" 18 | VALID_API_KEY = "foobar" 19 | 20 | 21 | def get_path(filename): 22 | """Returns file path""" 23 | return "".join([TEST_DATA_DIR, filename]) 24 | 25 | 26 | class TestUtilsAuthentication(TestCase): 27 | """Tests for at.utils.authentication""" 28 | 29 | def setUp(self): 30 | # susspress logging messages 31 | set_logger(CRITICAL) 32 | # create temporary data dir 33 | Path(TEMPORARY_DATA_DIR).mkdir(exist_ok=True) 34 | 35 | config = { 36 | "UPLOAD_DIR": abspath(TEMPORARY_DATA_DIR), 37 | "DT_APPAUTH_URL": DT_APPAUTH_URL, 38 | "REQUIRE_AUTH": True, 39 | "VERSION": AUTHOR_TOOLS_API_TEST_VERSION, 40 | } 41 | 42 | self.app = create_app(config) 43 | 44 | def tearDown(self): 45 | # set logging to INFO 46 | set_logger(INFO) 47 | # remove temporary data dir 48 | rmtree(TEMPORARY_DATA_DIR, ignore_errors=True) 49 | 50 | @responses.activate 51 | def test_authentication_missing_api_key(self): 52 | responses.add(responses.POST, DT_APPAUTH_URL, status=400) 53 | 54 | with self.app.test_client() as client: 55 | with self.app.app_context(): 56 | filename = get_path(TEST_XML_DRAFT) 57 | result = client.post( 58 | "/api/render/xml", data={"file": (open(filename, "rb"), filename)} 
59 | ) 60 | json_data = result.get_json() 61 | 62 | self.assertEqual(result.status_code, 401) 63 | self.assertEqual(json_data["error"], "API key is missing") 64 | 65 | @responses.activate 66 | def test_authentication_valid_api_key(self): 67 | responses.add( 68 | responses.POST, DT_APPAUTH_URL, json={"success": True}, status=200 69 | ) 70 | 71 | with self.app.test_client() as client: 72 | with self.app.app_context(): 73 | filename = get_path(TEST_XML_DRAFT) 74 | result = client.post( 75 | "/api/render/xml", 76 | data={ 77 | "file": (open(filename, "rb"), filename), 78 | "apikey": VALID_API_KEY, 79 | }, 80 | ) 81 | 82 | self.assertEqual(result.status_code, 200) 83 | 84 | @responses.activate 85 | @given(text()) 86 | def test_authentication_invalid_api_key(self, api_key): 87 | responses.add(responses.POST, DT_APPAUTH_URL, status=403) 88 | 89 | with self.app.test_client() as client: 90 | with self.app.app_context(): 91 | filename = get_path(TEST_XML_DRAFT) 92 | result = client.post( 93 | "/api/render/xml", 94 | data={ 95 | "file": (open(filename, "rb"), filename), 96 | "apikey": VALID_API_KEY, 97 | }, 98 | ) 99 | json_data = result.get_json() 100 | 101 | self.assertEqual(result.status_code, 401) 102 | self.assertEqual(json_data["error"], "API key is invalid") 103 | 104 | @responses.activate 105 | def test_authentication_valid_api_key_in_headers(self): 106 | responses.add( 107 | responses.POST, DT_APPAUTH_URL, json={"success": True}, status=200 108 | ) 109 | 110 | with self.app.test_client() as client: 111 | with self.app.app_context(): 112 | filename = get_path(TEST_XML_DRAFT) 113 | result = client.post( 114 | "/api/render/xml", 115 | headers={"X-API-KEY": VALID_API_KEY}, 116 | data={"file": (open(filename, "rb"), filename)}, 117 | ) 118 | 119 | self.assertEqual(result.status_code, 200) 120 | 121 | @responses.activate 122 | @given(text()) 123 | def test_authentication_invalid_api_key_in_headers(self, api_key): 124 | responses.add(responses.POST, DT_APPAUTH_URL, status=403) 125 | 126 | with self.app.test_client() as client: 127 | with self.app.app_context(): 128 | filename = get_path(TEST_XML_DRAFT) 129 | result = client.post( 130 | "/api/render/xml", 131 | headers={"X-API-KEY": VALID_API_KEY}, 132 | data={"file": (open(filename, "rb"), filename)}, 133 | ) 134 | json_data = result.get_json() 135 | 136 | self.assertEqual(result.status_code, 401) 137 | self.assertEqual(json_data["error"], "API key is invalid") 138 | 139 | def test_authentication_disabled(self): 140 | config = { 141 | "UPLOAD_DIR": abspath(TEMPORARY_DATA_DIR), 142 | "REQUIRE_AUTH": False, 143 | "VERSION": AUTHOR_TOOLS_API_TEST_VERSION, 144 | } 145 | 146 | app = create_app(config) 147 | with app.test_client() as client: 148 | with app.app_context(): 149 | filename = get_path(TEST_XML_DRAFT) 150 | result = client.post( 151 | "/api/render/xml", 152 | data={ 153 | "file": (open(filename, "rb"), filename), 154 | "apikey": VALID_API_KEY, 155 | }, 156 | ) 157 | 158 | self.assertEqual(result.status_code, 200) 159 | -------------------------------------------------------------------------------- /tests/test_utils_file.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from pathlib import Path 3 | from shutil import rmtree 4 | from unittest import TestCase 5 | 6 | from faker import Faker 7 | from hypothesis import given, assume 8 | from hypothesis.strategies import text 9 | from werkzeug.datastructures import FileStorage 10 | 11 | from 
at.utils.file import ( 12 | allowed_file, 13 | cleanup_output, 14 | get_file, 15 | get_filename, 16 | get_name, 17 | get_name_with_revision, 18 | save_file, 19 | save_file_from_text, 20 | save_file_from_url, 21 | ALLOWED_EXTENSIONS, 22 | ALLOWED_EXTENSIONS_BY_PROCESS, 23 | DownloadError, 24 | ) 25 | 26 | TEST_DATA_DIR = "./tests/data/" 27 | TEST_XML_DRAFT = "draft-smoke-signals-00.xml" 28 | TEST_XML_V2_DRAFT = "draft-smoke-signals-00.v2.xml" 29 | TEST_TEXT_DRAFT = "draft-smoke-signals-00.txt" 30 | TEST_KRAMDOWN_DRAFT = "draft-smoke-signals-00.md" 31 | TEST_MMARK_DRAFT = "draft-smoke-signals-00.mmark.md" 32 | TEST_DATA = [ 33 | TEST_XML_DRAFT, 34 | TEST_XML_V2_DRAFT, 35 | TEST_TEXT_DRAFT, 36 | TEST_KRAMDOWN_DRAFT, 37 | TEST_MMARK_DRAFT, 38 | ] 39 | TEMPORARY_DATA_DIR = "./tests/tmp/" 40 | 41 | 42 | class TestUtilsFile(TestCase): 43 | """Tests for at.utils.file""" 44 | 45 | def setUp(self): 46 | # susspress logging messages 47 | set_logger(CRITICAL) 48 | # set faker 49 | self.faker = Faker(seed=1985) 50 | # create temporary data dir 51 | Path(TEMPORARY_DATA_DIR).mkdir(exist_ok=True) 52 | 53 | def tearDown(self): 54 | # set logging to INFO 55 | set_logger(INFO) 56 | # remove temporary data dir 57 | rmtree(TEMPORARY_DATA_DIR, ignore_errors=True) 58 | 59 | @given(text()) 60 | def test_allowed_file_for_non_supported(self, filename): 61 | for extension in ALLOWED_EXTENSIONS: 62 | assume(not filename.endswith(extension)) 63 | 64 | self.assertFalse(allowed_file(filename)) 65 | 66 | def test_allowed_file_for_supported(self): 67 | for extension in ALLOWED_EXTENSIONS: 68 | filename = self.faker.file_name(extension=extension) 69 | 70 | self.assertTrue(allowed_file(filename)) 71 | 72 | @given(text()) 73 | def test_allowed_file_for_non_supported_with_process(self, filename): 74 | for process in ALLOWED_EXTENSIONS_BY_PROCESS.keys(): 75 | for extension in ALLOWED_EXTENSIONS_BY_PROCESS[process]: 76 | assume(not filename.endswith(extension)) 77 | 78 | self.assertFalse(allowed_file(filename, process=process)) 79 | 80 | def test_allowed_file_for_supported_with_process(self): 81 | for process in ALLOWED_EXTENSIONS_BY_PROCESS.keys(): 82 | for extension in ALLOWED_EXTENSIONS_BY_PROCESS[process]: 83 | filename = self.faker.file_name(extension=extension) 84 | 85 | self.assertTrue(allowed_file(filename, process=process)) 86 | 87 | def test_get_filename(self): 88 | for extension in ALLOWED_EXTENSIONS: 89 | filename = self.faker.file_name() 90 | 91 | self.assertTrue(get_filename(filename, extension).endswith(extension)) 92 | 93 | def test_get_file(self): 94 | for extension in ALLOWED_EXTENSIONS: 95 | file_path = self.faker.file_path(extension=extension, depth=3) 96 | result = get_file(file_path) 97 | 98 | self.assertTrue(result.endswith(extension)) 99 | self.assertFalse(result.startswith("/")) 100 | self.assertTrue(result.count("/"), 1) 101 | 102 | def test_save_file(self): 103 | for filename in TEST_DATA: 104 | with open("".join([TEST_DATA_DIR, filename]), "rb") as file: 105 | file_object = FileStorage(file, filename=filename) 106 | (dir_path, file_path) = save_file(file_object, TEMPORARY_DATA_DIR) 107 | self.assertTrue(Path(dir_path).exists()) 108 | self.assertTrue(Path(file_path).exists()) 109 | 110 | @given(text()) 111 | def test_save_file_from_text(self, text): 112 | dir_path, file_path = save_file_from_text(text, TEMPORARY_DATA_DIR) 113 | 114 | self.assertTrue(Path(dir_path).exists()) 115 | self.assertTrue(Path(file_path).exists()) 116 | 117 | with open(file_path, "r", newline="") as file: 118 | 
self.assertEqual(text, "".join(file.readlines())) 119 | 120 | def test_save_file_from_url_connection_error(self): 121 | with self.assertRaises(DownloadError) as error: 122 | save_file_from_url("https://example.foobar/draft.txt", TEMPORARY_DATA_DIR) 123 | self.assertEqual(str(error.exception), "Error occured while downloading file.") 124 | 125 | def test_save_file_from_url_404_error(self): 126 | with self.assertRaises(DownloadError) as error: 127 | save_file_from_url("https://example.com/draft-404.txt", TEMPORARY_DATA_DIR) 128 | self.assertEqual(str(error.exception), "Error occured while downloading file.") 129 | 130 | def test_save_file_from_url_no_filename(self): 131 | url = "https://example.com/" 132 | with self.assertRaises(DownloadError) as error: 133 | save_file_from_url(url, TEMPORARY_DATA_DIR) 134 | self.assertEqual( 135 | str(error.exception), "Can not determine the filename: {}".format(url) 136 | ) 137 | 138 | def test_save_file_from_url_valid(self): 139 | id_url = "https://www.ietf.org/archive/id/draft-ietf-quic-http-23.txt" 140 | (dir_path, file_path) = save_file_from_url(id_url, TEMPORARY_DATA_DIR) 141 | self.assertTrue(Path(dir_path).exists()) 142 | self.assertTrue(Path(file_path).exists()) 143 | 144 | @given(text()) 145 | def test_get_name_non_standarded(self, filename): 146 | for prefix in ["draft-", "rfc"]: 147 | assume(not filename.startswith(prefix)) 148 | 149 | self.assertIsNone(get_name(filename)) 150 | 151 | def test_get_name_standarded(self): 152 | names_dictionary = { 153 | "rfc3333": "rfc3333", 154 | "rfc3333.txt": "rfc3333", 155 | "draft-smoke-signals-00.txt": "draft-smoke-signals", 156 | "draft-smoke-signals-01": "draft-smoke-signals", 157 | "draft-smoke-signals": "draft-smoke-signals", 158 | } 159 | 160 | for filename, name in names_dictionary.items(): 161 | self.assertEqual(get_name(filename), name) 162 | 163 | @given(text()) 164 | def test_get_name_with_revision_non_standarded(self, filename): 165 | for prefix in ["draft-", "rfc"]: 166 | assume(not filename.startswith(prefix)) 167 | 168 | self.assertIsNone(get_name_with_revision(filename)) 169 | 170 | def test_get_name_with_revision_standarded(self): 171 | names_dictionary = { 172 | "rfc3333": "rfc3333", 173 | "rfc3333.txt": "rfc3333", 174 | "draft-smoke-signals-00.txt": "draft-smoke-signals-00", 175 | "draft-smoke-signals-01": "draft-smoke-signals-01", 176 | "draft-smoke-signals": "draft-smoke-signals", 177 | } 178 | 179 | for filename, name in names_dictionary.items(): 180 | self.assertEqual(get_name_with_revision(filename), name) 181 | 182 | def test_cleanup_output(self): 183 | rel_dir = TEST_DATA_DIR 184 | filename = TEST_XML_DRAFT 185 | rel_path = "".join((TEST_DATA_DIR, TEST_XML_DRAFT)) 186 | abs_path = Path.resolve(Path(rel_path)) 187 | abs_dir = str(abs_path.parent) 188 | output_0 = "foobar-0" 189 | output_1 = "foobar-1" 190 | output_2 = "foobar-2" 191 | output_3 = "foobar-3" 192 | 193 | log_output = "\n".join( 194 | ( 195 | output_0, 196 | ":".join((filename, output_1)), 197 | ":".join((rel_path, output_2)), 198 | ":".join((str(abs_path), output_3)), 199 | ) 200 | ) 201 | 202 | result = cleanup_output(abs_path, log_output) 203 | 204 | for output in (output_0, output_1, output_2, output_3): 205 | self.assertIn(output, result) 206 | 207 | self.assertIn(filename, result) 208 | self.assertNotIn(rel_dir, result) 209 | self.assertNotIn(abs_dir, result) 210 | 211 | def test_cleanup_output_when_none(self): 212 | output = cleanup_output(None, None) 213 | 214 | self.assertIsNone(output) 215 | 
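216 | # Quick reference (illustrative; it restates the expectations encoded in
217 | # the two dictionaries above) for the name helpers:
218 | #
219 | #   get_name("draft-smoke-signals-00.txt")                -> "draft-smoke-signals"
220 | #   get_name_with_revision("draft-smoke-signals-00.txt")  -> "draft-smoke-signals-00"
221 | #   get_name("memo.txt")                                   -> None  (no draft-/rfc prefix)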
-------------------------------------------------------------------------------- /tests/test_utils_iddiff.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from unittest import TestCase 3 | from unittest.mock import patch 4 | 5 | 6 | from at.utils.iddiff import get_id_diff, IddiffError 7 | 8 | TEST_DATA_DIR = "./tests/data/" 9 | DRAFT_A = "draft-smoke-signals-00.txt" 10 | DRAFT_B = "draft-smoke-signals-01.txt" 11 | 12 | 13 | class TestUtilsIddiff(TestCase): 14 | """Tests for at.utils.iddiff""" 15 | 16 | def setUp(self): 17 | # susspress logging messages 18 | set_logger(CRITICAL) 19 | 20 | def tearDown(self): 21 | # set logging to INFO 22 | set_logger(INFO) 23 | 24 | def test_get_id_diff_error(self): 25 | with self.assertRaises(IddiffError): 26 | get_id_diff("", "") 27 | 28 | def test_get_id_diff(self): 29 | id_diff = get_id_diff( 30 | "".join([TEST_DATA_DIR, DRAFT_A]), "".join([TEST_DATA_DIR, DRAFT_B]) 31 | ) 32 | 33 | self.assertIn('', id_diff) 34 | self.assertIn(DRAFT_A, id_diff) 35 | self.assertIn(DRAFT_B, id_diff) 36 | 37 | def test_get_id_diff_with_rfcdiff(self): 38 | id_diff = get_id_diff( 39 | "".join([TEST_DATA_DIR, DRAFT_A]), 40 | "".join([TEST_DATA_DIR, DRAFT_B]), 41 | diff_tool="rfcdiff", 42 | ) 43 | 44 | self.assertIn("', id_diff) 56 | self.assertIn("", id_diff) 58 | self.assertIn(DRAFT_A, id_diff) 59 | self.assertIn(DRAFT_B, id_diff) 60 | 61 | def test_get_wdiff(self): 62 | id_diff = get_id_diff( 63 | "".join([TEST_DATA_DIR, DRAFT_A]), 64 | "".join([TEST_DATA_DIR, DRAFT_B]), 65 | wdiff=True, 66 | ) 67 | 68 | self.assertIn('', id_diff) 69 | self.assertIn("
", id_diff)
70 |         self.assertIn("
", id_diff) 71 | 72 | def test_get_chbars(self): 73 | id_diff = get_id_diff( 74 | "".join([TEST_DATA_DIR, DRAFT_A]), 75 | "".join([TEST_DATA_DIR, DRAFT_B]), 76 | chbars=True, 77 | ) 78 | 79 | self.assertIn("|Expires:", id_diff) 80 | 81 | def test_get_abdiff(self): 82 | id_diff = get_id_diff( 83 | "".join([TEST_DATA_DIR, DRAFT_A]), 84 | "".join([TEST_DATA_DIR, DRAFT_B]), 85 | abdiff=True, 86 | ) 87 | 88 | self.assertIn("OLD:", id_diff) 89 | self.assertIn("NEW:", id_diff) 90 | 91 | @patch("at.utils.iddiff.TIMEOUT", 0) 92 | def test_iddiff_timeout(self): 93 | with self.assertRaises(IddiffError): 94 | get_id_diff( 95 | "".join([TEST_DATA_DIR, DRAFT_A]), "".join([TEST_DATA_DIR, DRAFT_B]) 96 | ) 97 | -------------------------------------------------------------------------------- /tests/test_utils_logs.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from pathlib import Path 3 | from shutil import copy, rmtree 4 | from unittest import TestCase 5 | 6 | from at.utils.logs import ( 7 | get_errors, 8 | process_xml2rfc_log, 9 | update_logs, 10 | XML2RFC_ERROR_REGEX, 11 | XML2RFC_LINE_NUMBER_REGEX, 12 | XML2RFC_WARN_REGEX, 13 | ) 14 | from at.utils.validation import xml2rfc_validation 15 | 16 | TEST_DATA_DIR = "./tests/data/" 17 | TEST_XML_DRAFT = "draft-smoke-signals-00.xml" 18 | TEST_XML_INVALID = "draft-smoke-signals-00.invalid.xml" 19 | TEST_XML_V2_DRAFT = "draft-smoke-signals-00.v2.xml" 20 | TEST_DATA = [TEST_XML_DRAFT, TEST_XML_INVALID, TEST_XML_V2_DRAFT] 21 | TEMPORARY_DATA_DIR = "./tests/tmp/" 22 | 23 | 24 | class TestUtilsLogs(TestCase): 25 | """Tests for at.utils.logs""" 26 | 27 | def setUp(self): 28 | # susspress logging messages 29 | set_logger(CRITICAL) 30 | # create temporary data dir 31 | Path(TEMPORARY_DATA_DIR).mkdir(exist_ok=True) 32 | # create copies of test data in temporary data dir 33 | for file in TEST_DATA: 34 | original = "".join([TEST_DATA_DIR, file]) 35 | new = "".join([TEMPORARY_DATA_DIR, file]) 36 | copy(original, new) 37 | 38 | def tearDown(self): 39 | # set logging to INFO 40 | set_logger(INFO) 41 | # remove temporary data dir 42 | rmtree(TEMPORARY_DATA_DIR, ignore_errors=True) 43 | 44 | def test_process_xml2rfc_log(self): 45 | for file in TEST_DATA: 46 | filename = "".join([TEMPORARY_DATA_DIR, file]) 47 | output, _ = xml2rfc_validation(filename) 48 | log = process_xml2rfc_log(output, filename) 49 | 50 | self.assertIn("errors", log.keys()) 51 | self.assertIn("warnings", log.keys()) 52 | self.assertIn("bare_unicode", log.keys()) 53 | self.assertGreaterEqual(len(log["errors"]), 0) 54 | self.assertGreaterEqual(len(log["warnings"]), 0) 55 | self.assertGreaterEqual(len(log["bare_unicode"]), 0) 56 | for error in log["errors"]: 57 | self.assertNotRegex(r"xml2rfc", error) 58 | self.assertNotRegex(r"Error:", error) 59 | for warning in log["warnings"]: 60 | self.assertNotRegex(r"xml2rfc", warning) 61 | self.assertNotRegex(r"Warning:", warning) 62 | for bare_unicde in log["bare_unicode"]: 63 | self.assertIn("Found non-ascii characters", bare_unicde) 64 | 65 | def test_get_errors_valid(self): 66 | filename = "".join([TEMPORARY_DATA_DIR, TEST_XML_DRAFT]) 67 | output, _ = xml2rfc_validation(filename) 68 | errors = get_errors(output, filename) 69 | 70 | self.assertIsNone(errors) 71 | 72 | def test_get_errors_invalid(self): 73 | filename = "".join([TEMPORARY_DATA_DIR, TEST_XML_INVALID]) 74 | output, _ = xml2rfc_validation(filename) 75 | errors = get_errors(output, filename) 76 | 77 | 
self.assertIsNotNone(errors) 78 | self.assertIsInstance(errors, str) 79 | self.assertGreater(len(errors), 0) 80 | 81 | def test_update_logs(self): 82 | logs = { 83 | "errors": [ 84 | "foo", 85 | ], 86 | "warnings": [ 87 | "bar", 88 | ], 89 | } 90 | 91 | new_entries = { 92 | "errors": [ 93 | "foobar_error", 94 | "foobar_error_1", 95 | ], 96 | "warnings": [ 97 | "foobar_warning", 98 | "foobar_warning_1", 99 | ], 100 | } 101 | 102 | updated_logs = update_logs(logs, new_entries) 103 | 104 | for key, entries in logs.items(): 105 | self.assertIn(key, updated_logs.keys()) 106 | for entry in entries: 107 | self.assertIn(entry, updated_logs[key]) 108 | 109 | for key, entries in new_entries.items(): 110 | self.assertIn(key, updated_logs.keys()) 111 | for entry in entries: 112 | self.assertIn(entry, updated_logs[key]) 113 | 114 | def test_error_regex(self): 115 | logs = ( 116 | "/foo/bar.xml(3): Error: foobar", 117 | "/foo/bar.xml(3): ERROR: foobar", 118 | "/foo/bar.xml: Error: foobar", 119 | "Error: foobar", 120 | ) 121 | 122 | for log in logs: 123 | result = XML2RFC_ERROR_REGEX.search(log) 124 | self.assertEqual("foobar", result.group("message")) 125 | 126 | def test_warning_regex(self): 127 | logs = ( 128 | "/foo/bar.xml(3): Warning: foobar", 129 | "/foo/bar.xml(3): warning: foobar", 130 | "/foo/bar.xml: Warning: foobar", 131 | "warning: foobar", 132 | ) 133 | 134 | for log in logs: 135 | result = XML2RFC_WARN_REGEX.search(log) 136 | self.assertEqual("foobar", result.group("message")) 137 | 138 | def test_line_number_regex(self): 139 | logs = ( 140 | "/foo/bar.xml(f00): Warning: foobar", 141 | "/foo/bar.xml(f00): Error: foobar", 142 | "(f00): Warning: foobar", 143 | ) 144 | 145 | for log in logs: 146 | result = XML2RFC_LINE_NUMBER_REGEX.search(log) 147 | self.assertEqual("f00", result.group("line")) 148 | -------------------------------------------------------------------------------- /tests/test_utils_net.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from unittest import TestCase 3 | 4 | import responses 5 | 6 | from at.utils.net import ( 7 | get_both, 8 | get_latest, 9 | get_previous, 10 | is_valid_url, 11 | is_url, 12 | InvalidURL, 13 | DocumentNotFound, 14 | ) 15 | 16 | DT_LATEST_DRAFT_URL = "https://datatracker.ietf.org/api/rfcdiff-latest-json" 17 | 18 | 19 | class TestUtilsNet(TestCase): 20 | """Tests for at.utils.net""" 21 | 22 | def setUp(self): 23 | # susspress logging messages 24 | set_logger(CRITICAL) 25 | 26 | def tearDown(self): 27 | # set logging to INFO 28 | set_logger(INFO) 29 | 30 | def test_get_latest_not_found_error(self): 31 | with self.assertRaises(DocumentNotFound) as error: 32 | get_latest("foobar-foobar", DT_LATEST_DRAFT_URL) 33 | 34 | self.assertEqual( 35 | str(error.exception), "Can not find the latest document on datatracker" 36 | ) 37 | 38 | @responses.activate 39 | def test_get_latest_no_content_url_error(self): 40 | rfc = "rfc666" 41 | responses.add( 42 | responses.GET, "/".join([DT_LATEST_DRAFT_URL, rfc]), json={}, status=200 43 | ) 44 | with self.assertRaises(DocumentNotFound) as error: 45 | get_latest(rfc, DT_LATEST_DRAFT_URL) 46 | 47 | self.assertEqual( 48 | str(error.exception), 49 | "Can not find url for the latest document on datatracker", 50 | ) 51 | 52 | def test_get_latest_rfc(self): 53 | rfc = "rfc666" 54 | latest_draft_url = get_latest(rfc, DT_LATEST_DRAFT_URL) 55 | self.assertTrue(latest_draft_url.startswith("https://")) 56 | 57 | def 
test_get_latest_id(self): 58 | draft = "draft-ietf-quic-http" 59 | latest_draft_url = get_latest(draft, DT_LATEST_DRAFT_URL) 60 | self.assertTrue(latest_draft_url.startswith("https://")) 61 | 62 | def test_invalid_urls(self): 63 | allowed_domains = [ 64 | "example.com", 65 | ] 66 | urls = [ 67 | "ftp://example.com/", 68 | "file://example.com/", 69 | "example.com", 70 | "/etc/passwd", 71 | "../requirements.txt", 72 | "https://127.0.0.1", 73 | "https://127.0.0.1:80", 74 | "https://example.com:80", 75 | "https://example.com[/", 76 | "https://example.org", 77 | ] 78 | 79 | for url in urls: 80 | with self.assertRaises(InvalidURL): 81 | is_valid_url(url, allowed_domains=allowed_domains) 82 | 83 | def test_valid_url(self): 84 | allowed_domains = [ 85 | "example.com", 86 | ] 87 | urls = [ 88 | "http://example.com/", 89 | "https://example.com/", 90 | "https://example.com/example.xml", 91 | "https://example.com/example/example.xml", 92 | "http://www.example.com/", 93 | "https://www.example.com/", 94 | "https://www.example.com/example.xml", 95 | "https://www.example.com/example/example.xml", 96 | ] 97 | 98 | for url in urls: 99 | self.assertTrue(is_valid_url(url, allowed_domains=allowed_domains)) 100 | 101 | def test_is_url_true(self): 102 | strings = [ 103 | "http://example.com/", 104 | "https://example.com/", 105 | "https://example.com/example.xml", 106 | "https://example.com/example/example.xml", 107 | "http://www.example.com/", 108 | "https://www.example.com/", 109 | "https://www.example.com/example.xml", 110 | "https://www.example.com/example/example.xml", 111 | ] 112 | 113 | for string in strings: 114 | self.assertTrue(is_url(string)) 115 | 116 | def test_is_urls_false(self): 117 | strings = [ 118 | "http://example.com[/", 119 | "example.com", 120 | "/etc/passwd", 121 | "../requirements.txt", 122 | "rfc9000", 123 | "draft-ietf-httpbis-p2-semantics-26", 124 | ] 125 | 126 | for string in strings: 127 | self.assertFalse(is_url(string)) 128 | 129 | def test_get_previous_for_rfc(self): 130 | rfc = "rfc7749" 131 | previous = "draft-iab-xml2rfcv2-02" 132 | previous_doc_url = get_previous(rfc, DT_LATEST_DRAFT_URL) 133 | self.assertTrue(previous_doc_url.startswith("https://")) 134 | self.assertIn(previous, previous_doc_url) 135 | 136 | def test_get_previous_for_id(self): 137 | draft = "draft-ietf-sipcore-multiple-reasons-00" 138 | previous = "draft-sparks-sipcore-multiple-reasons-00" 139 | previous_doc_url = get_previous(draft, DT_LATEST_DRAFT_URL) 140 | self.assertTrue(previous_doc_url.startswith("https://")) 141 | self.assertIn(previous, previous_doc_url) 142 | 143 | def test_get_previous_not_found_error(self): 144 | with self.assertRaises(DocumentNotFound) as error: 145 | get_previous("foobar-foobar", DT_LATEST_DRAFT_URL) 146 | 147 | self.assertEqual( 148 | str(error.exception), "Can not find the previous document on datatracker" 149 | ) 150 | 151 | @responses.activate 152 | def test_get_previous_no_content_url_error(self): 153 | rfc = "rfc666" 154 | responses.add( 155 | responses.GET, "/".join([DT_LATEST_DRAFT_URL, rfc]), json={}, status=200 156 | ) 157 | with self.assertRaises(DocumentNotFound) as error: 158 | get_previous(rfc, DT_LATEST_DRAFT_URL) 159 | 160 | self.assertEqual( 161 | str(error.exception), 162 | "Can not find url for the previous document on datatracker", 163 | ) 164 | 165 | def test_get_both_for_rfc(self): 166 | rfc = "rfc7749" 167 | previous = "draft-iab-xml2rfcv2-02" 168 | previous_doc_url, latest_doc_url = get_both(rfc, DT_LATEST_DRAFT_URL) 169 | 
self.assertTrue(previous_doc_url.startswith("https://")) 170 | self.assertIn(previous, previous_doc_url) 171 | self.assertTrue(latest_doc_url.startswith("https://")) 172 | self.assertIn(rfc, latest_doc_url) 173 | 174 | def test_get_both_for_id(self): 175 | draft = "draft-ietf-sipcore-multiple-reasons-00" 176 | previous = "draft-sparks-sipcore-multiple-reasons-00" 177 | previous_doc_url, latest_doc_url = get_both(draft, DT_LATEST_DRAFT_URL) 178 | self.assertTrue(previous_doc_url.startswith("https://")) 179 | self.assertIn(previous, previous_doc_url) 180 | self.assertTrue(latest_doc_url.startswith("https://")) 181 | self.assertIn(draft, latest_doc_url) 182 | 183 | def test_get_both_latest_not_found_error(self): 184 | with self.assertRaises(DocumentNotFound) as error: 185 | get_both("foobar-foobar", DT_LATEST_DRAFT_URL) 186 | 187 | self.assertEqual( 188 | str(error.exception), "Can not find the latest document on datatracker" 189 | ) 190 | 191 | def test_get_both_previous_not_found_error(self): 192 | draft = "draft-reschke-xml2rfc-00" 193 | with self.assertRaises(DocumentNotFound) as error: 194 | get_both(draft, DT_LATEST_DRAFT_URL) 195 | 196 | self.assertEqual( 197 | str(error.exception), 198 | "Can not find url for previous document on datatracker", 199 | ) 200 | 201 | @responses.activate 202 | def test_get_both_no_content_url_error(self): 203 | rfc = "rfc666" 204 | responses.add( 205 | responses.GET, "/".join([DT_LATEST_DRAFT_URL, rfc]), json={}, status=200 206 | ) 207 | with self.assertRaises(DocumentNotFound) as error: 208 | get_both(rfc, DT_LATEST_DRAFT_URL) 209 | 210 | self.assertEqual( 211 | str(error.exception), 212 | "Can not find url for the latest document on datatracker", 213 | ) 214 | -------------------------------------------------------------------------------- /tests/test_utils_processor.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from pathlib import Path 3 | from shutil import copy, rmtree 4 | from unittest import TestCase 5 | 6 | from werkzeug.datastructures import FileStorage 7 | 8 | from at.utils.processor import ( 9 | clean_svg_ids, 10 | convert_v2v3, 11 | get_html, 12 | get_pdf, 13 | get_text, 14 | get_xml, 15 | kramdown2xml, 16 | md2xml, 17 | mmark2xml, 18 | process_file, 19 | txt2xml, 20 | rst2xml, 21 | ProcessingError, 22 | ) 23 | 24 | TEST_DATA_DIR = "./tests/data/" 25 | TEST_XML_DRAFT = "draft-smoke-signals-00.xml" 26 | TEST_XML_V2_DRAFT = "draft-smoke-signals-00.v2.xml" 27 | TEST_TEXT_DRAFT = "draft-smoke-signals-00.txt" 28 | TEST_KRAMDOWN_DRAFT = "draft-smoke-signals-00.md" 29 | TEST_MMARK_DRAFT = "draft-smoke-signals-00.mmark.md" 30 | TEST_XML_ERROR = "draft-smoke-signals-00.error.xml" 31 | TEST_RST_DRAFT = "draft-doe-smoke-signals-00.rst" 32 | TEST_DATA = [ 33 | TEST_XML_DRAFT, 34 | TEST_XML_V2_DRAFT, 35 | TEST_TEXT_DRAFT, 36 | TEST_KRAMDOWN_DRAFT, 37 | TEST_MMARK_DRAFT, 38 | TEST_RST_DRAFT, 39 | ] 40 | TEMPORARY_DATA_DIR = "./tests/tmp/" 41 | 42 | 43 | class TestUtilsProcessor(TestCase): 44 | """Tests for at.utils.processor""" 45 | 46 | def setUp(self): 47 | # suppress logging messages 48 | set_logger(CRITICAL) 49 | # create temporary data dir 50 | Path(TEMPORARY_DATA_DIR).mkdir(exist_ok=True) 51 | # create copies of test data in temporary data dir 52 | for file in TEST_DATA: 53 | original = "".join([TEST_DATA_DIR, file]) 54 | new = "".join([TEMPORARY_DATA_DIR, file]) 55 | copy(original, new) 56 | 57 | def tearDown(self): 58 | # set logging to INFO 59 |
set_logger(INFO) 60 | # remove temporary data dir 61 | rmtree(TEMPORARY_DATA_DIR, ignore_errors=True) 62 | 63 | def test_process_file(self): 64 | for filename in TEST_DATA: 65 | with open("".join([TEST_DATA_DIR, filename]), "rb") as file: 66 | file_object = FileStorage(file, filename=filename) 67 | dir_path, saved_file = process_file(file_object, TEMPORARY_DATA_DIR) 68 | 69 | self.assertTrue(Path(dir_path).is_dir()) 70 | self.assertTrue(Path(saved_file).exists()) 71 | self.assertEqual(Path(saved_file).suffix, ".xml") 72 | 73 | def test_md2xml(self): 74 | for filename in [TEST_KRAMDOWN_DRAFT, TEST_MMARK_DRAFT]: 75 | saved_file = md2xml("".join([TEMPORARY_DATA_DIR, filename])) 76 | 77 | self.assertTrue(Path(saved_file).exists()) 78 | self.assertEqual(Path(saved_file).suffix, ".xml") 79 | 80 | def test_kramdown2xml(self): 81 | saved_file = kramdown2xml("".join([TEMPORARY_DATA_DIR, TEST_KRAMDOWN_DRAFT])) 82 | 83 | self.assertTrue(Path(saved_file).exists()) 84 | self.assertEqual(Path(saved_file).suffix, ".xml") 85 | 86 | def test_mmark2xml(self): 87 | saved_file = mmark2xml("".join([TEMPORARY_DATA_DIR, TEST_MMARK_DRAFT])) 88 | 89 | self.assertTrue(Path(saved_file).exists()) 90 | self.assertEqual(Path(saved_file).suffix, ".xml") 91 | 92 | def test_mmark2xml_error(self): 93 | with self.assertRaises(ProcessingError): 94 | mmark2xml("foobar") 95 | 96 | def test_rst2xml(self): 97 | saved_file = rst2xml("".join([TEMPORARY_DATA_DIR, TEST_RST_DRAFT])) 98 | 99 | self.assertTrue(Path(saved_file).exists()) 100 | self.assertEqual(Path(saved_file).suffix, ".xml") 101 | 102 | def test_rst2xml_error(self): 103 | with self.assertRaises(ProcessingError): 104 | rst2xml("foobar") 105 | 106 | def test_txt2xml(self): 107 | saved_file = txt2xml("".join([TEMPORARY_DATA_DIR, TEST_TEXT_DRAFT])) 108 | 109 | self.assertTrue(Path(saved_file).exists()) 110 | self.assertEqual(Path(saved_file).suffix, ".xml") 111 | 112 | def test_convert_v2v3(self): 113 | saved_file, logs = convert_v2v3( 114 | "".join([TEMPORARY_DATA_DIR, TEST_XML_V2_DRAFT]) 115 | ) 116 | 117 | self.assertTrue(Path(saved_file).exists()) 118 | self.assertEqual(Path(saved_file).suffix, ".xml") 119 | self.assertIn("errors", logs.keys()) 120 | self.assertIn("warnings", logs.keys()) 121 | 122 | def test_convert_v2v3_error(self): 123 | with self.assertRaises(ProcessingError): 124 | saved_file, logs = convert_v2v3( 125 | "".join([TEMPORARY_DATA_DIR, TEST_XML_ERROR]) 126 | ) 127 | 128 | def test_get_xml(self): 129 | saved_file, logs = get_xml("".join([TEMPORARY_DATA_DIR, TEST_XML_V2_DRAFT])) 130 | 131 | self.assertTrue(Path(saved_file).exists()) 132 | self.assertEqual(Path(saved_file).suffix, ".xml") 133 | self.assertIn("errors", logs.keys()) 134 | self.assertIn("warnings", logs.keys()) 135 | 136 | def test_get_xml_v3(self): 137 | saved_file, logs = get_xml("".join([TEMPORARY_DATA_DIR, TEST_XML_DRAFT])) 138 | 139 | self.assertTrue(Path(saved_file).exists()) 140 | self.assertEqual(Path(saved_file).suffix, ".xml") 141 | self.assertIsNone(logs) 142 | 143 | def test_get_html(self): 144 | saved_file, logs = get_html("".join([TEMPORARY_DATA_DIR, TEST_XML_DRAFT])) 145 | 146 | self.assertTrue(Path(saved_file).exists()) 147 | self.assertEqual(Path(saved_file).suffix, ".html") 148 | self.assertIn("errors", logs.keys()) 149 | self.assertIn("warnings", logs.keys()) 150 | 151 | def test_get_html_error(self): 152 | with self.assertRaises(ProcessingError): 153 | saved_file, logs = get_html("".join([TEMPORARY_DATA_DIR, TEST_XML_ERROR])) 154 | 155 | def test_get_text(self): 156 |
saved_file, logs = get_text("".join([TEMPORARY_DATA_DIR, TEST_XML_DRAFT])) 157 | 158 | self.assertTrue(Path(saved_file).exists()) 159 | self.assertEqual(Path(saved_file).suffix, ".txt") 160 | self.assertIn("errors", logs.keys()) 161 | self.assertIn("warnings", logs.keys()) 162 | 163 | def test_get_text_error(self): 164 | with self.assertRaises(ProcessingError): 165 | saved_file, logs = get_text("".join([TEMPORARY_DATA_DIR, TEST_XML_ERROR])) 166 | 167 | def test_get_pdf(self): 168 | saved_file, logs = get_pdf("".join([TEMPORARY_DATA_DIR, TEST_XML_DRAFT])) 169 | 170 | self.assertTrue(Path(saved_file).exists()) 171 | self.assertEqual(Path(saved_file).suffix, ".pdf") 172 | self.assertIn("errors", logs.keys()) 173 | self.assertIn("warnings", logs.keys()) 174 | 175 | def test_get_pdf_error(self): 176 | with self.assertRaises(ProcessingError): 177 | saved_file, logs = get_pdf("".join([TEMPORARY_DATA_DIR, TEST_XML_ERROR])) 178 | 179 | def test_clean_svg_ids(self): 180 | saved_file = clean_svg_ids("".join([TEMPORARY_DATA_DIR, TEST_XML_DRAFT])) 181 | 182 | self.assertTrue(Path(saved_file).exists()) 183 | self.assertEqual(Path(saved_file).suffix, ".xml") 184 | -------------------------------------------------------------------------------- /tests/test_utils_runner.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from at.utils.runner import proc_run, RunnerError 4 | 5 | 6 | class TestUtilsRunner(TestCase): 7 | """Tests for at.utils.runner""" 8 | 9 | def test_proc_run_no_timeout(self): 10 | output = proc_run(args=["echo", "foobar"], timeout=100) 11 | output.check_returncode() 12 | self.assertEqual(output.stdout.decode("utf-8").strip(), "foobar") 13 | 14 | def test_proc_run_timeout(self): 15 | with self.assertRaises(RunnerError): 16 | output = proc_run(args=["sleep", "100"], timeout=1) 17 | output.check_returncode() 18 | -------------------------------------------------------------------------------- /tests/test_utils_text.py: -------------------------------------------------------------------------------- 1 | from logging import disable as set_logger, INFO, CRITICAL 2 | from pathlib import Path 3 | from shutil import copy, rmtree 4 | from unittest import TestCase 5 | 6 | from werkzeug.datastructures import FileStorage 7 | 8 | from at.utils.text import ( 9 | get_text_id, 10 | get_text_id_from_file, 11 | get_text_id_from_url, 12 | TextProcessingError, 13 | ) 14 | 15 | TEST_DATA_DIR = "./tests/data/" 16 | DRAFT_A = "draft-smoke-signals-00.txt" 17 | DRAFT_B = "draft-smoke-signals-01.txt" 18 | TEST_XML_DRAFT = "draft-smoke-signals-00.xml" 19 | TEST_XML_V2_DRAFT = "draft-smoke-signals-00.v2.xml" 20 | TEST_KRAMDOWN_DRAFT = "draft-smoke-signals-00.md" 21 | TEST_MMARK_DRAFT = "draft-smoke-signals-00.mmark.md" 22 | TEST_XML_ERROR = "draft-smoke-signals-00.error.xml" 23 | TEST_DATA = [ 24 | TEST_XML_DRAFT, 25 | TEST_XML_V2_DRAFT, 26 | DRAFT_A, 27 | DRAFT_B, 28 | TEST_KRAMDOWN_DRAFT, 29 | TEST_MMARK_DRAFT, 30 | ] 31 | TEMPORARY_DATA_DIR = "./tests/tmp/" 32 | 33 | 34 | class TestUtilsText(TestCase): 35 | """Tests for at.utils.text""" 36 | 37 | def setUp(self): 38 | # suppress logging messages 39 | set_logger(CRITICAL) 40 | # create temporary data dir 41 | Path(TEMPORARY_DATA_DIR).mkdir(exist_ok=True) 42 | 43 | def tearDown(self): 44 | # set logging to INFO 45 | set_logger(INFO) 46 | # remove temporary data dir 47 | rmtree(TEMPORARY_DATA_DIR, ignore_errors=True) 48 | 49 | def test_get_text_id(self): 50 | for filename in TEST_DATA: 51 |
original = "".join([TEST_DATA_DIR, filename]) 52 | new = "".join([TEMPORARY_DATA_DIR, filename]) 53 | copy(original, new) 54 | (dir_path, file_path) = get_text_id(TEMPORARY_DATA_DIR, new) 55 | self.assertTrue(Path(dir_path).exists()) 56 | self.assertTrue(Path(file_path).exists()) 57 | self.assertEqual(Path(file_path).suffix, ".txt") 58 | 59 | def test_get_text_id_error(self): 60 | filename = TEST_XML_ERROR 61 | original = "".join([TEST_DATA_DIR, filename]) 62 | new = "".join([TEMPORARY_DATA_DIR, filename]) 63 | copy(original, new) 64 | 65 | with self.assertRaises(TextProcessingError): 66 | get_text_id(TEMPORARY_DATA_DIR, new) 67 | 68 | def test_get_text_id_from_file(self): 69 | for filename in TEST_DATA: 70 | with open("".join([TEST_DATA_DIR, filename]), "rb") as file: 71 | file_object = FileStorage(file, filename=filename) 72 | (dir_path, file_path) = get_text_id_from_file( 73 | file_object, TEMPORARY_DATA_DIR 74 | ) 75 | self.assertTrue(Path(dir_path).exists()) 76 | self.assertTrue(Path(file_path).exists()) 77 | self.assertEqual(Path(file_path).suffix, ".txt") 78 | 79 | def test_get_text_id_from_file_error(self): 80 | filename = TEST_XML_ERROR 81 | with open("".join([TEST_DATA_DIR, filename]), "rb") as file: 82 | file_object = FileStorage(file, filename=filename) 83 | with self.assertRaises(TextProcessingError): 84 | get_text_id_from_file(file_object, TEMPORARY_DATA_DIR) 85 | 86 | def test_get_text_id_from_url(self): 87 | url = "https://www.ietf.org/archive/id/draft-iab-xml2rfcv2-01.xml" 88 | (dir_path, file_path) = get_text_id_from_url(url, TEMPORARY_DATA_DIR) 89 | self.assertTrue(Path(dir_path).exists()) 90 | self.assertTrue(Path(file_path).exists()) 91 | self.assertEqual(Path(file_path).suffix, ".txt") 92 | 93 | def test_get_text_id_from_url_error(self): 94 | url = "https://author-tools.ietf.org/sitemap.xml" 95 | with self.assertRaises(TextProcessingError): 96 | get_text_id_from_url(url, TEMPORARY_DATA_DIR) 97 | 98 | def test_get_text_id_from_file_raw(self): 99 | for filename in TEST_DATA: 100 | suffix = f".{filename.split('.')[-1]}" 101 | with open("".join([TEST_DATA_DIR, filename]), "rb") as file: 102 | file_object = FileStorage(file, filename=filename) 103 | (dir_path, file_path) = get_text_id_from_file( 104 | file_object, TEMPORARY_DATA_DIR, raw=True 105 | ) 106 | self.assertTrue(Path(dir_path).exists()) 107 | self.assertTrue(Path(file_path).exists()) 108 | self.assertEqual(Path(file_path).suffix, suffix) 109 | 110 | def test_get_text_id_from_url_raw(self): 111 | url = "https://www.ietf.org/archive/id/draft-iab-xml2rfcv2-01.xml" 112 | (dir_path, file_path) = get_text_id_from_url(url, TEMPORARY_DATA_DIR, raw=True) 113 | self.assertTrue(Path(dir_path).exists()) 114 | self.assertTrue(Path(file_path).exists()) 115 | self.assertEqual(Path(file_path).suffix, ".xml") 116 | 117 | def test_get_text_id_from_file_text_or_xml(self): 118 | for filename in TEST_DATA: 119 | suffix = f".{filename.split('.')[-1]}" 120 | with open("".join([TEST_DATA_DIR, filename]), "rb") as file: 121 | file_object = FileStorage(file, filename=filename) 122 | (dir_path, file_path) = get_text_id_from_file( 123 | file_object, TEMPORARY_DATA_DIR, text_or_xml=True 124 | ) 125 | self.assertTrue(Path(dir_path).exists()) 126 | self.assertTrue(Path(file_path).exists()) 127 | if suffix in [".xml", ".txt"]: 128 | self.assertEqual(Path(file_path).suffix, suffix) 129 | else: 130 | self.assertEqual(Path(file_path).suffix, ".txt") 131 | 132 | def test_get_text_id_from_url_text_or_xml(self): 133 | url = 
"https://www.ietf.org/archive/id/draft-iab-xml2rfcv2-01.xml" 134 | (dir_path, file_path) = get_text_id_from_url( 135 | url, TEMPORARY_DATA_DIR, text_or_xml=True 136 | ) 137 | self.assertTrue(Path(dir_path).exists()) 138 | self.assertTrue(Path(file_path).exists()) 139 | self.assertEqual(Path(file_path).suffix, ".xml") 140 | -------------------------------------------------------------------------------- /tests/test_utils_version.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from docker.version import ( 4 | get_aasvg_version, 5 | get_idnits_version, 6 | get_id2xml_version, 7 | get_iddiff_version, 8 | get_mmark_version, 9 | get_kramdown_rfc_version, 10 | get_rfcdiff_version, 11 | get_svgcheck_version, 12 | get_weasyprint_version, 13 | get_xml2rfc_version, 14 | get_rst2rfcxml_version, 15 | ) 16 | 17 | 18 | class TestUtilsVersion(TestCase): 19 | """Tests for at.utils.version""" 20 | 21 | def test_get_kramdown_rfc_version(self): 22 | result = get_kramdown_rfc_version() 23 | 24 | self.assertIsNotNone(result) 25 | self.assertIn(".", result) 26 | 27 | def test_get_id2xml_version(self): 28 | result = get_id2xml_version() 29 | 30 | self.assertIsNotNone(result) 31 | self.assertIn(".", result) 32 | 33 | def test_get_xml2rfc_version(self): 34 | result = get_xml2rfc_version() 35 | 36 | self.assertIsNotNone(result) 37 | self.assertIn(".", result) 38 | 39 | def test_get_mmark_version(self): 40 | result = get_mmark_version() 41 | 42 | self.assertIsNotNone(result) 43 | self.assertIn(".", result) 44 | 45 | def test_get_weasyprint_version(self): 46 | result = get_weasyprint_version() 47 | 48 | self.assertIsNotNone(result) 49 | self.assertIn(".", result) 50 | 51 | def test_get_idnits_version(self): 52 | result = get_idnits_version() 53 | 54 | self.assertIsNotNone(result) 55 | self.assertIn(".", result) 56 | 57 | def test_get_aasvg_version(self): 58 | result = get_aasvg_version() 59 | 60 | self.assertIsNotNone(result) 61 | self.assertIn(".", result) 62 | 63 | def test_get_iddiff_version(self): 64 | result = get_iddiff_version() 65 | 66 | self.assertIsNotNone(result) 67 | self.assertIn(".", result) 68 | 69 | def test_get_svgcheck_version(self): 70 | result = get_svgcheck_version() 71 | 72 | self.assertIsNotNone(result) 73 | self.assertIn(".", result) 74 | 75 | def test_get_rfcdiff_version(self): 76 | result = get_rfcdiff_version() 77 | 78 | self.assertIsNotNone(result) 79 | self.assertIn(".", result) 80 | 81 | def test_get_rst2rfcxml_version(self): 82 | result = get_rst2rfcxml_version() 83 | 84 | self.assertIsNotNone(result) 85 | self.assertIn(".", result) 86 | --------------------------------------------------------------------------------