├── tests
├── __init__.py
├── unit
│ ├── __init__.py
│ └── test_publishing.py
├── configs
│ └── basic.toml
├── conftest.py
└── factories.py
├── docs
├── CNAME
├── assets
│ └── love.png
├── stylesheets
│ └── one-page.css
└── index.md
├── .pyup.yml
├── CHANGELOG.md
├── .dockerignore
├── pytest.ini
├── startup.sh
├── setup.py
├── requirements.txt
├── test-requirements.txt
├── .editorconfig
├── docker-compose.dev.yaml
├── github_test.sh
├── CONTRIBUTING.md
├── docker-compose.yaml
├── publish
├── exceptions.py
├── __init__.py
├── helpers.py
├── cloudflare.py
├── http.py
├── config.py
├── cli.py
└── publishing.py
├── mkdocs.yml
├── Dockerfile
├── setup.cfg
├── LICENSE
├── .gitignore
├── .travis.yml
└── README.md
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/unit/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/CNAME:
--------------------------------------------------------------------------------
1 | ipfs-publish.uhlir.dev
--------------------------------------------------------------------------------
/.pyup.yml:
--------------------------------------------------------------------------------
1 | schedule: "every month"
2 | pr_prefix: "[PyUp] "
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | ## 0.1.0
4 |
First release with the following features:
6 |
--------------------------------------------------------------------------------
/docs/assets/love.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AuHau/ipfs-publish/HEAD/docs/assets/love.png
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !publish
3 | !/setup.*
4 | !requirements.txt
5 | !.git
6 | !README.md
7 | !startup.sh
--------------------------------------------------------------------------------
/tests/configs/basic.toml:
--------------------------------------------------------------------------------
1 | host = "localhost"
2 | port = 8070
3 |
4 | [ipfs]
5 | host = "localhost"
6 | port = 5001
7 |
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | addopts = --cov publish --maxfail=20
3 | markers =
4 | unit: Unit tests
5 | integration: Integration tests
--------------------------------------------------------------------------------
/startup.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | export HOST_ADDR=$(ip -4 route list match 0/0 | awk '{print $3}')
4 |
5 | /data/.local/bin/ipfs-publish $@
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
import setuptools

# All package metadata lives in setup.cfg and is consumed by pbr.
setuptools.setup(
    setup_requires=['pbr>=5.0', 'setuptools>=38.6.0', 'wheel>=0.31.0', 'twine>=1.11.0'],
    pbr=True,
)
8 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | gitpython==3.1.9
2 | click==7.1.2
3 | click-completion==0.5.2
4 | inquirer==2.7.0
5 | pbr==5.4.4
6 | quart==0.13.1
7 | ipfshttpclient==0.6.1
8 | toml==0.10.1
9 | appdirs==1.4.4
10 | cloudflare==2.8.13
--------------------------------------------------------------------------------
/test-requirements.txt:
--------------------------------------------------------------------------------
1 | -r requirements.txt
2 | pytest==5.3.5
3 | pytest-mock==2.0.0
4 | pytest-cov==2.8.1
5 | factory_boy==2.12.0
6 |
7 | # Docs
8 | mkdocs-material==4.6.2
9 | mkdocs==1.0.4
10 | pymdown-extensions==6.3
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | charset = utf-8
5 | end_of_line = lf
6 |
7 | [*.py]
8 | indent_style = space
9 | indent_size = 4
10 | trim_trailing_whitespace = true
11 | insert_final_newline = true
12 | max_line_length = 120
13 |
--------------------------------------------------------------------------------
/docker-compose.dev.yaml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 | ipfs-publish:
5 | environment:
6 | QUART_APP: publish.http:app
7 | QUART_ENV: development
8 | # command: '/data/.local/bin/quart run'
9 | volumes:
10 | - ./:/app
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 |
3 | import pytest
4 |
5 | from publish import config as config_module
6 |
7 |
@pytest.fixture
def config():
    """
    Provide a Config instance loaded from tests/configs/basic.toml.
    """
    # Path(__file__) is the conftest.py *file* itself; appending '..' to a file
    # path does not strip the file name, so the original '.. / ..' resolved to
    # <repo-root>/configs/basic.toml, which does not exist. The test config
    # lives next to this file in tests/configs/ — same as in factories.py.
    path = pathlib.Path(__file__).parent / 'configs' / 'basic.toml'
    return config_module.Config(path)
12 |
--------------------------------------------------------------------------------
/github_test.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | repo_name=$1
4 | secret=$2
5 | ref=$3
6 | url=${4-http://localhost:8000}
7 |
8 | data="{\"ref\": \"refs/heads/${ref}\"}"
9 |
10 | sig=$(echo -n "${data}" | openssl dgst -sha1 -hmac "${secret}" | awk '{print "X-Hub-Signature: sha1="$1}')
11 |
12 | curl -X POST -H "X-GitHub-Event: push" -H "Content-Type: application/json" -H "${sig}" --data "${data}" ${url}/publish/${repo_name}
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Any contributions are welcomed.
4 |
Before making any big contribution, it is a good idea to first discuss it in the relevant issue, to get a sense of
which direction is best and what kind of result will be most readily accepted.
7 |
It is also appreciated if your PRs contain test coverage.
9 |
10 | ## Tips and tricks for development
11 |
* It is a good idea to use the `IPFS_PUBLISH_CONFIG` env. variable to set a custom config
location for development.
* If you want to see exceptions with stack-traces, set the `IPFS_PUBLISH_EXCEPTIONS` env. variable to `True`.
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 | ipfs:
5 | image: ipfs/go-ipfs:v0.4.23
6 | volumes:
7 | - /data/ipfs
8 | ipfs-publish:
9 | image: ipfs-publish
10 | build:
11 | context: .
12 | args:
13 | IPFS_PUBLISH_CONFIG: /data/ipfs_publish/config.toml
14 | environment:
15 | IPFS_PUBLISH_CONFIG: /data/ipfs_publish/config.toml
16 | IPFS_PUBLISH_VERBOSITY: 3
17 | IPFS_PUBLISH_IPFS_MULTIADDR: /dns4/ipfs/tcp/5001/http/
18 | volumes:
19 | - /data/ipfs_publish
20 | depends_on:
21 | - ipfs
22 | ports:
23 | - 8080:8000
--------------------------------------------------------------------------------
/publish/exceptions.py:
--------------------------------------------------------------------------------
1 |
class IpfsPublishException(Exception):
    """Base exception for all errors raised by IPFS Publish."""
7 |
8 |
class ConfigException(IpfsPublishException):
    """Raised for any configuration related error."""
14 |
15 |
class RepoException(IpfsPublishException):
    """Raised by the Repo class, mostly when the repo is in an invalid state."""
21 |
22 |
class PublishingException(IpfsPublishException):
    """Raised when anything goes wrong while publishing a repo."""
28 |
29 |
class HttpException(IpfsPublishException):
    """Raised while handling HTTP requests."""
35 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: 'IPFS Publish - Documentation'
2 | site_description: 'Publish your static webpages directly from Git - automatically!'
3 | site_author: 'Adam Uhlíř'
4 |
5 | copyright: 'Copyright © 2019 Adam Uhlíř'
6 |
7 | repo_name: 'auhau/ipfs-publish'
8 | repo_url: 'https://github.com/auhau/ipfs-publish'
9 | theme:
10 | name: 'material'
11 | palette:
12 | primary: 'orange'
13 | accent: 'orange'
14 | logo:
15 | icon: 'power_settings_new'
16 |
17 | extra_css:
18 | - 'stylesheets/one-page.css'
19 |
20 | markdown_extensions:
21 | - admonition
22 | - codehilite:
23 | guess_lang: false
24 | - toc:
25 | permalink: true
26 | - pymdownx.superfences
27 |
28 | google_analytics:
29 | - UA-69422360-3
30 | - auto
31 |
32 | nav:
33 | - Home: 'index.md'
--------------------------------------------------------------------------------
/tests/factories.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 |
3 | import factory
4 |
5 | from publish import config as config_module, publishing
6 |
7 |
class PublishFactory(factory.Factory):
    # Shared base for all factories in this test suite; BUILD_STRATEGY builds
    # objects in memory only, without any persistence step.
    class Meta:
        strategy = factory.BUILD_STRATEGY
11 |
12 |
class ConfigFactory(PublishFactory):
    # Always loads the checked-in test config from tests/configs/.
    path = pathlib.Path(__file__).parent / 'configs' / 'basic.toml'

    class Meta:
        model = config_module.Config
18 |
19 |
class RepoFactory(PublishFactory):
    # Builds publishing.GenericRepo instances with randomized faker data.
    config = factory.SubFactory(ConfigFactory)
    name = factory.Faker('slug')
    git_repo_url = factory.Faker('url')
    secret = factory.Faker('pystr', min_chars=20, max_chars=20)

    class Meta:
        model = publishing.GenericRepo
28 |
29 |
class GithubRepoFactory(RepoFactory):
    # Same attributes as RepoFactory, but builds publishing.GithubRepo.
    class Meta:
        model = publishing.GithubRepo
33 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.7
2 |
3 | LABEL author="Adam Uhlir $IPFS_PUBLISH_CONFIG
25 |
26 | COPY . /app
27 | RUN pip install --user .
28 |
29 | VOLUME /data/ipfs_publish
30 | ENTRYPOINT ["./startup.sh"]
31 | CMD ["server"]
32 |
33 | # Http webhook server endpoint
34 | EXPOSE 8000
35 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = ipfs-publish
3 | summary = CD-like tool for publishing your static website from Git to IPFS
4 | description-file = README.md
5 | description-content-type = text/markdown
6 | author = Adam Uhlir
7 | author-email = adam@uhlir.dev
8 | license = MIT
9 | home-page = https://ipfs-publish.uhlir.dev
10 | project_urls =
11 | Source = https://github.com/AuHau/ipfs-publish
12 | python_requires = >=3.7.0, <3.9.0
13 | classifier =
14 | License :: OSI Approved :: MIT License
15 | Programming Language :: Python
16 | Programming Language :: Python :: 3
17 | Programming Language :: Python :: 3.7
18 | Programming Language :: Python :: 3.8
19 | Programming Language :: Python :: Implementation :: CPython
20 | Development Status :: 4 - Beta
21 | Intended Audience :: Developers
22 |
23 | [files]
24 | packages =
25 | publish
26 |
27 | [entry_points]
28 | console_scripts =
29 | ipfs-publish = publish.cli:main
30 |
31 | [bdist_wheel]
32 | universal = 1
33 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Adam Uhlíř
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/docs/stylesheets/one-page.css:
--------------------------------------------------------------------------------
1 | .md-sidebar--primary {
2 | display: none;
3 | }
4 |
5 | .md-sidebar--secondary {
6 | display: block;
7 | margin-left: 0;
8 | transform: none;
9 | }
10 |
11 | .md-content {
12 | margin-right: 0;
13 | }
14 |
15 | [data-md-toggle=drawer]:checked ~ .md-container .md-sidebar--secondary {
16 | box-shadow: 0 8px 10px 1px rgba(0, 0, 0, .14), 0 3px 14px 2px rgba(0, 0, 0, .12), 0 5px 5px -3px rgba(0, 0, 0, .4);
17 | -webkit-transform: translateX(24.2rem);
18 | transform: translateX(24.2rem);
19 | }
20 |
21 | @media only screen and (max-width: 76.1875em) {
22 | .md-sidebar--secondary {
23 | position: fixed;
24 | top: 0;
25 | left: -24.2rem;
26 | width: 24.2rem;
27 | height: 100% !important;
28 | -webkit-transform: translateX(0);
29 | transform: translateX(0);
30 | transition: box-shadow .25s, -webkit-transform .25s cubic-bezier(.4, 0, .2, 1);
31 | transition: transform .25s cubic-bezier(.4, 0, .2, 1), box-shadow .25s;
32 | transition: transform .25s cubic-bezier(.4, 0, .2, 1), box-shadow .25s, -webkit-transform .25s cubic-bezier(.4, 0, .2, 1);
33 | background-color: #fff;
34 | z-index: 3;
35 |
36 | }
37 | }
--------------------------------------------------------------------------------
/publish/__init__.py:
--------------------------------------------------------------------------------
from pbr.version import VersionInfo

# Version is derived by pbr from git tags / package metadata (see setup.cfg).
VERSION = VersionInfo('ipfs-publish').semantic_version()
__version__ = VERSION.release_string()

APP_NAME = 'ipfs_publish'
"""
Constant that defines the basic application name, that is used for appdata
"""

ENV_NAME_CONFIG_PATH: str = 'IPFS_PUBLISH_CONFIG'
"""
Name of environmental variable that holds path to the toml config that should be used.
"""

ENV_NAME_IPFS_HOST: str = 'IPFS_PUBLISH_IPFS_HOST'
"""
Name of environmental variable that defines the hostname of the go-ipfs daemon's API.
"""

ENV_NAME_IPFS_PORT: str = 'IPFS_PUBLISH_IPFS_PORT'
"""
Name of environmental variable that defines the port of the go-ipfs daemon's API.
"""

ENV_NAME_IPFS_MULTIADDR: str = 'IPFS_PUBLISH_IPFS_MULTIADDR'
"""
Name of environmental variable that defines the multiaddr of the go-ipfs daemon's API.
"""

ENV_NAME_VERBOSITY_LEVEL: str = 'IPFS_PUBLISH_VERBOSITY'
"""
Name of environmental variable that can increase the level of logging verbosity.
"""

ENV_NAME_PASS_EXCEPTIONS: str = 'IPFS_PUBLISH_EXCEPTIONS'
"""
Name of environmental variable that disables catching of Exceptions for CLI commands.
"""

PUBLISH_IGNORE_FILENAME: str = '.ipfs_publish_ignore'
"""
Name of the file that is looked up inside the cloned repo, that defines which files should be removed prior to publishing.
"""

DEFAULT_LENGTH_OF_SECRET: int = 25
"""
Int defining length of generated secret.
"""

IPNS_KEYS_NAME_PREFIX: str = 'ipfs_publish'
"""
Prefix that is prepended to the generated name used for naming the IPNS key.
"""

IPNS_KEYS_TYPE: str = 'rsa'
"""
Type of IPNS key to be generated.
"""
60 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | ### JetBrains template
3 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
4 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
5 |
6 | # User-specific stuff
7 | .idea/
8 |
9 | # File-based project format
10 | *.iws
11 |
12 | # IntelliJ
13 | out/
14 |
15 | # mpeltonen/sbt-idea plugin
16 | .idea_modules/
17 |
18 | # JIRA plugin
19 | atlassian-ide-plugin.xml
20 |
21 | # Crashlytics plugin (for Android Studio and IntelliJ)
22 | com_crashlytics_export_strings.xml
23 | crashlytics.properties
24 | crashlytics-build.properties
25 | fabric.properties
26 |
27 | ### Python template
28 | # Byte-compiled / optimized / DLL files
29 | __pycache__/
30 | *.py[cod]
31 | *$py.class
32 |
33 | # C extensions
34 | *.so
35 |
36 | # Distribution / packaging
37 | .Python
38 | build/
39 | develop-eggs/
40 | dist/
41 | downloads/
42 | eggs/
43 | .eggs/
44 | lib/
45 | lib64/
46 | parts/
47 | sdist/
48 | var/
49 | wheels/
50 | *.egg-info/
51 | .installed.cfg
52 | *.egg
53 | MANIFEST
54 |
55 | # PyInstaller
56 | # Usually these files are written by a python script from a template
57 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
58 | *.manifest
59 | *.spec
60 |
61 | # Installer logs
62 | pip-log.txt
63 | pip-delete-this-directory.txt
64 |
65 | # Unit test / coverage reports
66 | htmlcov/
67 | .tox/
68 | .coverage
69 | .coverage.*
70 | .cache
71 | nosetests.xml
72 | coverage.xml
73 | *.cover
74 | .hypothesis/
75 | .pytest_cache/
76 |
77 | # Translations
78 | *.mo
79 | *.pot
80 |
81 | # Sphinx documentation
82 | docs/_build/
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # Environments
88 | .env
89 | .venv
90 | .direnv
91 | .envrc
92 | env/
93 | venv/
94 | ENV/
95 | env.bak/
96 | venv.bak/
97 |
98 | # packaging
99 | AUTHORS
100 | ChangeLog
101 |
102 | # mkdocs documentation
103 | /site
104 |
105 | # mypy
106 | .mypy_cache/
107 |
108 | # dev configs
109 | /configs
110 |
111 | # giTrack tracking
112 | .gitrack
--------------------------------------------------------------------------------
/publish/helpers.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | import sys
4 |
5 | #######################################################################
6 | # Logging
7 | from publish import ENV_NAME_VERBOSITY_LEVEL, exceptions
8 |
9 |
class NoOutput:
    """
    Minimal file-like sink that silently discards everything written to it.

    Used by setup_logging() to replace sys.stdout/sys.stderr when full
    silence (verbosity == -1) is requested.
    """

    def write(self, *args, **kwargs) -> None:
        # sys.stdout.write(text) passes the text positionally; the original
        # zero-argument signature made every print() raise TypeError.
        pass

    def flush(self) -> None:
        # print(..., flush=True) and logging shutdown call flush() on the stream.
        pass
13 |
14 |
VERBOSITY_PACKAGES = {
    'urllib3': 4,
    'asyncio': 5,
}
"""
Dictionary that defines thresholds of verbosity for packages.
If verbosity (eq. number of Vs for CLI command) is below the number, the logging for the package will be ignored.
"""
23 |
24 |
def setup_logging(verbosity: int) -> None:
    """
    Configure the logging package according to the requested verbosity.

    The IPFS_PUBLISH_VERBOSITY env. variable can raise (but never lower)
    the effective verbosity.

    :param verbosity: Verbosity level; -1 silences stdout/stderr entirely.
    :raises exceptions.IpfsPublishException: When the env. variable is not an integer.
    :return:
    """
    env_value = os.environ.get(ENV_NAME_VERBOSITY_LEVEL)
    if env_value is not None:
        try:
            verbosity = max(verbosity, int(env_value))
        except ValueError:
            raise exceptions.IpfsPublishException(f'The env. variable {ENV_NAME_VERBOSITY_LEVEL} has to hold integer!')

    if verbosity == -1:
        # Full silence: swallow anything written to stdout/stderr.
        sys.stdout = NoOutput()
        sys.stderr = NoOutput()
        return

    if verbosity == 0:
        root_level = logging.ERROR
    elif verbosity == 1:
        root_level = logging.INFO
    else:
        root_level = logging.DEBUG

    logging.basicConfig(stream=sys.stderr, level=root_level)

    # Chatty third-party packages stay muted until their verbosity threshold is reached.
    for package_name, threshold in VERBOSITY_PACKAGES.items():
        package_level = logging.DEBUG if verbosity >= threshold else logging.ERROR
        logging.getLogger(package_name).setLevel(package_level)
56 |
57 |
58 | #######################################################################
59 | # Misc
60 |
def flatten(obj: dict) -> dict:
    """
    Flatten nested dictionaries, it does not namespace the keys, so possible
    conflicts can arise. Conflicts are not allowed, so exception is raised if a
    key should be overwritten.

    :param obj: Possibly nested dictionary to be flattened.
    :raises KeyError: If there is already existing key in the new dict
    :return: New flat dictionary.
    """

    def _flatten(obj: dict, new_obj: dict) -> dict:
        for k, v in obj.items():
            if isinstance(v, dict):
                _flatten(v, new_obj)
            else:
                if k in new_obj:
                    # Fix: the exception was previously constructed but never
                    # raised, silently overwriting the conflicting key instead.
                    raise KeyError(f'Key \'{k}\' is already present in the dict!')

                new_obj[k] = v

        return new_obj

    return _flatten(obj, {})
85 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | stages:
2 | - name: test
3 | - name: deploy-docker
4 | if: tag IS present
5 | - name: deploy-pip
6 | if: tag IS present
7 | - name: deploy-mkdocs
8 | if: tag IS present
9 | - name: deploy-release
10 | if: tag IS present
11 |
12 | jobs:
13 | include:
14 | - stage: test
15 | name: "Tests"
16 | language: python
17 | python:
18 | - '3.8'
19 | install:
20 | - pip install -r test-requirements.txt
21 | - pip install codecov
22 | script:
23 | - pytest --cov publish
24 | after_success:
25 | - codecov
26 | - stage: deploy-pip
27 | name: "Publish to PyPi"
28 | language: python
29 | python: '3.8'
30 | script:
31 | - pip install -r test-requirements.txt
32 | deploy:
33 | provider: pypi
34 | user: AuHau-deploy
35 | distributions: sdist bdist_wheel
36 | skip_existing: true
37 | on:
38 | tags: true
39 | repo: AuHau/ipfs-publish
40 | password:
41 | secure: TG0Pe/gk6S5jtxoFXpFHqFD86MoDZjAaJxzRx1yroZVhWATEvwfR7KmVCY5HMjAg4hQvlHZV2r22MOPrEDo9lJuJ8Lz/07CQsebMprJ5WIYM/Gbhhs/3Ow93Pd0CMoRokDgCWiy4YSm5YFDRkTmN5DYDrIIasN0b4wsvfYMy1kZAZsoqWGLqjOU7hfsOqKBzmYVIcotKPlYDu7nyqKICiD418Xj3DL6f2bKaYQJ14zLijnh4XEGQeiOW4X5ltFrkQdhZjQ2es7tr4ByyBumcoqcW2VKftGjCl8T0EgWeUjY6szpPIDGiX3xMOHSjMnGJP0EWuo089B/LZN3cB195GqLCuHB1476yZefNo21jTrTAxprlfBHPyqhq6s5tqjW/Xz7k/4AKuI/h1qDXFWevyHvrLpV0x3ok7/9dmEX/D+mejwtXdTvOuFOJaCsP4HDjXyI+iUFsxpo5PnfKLZ3yKavI5am229SeUzkiX0xzRaYmPjN5IITTPwoqCrspKJhWmeQHUve983D+twvF28CocaGGI+u1H3IJ8E0z+JV5QebC/lJ9at8nU+bpUjkIlkv/JCpZh2EcZ4bdxohYlVvl6ZpQ9UaivBWk8A0pY1tmnPDdSScNlWMKqme2kokQDDXy58UapOIHrxnnXPVSjPP6loW9Vxgp9scx4YCnWtaLVr8=
42 |
43 | - stage: deploy-docker
44 | name: "Publish to Docker Hub"
45 | services:
46 | - 'docker'
47 | script: docker build -t auhau/ipfs-publish:latest -t auhau/ipfs-publish:$TRAVIS_TAG .
48 | deploy:
49 | provider: script
50 | script: echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin && docker push auhau/ipfs-publish:latest && docker push auhau/ipfs-publish:$TRAVIS_TAG
51 | on:
52 | tags: true
53 | repo: AuHau/ipfs-publish
54 | - stage: deploy-mkdocs
55 | name: "Publish documentation"
56 | language: python
57 | python: '3.8'
58 | install:
59 | - pip install -r test-requirements.txt
60 | script:
61 | - git config user.name "Adam Uhlir";
62 | - git config user.email "hello@adam-uhlir.me";
63 | - git remote add gh-token "https://${GH_TOKEN}@github.com/AuHau/ipfs-publish.git";
64 | - git fetch gh-token && git fetch gh-token gh-pages:gh-pages;
65 | - mkdocs gh-deploy -v --clean --remote-name gh-token;
66 | - stage: deploy-release
67 | name: "Create draft release"
68 | language: python
69 | python: '3.8'
70 | script: echo 'Lets do it!'
71 | deploy:
72 | provider: releases
73 | draft: true
74 | api_key: ${GH_TOKEN}
75 | on:
76 | tags: true
77 | repo: AuHau/ipfs-publish
--------------------------------------------------------------------------------
/publish/cloudflare.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import typing
3 |
4 | import CloudFlare
5 | import inquirer
6 |
7 | from publish import exceptions
8 |
9 | logger = logging.getLogger('publish.cloudflare')
10 |
11 |
def bootstrap_cloudflare() -> typing.Tuple[typing.Optional[str], typing.Optional[str]]:
    """
    Interactively collect the CloudFlare Zone ID and DNS record ID used for DNSLink updates.

    When a valid API token is configured the user picks the zone/record through
    the CloudFlare API; otherwise both IDs are entered manually.

    :return: Tuple of (zone_id, dns_id); (None, None) when the user opts out.
    """
    wants_dnslink = inquirer.shortcuts.confirm('Do you want to update DNSLink on Cloudflare?', default=True)
    if not wants_dnslink:
        return None, None

    cf = CloudFlare.CloudFlare()
    try:
        cf.user.tokens.verify()
    except CloudFlare.exceptions.CloudFlareAPIError:
        # No usable token -> fall back to fully manual entry of both IDs.
        print('>>> You don\'t have configured CloudFlare token!')
        print('>>> Either rerun this with proper configuration or specify ID of the Zone and TXT DNS entry which should be used for DNSLink.')
        print('>>> If you need help with the CloudFlare configuration see: https://github.com/cloudflare/python-cloudflare')
        zone_id = inquirer.shortcuts.text('Zone ID')
        dns_id = inquirer.shortcuts.text('DNS entry ID')
        return zone_id, dns_id

    print('>>> Lets find the right record you will want to update.')

    try:
        zones = cf.zones.get()
        zone_choices = [(zone['name'], zone['id']) for zone in zones]
        zone_id = inquirer.shortcuts.list_input('In which zone should be the DNSLink edited?', choices=zone_choices)
    except CloudFlare.exceptions.CloudFlareAPIError:
        # Token is valid but scoped too narrowly to enumerate zones.
        print('>>> Your token does not have sufficient rights to list zones!')
        zone_id = inquirer.shortcuts.text('Please provide Zone ID where should DNSLink be edited')

    if inquirer.shortcuts.confirm('Does the DNSLink TXT entry already exists?'):
        records = cf.zones.dns_records.get(zone_id, params={'type': 'TXT', 'per_page': 100})
        record_choices = [(f'{record["name"]}: {record["content"][:40]}', record['id']) for record in records]
        dns_id = inquirer.shortcuts.list_input('Which entry you want to use?', choices=record_choices)
    else:
        print('>>> Ok, lets create it then!')
        dns_name = inquirer.shortcuts.text('Where it should be placed (eq. full domain name with subdomain, it should probably start with _dnslink)')
        created_record = cf.zones.dns_records.post(zone_id, data={'name': dns_name, 'type': 'TXT', 'content': 'dnslink='})
        dns_id = created_record["id"]
        print(f'>>> Entry with ID {dns_id} created!')

    return zone_id, dns_id
46 |
47 |
# TODO: Verify that cf.user.tokens.verify() works with Email & Token
# TODO: Verify that ENV configured token does not leak to scripts
class CloudFlareMixin:
    """
    Mixin that can publish a new CID into a CloudFlare DNSLink TXT record.
    """

    dns_id: typing.Optional[str] = None
    """
    DNS ID of TXT record where the DNSLink should be updated.
    """

    zone_id: typing.Optional[str] = None
    """
    Zone ID of the DNS record where it will be modified.
    """

    def __init__(self, dns_id: str = None, zone_id: str = None):
        # The two IDs only make sense together -> reject "exactly one set".
        if bool(dns_id) != bool(zone_id):
            raise exceptions.ConfigException('You have to set both dns_id and zone_id! Only one does not make sense.')

        self.cf = CloudFlare.CloudFlare()
        self.dns_id = dns_id
        self.zone_id = zone_id

    def update_dns(self, cid: str):
        """
        Rewrite the configured TXT record so its DNSLink content points at the given CID.

        :param cid: New CID to be published.
        :raises exceptions.ConfigException: When dns_id/zone_id are not configured.
        :raises exceptions.PublishingException: When CloudFlare access is not configured.
        """
        if not self.dns_id or not self.zone_id:
            raise exceptions.ConfigException('dns_id and zone_id not set. Not possible to update DNS!')

        try:
            self.cf.user.tokens.verify()
        except CloudFlare.exceptions.CloudFlareAPIError:
            raise exceptions.PublishingException('CloudFlare access not configured!')

        logger.info('Publishing new CID to CloudFlare DNSLink')

        record = self.cf.zones.dns_records.get(self.zone_id, self.dns_id)
        record['content'] = f'dnslink={cid}'
        self.cf.zones.dns_records.put(self.zone_id, self.dns_id, data=record)
84 |
85 |
--------------------------------------------------------------------------------
/publish/http.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import hmac
3 | import logging
4 | import sys
5 | import typing
6 |
7 | from quart import Quart, request, abort
8 | from quart.json import dumps
9 |
10 | from publish import config as config_module, publishing, exceptions
11 |
# Quart application serving the webhook endpoints.
app = Quart(__name__)
# Root logging goes to stderr at DEBUG level.
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)

logger = logging.getLogger('publish.http')
16 |
17 |
@app.route('/publish/<repo_name>', methods=['POST'])
async def publish_endpoint(repo_name):
    """
    Endpoint for Git provider's webhook.

    The route needs the <repo_name> URL converter, otherwise Quart would call
    this view without the repo_name argument it requires (github_test.sh posts
    to /publish/<repo_name>).

    :param repo_name: Name of the configured repo, taken from the URL.
    :return:
    """
    config = config_module.Config.get_instance()
    if repo_name not in config.repos:
        # Unknown repo name -> reject the webhook call.
        abort(400)

    repo = config.repos[repo_name]
    handler = handler_dispatcher(repo)

    resp = await handler.handle_request(request)

    # Publishing may have mutated the repo's state -> persist it.
    config.save()
    return resp
37 |
38 |
def handler_dispatcher(repo: typing.Union[publishing.GenericRepo, publishing.GithubRepo]) -> 'GenericHandler':
    """
    Pick the proper Handler based on what kind of repo the request is directed to.

    Dispatch is on the exact class (type() identity), so a subclass does not
    fall back to its parent's handler.

    :param repo: Name of the repo
    :raises exceptions.HttpException: For a repo class without a registered handler.
    :return:
    """
    handler_classes = {
        publishing.GenericRepo: GenericHandler,
        publishing.GithubRepo: GithubHandler,
    }

    handler_class = handler_classes.get(type(repo))
    if handler_class is None:
        raise exceptions.HttpException('Unknown Repo\'s class!')

    return handler_class(repo)
51 |
52 |
class GenericHandler:
    """
    Handler that serves request for Generic repos.

    Authentication: the repo's secret has to arrive as the 'secret' GET argument.
    """

    def __init__(self, repo: publishing.GenericRepo):
        self.repo = repo

    async def handle_request(self, req: request) -> str:
        """Verify the secret and schedule publishing of the repo."""
        provided_secret = req.args.get('secret')

        if provided_secret != self.repo.secret:
            logger.warning(f'Request for generic repo \'{self.repo.name}\' did not have valid secret parameter!')
            abort(403)

        # Publishing is blocking (git clone etc.) -> run it off the event loop.
        # noinspection PyAsyncCall
        asyncio.get_event_loop().run_in_executor(None, self.repo.publish_repo)

        return 'OK'
76 |
77 |
class GithubHandler(GenericHandler):
    """
    Handler that serves request for GitHub repos.

    It verifies that the request is correctly signed with the repo's secret.
    """

    def __init__(self, repo: publishing.GithubRepo):
        super().__init__(repo)

    def is_data_signed_correctly(self, data, signature) -> bool:
        """
        Check the GitHub HMAC-SHA1 signature of the payload.

        :param data: Raw request body.
        :param signature: Hex digest from the X-Hub-Signature header (without the 'sha1=' prefix).
        :return: True when the payload was signed with the repo's secret.
        """
        # HMAC requires the key to be bytes, but data is string
        mac = hmac.new(self.repo.secret.encode('utf-8'), msg=data, digestmod='sha1')

        # compare_digest is constant-time -> prevents timing attacks on the check.
        if not hmac.compare_digest(str(mac.hexdigest()), signature):
            return False

        return True

    async def handle_request(self, req: request) -> str:
        """
        Validate a GitHub webhook request and schedule publishing for push events.

        :param req: The incoming request.
        :return: Response body.
        """
        header_signature = req.headers.get('X-Hub-Signature')
        if header_signature is None:
            logger.warning(f'Request for GitHub repo \'{self.repo.name}\' did not have X-Hub-Signature header!')
            abort(403)

        sha_name, signature = header_signature.split('=')
        if sha_name != 'sha1':
            logger.warning(f'Request for GitHub repo \'{self.repo.name}\' was not signed with SHA1 function!')
            abort(501)

        if not self.is_data_signed_correctly(await req.data, signature):
            logger.warning(f'Request for GitHub repo \'{self.repo.name}\' did not have valid signature!')
            abort(403)

        # Ping-Pong messages
        event = req.headers.get('X-GitHub-Event', 'ping')
        if event == 'ping':
            return dumps({'msg': 'pong'})

        if event != 'push':
            logger.warning(f'Request for GitHub repo \'{self.repo.name}\' was not result of push event!')
            abort(501)

        if self.repo.branch:
            # Fix: use the `req` argument consistently; this section previously
            # reached for the global `request` proxy instead.
            if req.is_json:
                data = await req.get_json()
            else:
                data = await req.form

            expected_ref = f'refs/heads/{self.repo.branch}'
            if data['ref'] != expected_ref:
                logger.debug(f'Received push-event for \'{self.repo.name}\', but for branch \'{data["ref"]}\' '
                             f'instead of expected \'{expected_ref}\' - ignoring the event')
                abort(204, 'Everything OK, but not following this branch. Build skipped.')

        # Publishing is blocking -> run it off the event loop.
        loop = asyncio.get_event_loop()

        # noinspection PyAsyncCall
        loop.run_in_executor(None, self.repo.publish_repo)

        return 'OK'
138 |
--------------------------------------------------------------------------------
/tests/unit/test_publishing.py:
--------------------------------------------------------------------------------
1 | import inspect
2 | import pathlib
3 | import shutil
4 | import subprocess
5 |
6 | import git
7 | import ipfshttpclient
8 | import pytest
9 |
10 | from publish import publishing, exceptions, PUBLISH_IGNORE_FILENAME
11 | from .. import factories
12 |
# Test matrix for TestRepo.test_remove_ignored_files. Each row is:
#   (ignore-file glob,
#    relative paths created inside the repo before the run,
#    expected number of unlinked files OR an exception class the call raises,
#    expected number of removed directories)
IGNORE_FILE_TEST_SET = (
    ('*.a', ('some.a', 'a', 'folder/b.a'), 1, 0),
    ('**/*.b', ('b', 'some.b', 'folder/b', 'folder/some.b', 'some/other/folder/some.b'), 3, 0),
    ('/another_file', (), NotImplementedError, 0),
    ('some_dir', ('some_dir/file',), 0, 1),
    ('non_existing_file', (), 0, 0),
)
# Candidate cases not covered yet (previously left as a dead string literal):
#   /../../outside_file
#   non_existing_file
23 |
24 |
class TestRepo:
    """
    Unit tests for publishing.GenericRepo's publishing flow with Git cloning,
    the IPFS client and subprocess execution all mocked out.
    """

    def test_publish_repo_basic(self, mocker):
        """Happy path: repo is cloned, added to IPFS and the resulting address stored."""
        mocker.patch.object(git.Repo, 'clone_from')
        mocker.patch.object(shutil, 'rmtree')

        ipfs_client_mock = mocker.Mock(spec=ipfshttpclient.Client)
        ipfs_client_mock.add.return_value = [{'Hash': 'some-hash'}]

        mocker.patch.object(ipfshttpclient, 'connect')
        ipfshttpclient.connect.return_value = ipfs_client_mock

        repo: publishing.GenericRepo = factories.RepoFactory()
        repo.publish_repo()

        # Added with pinning enabled; no previous pin exists to be removed.
        ipfs_client_mock.add.assert_called_once_with(mocker.ANY, recursive=True, pin=True)
        ipfs_client_mock.pin.rm.assert_not_called()
        assert repo.last_ipfs_addr == '/ipfs/some-hash/'

    def test_publish_repo_bins(self, mocker):
        """Build and after-publish binaries are both invoked through the shell."""
        mocker.patch.object(git.Repo, 'clone_from')
        mocker.patch.object(shutil, 'rmtree')

        ipfs_client_mock = mocker.Mock(spec=ipfshttpclient.Client)
        ipfs_client_mock.add.return_value = [{'Hash': 'some-hash'}]

        mocker.patch.object(ipfshttpclient, 'connect')
        ipfshttpclient.connect.return_value = ipfs_client_mock

        # Return code 0 simulates both binaries succeeding.
        mocker.patch.object(subprocess, 'run')
        subprocess.run.return_value = subprocess.CompletedProcess(None, 0)

        repo: publishing.GenericRepo = factories.RepoFactory(build_bin='some_cmd', after_publish_bin='some_other_cmd')
        repo.publish_repo()

        # The after-publish binary receives the new IPFS address as argument.
        assert subprocess.run.call_count == 2
        subprocess.run.assert_called_with(f'some_other_cmd /ipfs/some-hash/', shell=True, capture_output=True)
        subprocess.run.assert_any_call(f'some_cmd ', shell=True, capture_output=True)

    def test_publish_repo_bins_fails(self, mocker):
        """A failing binary (non-zero return code) aborts publishing with RepoException."""
        mocker.patch.object(git.Repo, 'clone_from')
        mocker.patch.object(shutil, 'rmtree')

        ipfs_client_mock = mocker.Mock(spec=ipfshttpclient.Client)
        ipfs_client_mock.add.return_value = [{'Hash': 'some-hash'}]

        mocker.patch.object(ipfshttpclient, 'connect')
        ipfshttpclient.connect.return_value = ipfs_client_mock

        # Return code 1 simulates the build binary failing.
        mocker.patch.object(subprocess, 'run')
        subprocess.run.return_value = subprocess.CompletedProcess(None, 1)

        repo: publishing.GenericRepo = factories.RepoFactory(build_bin='some_cmd', after_publish_bin='some_other_cmd')

        with pytest.raises(exceptions.RepoException):
            repo.publish_repo()

    def test_publish_rm_old_pin(self, mocker):
        """Re-publishing a repo unpins the previously published content."""
        mocker.patch.object(git.Repo, 'clone_from')
        mocker.patch.object(shutil, 'rmtree')

        ipfs_client_mock = mocker.Mock(spec=ipfshttpclient.Client)
        ipfs_client_mock.add.return_value = [{'Hash': 'some-hash'}]

        mocker.patch.object(ipfshttpclient, 'connect')
        ipfshttpclient.connect.return_value = ipfs_client_mock

        repo: publishing.GenericRepo = factories.RepoFactory(last_ipfs_addr='some_hash')
        repo.publish_repo()

        ipfs_client_mock.pin.rm.assert_called_once_with('some_hash')

    @pytest.mark.parametrize(('glob', 'paths_to_make', 'expected_unlink', 'expected_rmtree'), IGNORE_FILE_TEST_SET)
    def test_remove_ignored_files(self, glob, paths_to_make, expected_unlink, expected_rmtree, tmp_path: pathlib.Path, mocker):
        """Files/dirs matching the ignore file's globs are removed; see IGNORE_FILE_TEST_SET."""
        mocker.spy(pathlib.Path, 'unlink')
        mocker.spy(shutil, 'rmtree')

        # Build a fake checked-out repo: ignore file, .git dir and the fixtures.
        (tmp_path / PUBLISH_IGNORE_FILENAME).write_text(glob)

        (tmp_path / '.git').mkdir()

        for path in paths_to_make:
            path = tmp_path / path
            path.parent.mkdir(parents=True, exist_ok=True)
            path.touch()

        # An exception class in `expected_unlink` marks an unsupported glob.
        if inspect.isclass(expected_unlink) and issubclass(expected_unlink, Exception):
            with pytest.raises(expected_unlink):
                repo: publishing.GenericRepo = factories.RepoFactory()
                repo._remove_ignored_files(tmp_path)
        else:
            repo: publishing.GenericRepo = factories.RepoFactory()
            repo._remove_ignored_files(tmp_path)

            # -1 because the method removes the ignore file on its own
            assert pathlib.Path.unlink.call_count - 1 == expected_unlink

            # =1 because of removing .git folder
            assert shutil.rmtree.call_count - 1 == expected_rmtree
123 |
--------------------------------------------------------------------------------
/publish/config.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import os
4 | import pathlib
5 | import pprint
6 | import typing
7 |
8 | import click
9 | import inquirer
10 | import ipfshttpclient
11 | import toml
12 |
13 | from publish import ENV_NAME_CONFIG_PATH, exceptions, ENV_NAME_IPFS_HOST, ENV_NAME_IPFS_PORT, ENV_NAME_IPFS_MULTIADDR
14 |
# Module-level logger for the configuration subsystem.
logger = logging.getLogger('publish.config')
16 |
17 |
class Config:
    """
    Representation of the ipfs-publish TOML configuration file.

    Holds the general settings (``data``) and the configured repos
    (``repos``), lazily connects to the IPFS daemon and is used as a
    process-wide cached singleton through :meth:`get_instance`.
    """

    # Fallback location used when no path is given explicitly or via env var.
    DEFAULT_CONFIG_PATH = os.path.expanduser('~/.ipfs_publish.toml')

    def __init__(self, path):  # type: (pathlib.Path) -> None
        """
        :param path: Path to an existing TOML configuration file.
        :raises exceptions.ConfigException: When the file does not exist.
        """
        if not path.exists():
            raise exceptions.ConfigException('The config was not found on this path! {}'.format(path))

        data = toml.load(path)
        logger.debug(f'Loaded configuration:\n{pprint.pformat(data)}')
        self.data, self.repos = self._load_data(data)

        self.loaded_path = path
        # Cached IPFS client; created on first access of the `ipfs` property.
        self._ipfs = None

    def _load_data(self,
                   data):  # type: (typing.Dict[str, typing.Any]) -> typing.Tuple[dict, typing.Dict[str, publishing.Repo]]
        """
        Split the raw TOML data into general settings and repo instances.

        :param data: Parsed TOML data; the 'repos' key is popped from it.
        :return: Tuple of (remaining general settings, repos keyed by name).
        """
        # Imported locally to avoid a circular import with publish.publishing.
        from publish import publishing

        self._verify_data(data)

        repos: typing.Dict[str, publishing.GenericRepo] = {}
        for value in data.pop('repos', {}).values():
            # Repo class (generic vs. provider-specific) is chosen by the URL.
            repo_class = publishing.get_repo_class(value['git_repo_url'])
            repo = repo_class.from_toml_dict(value, self)
            repos[repo.name] = repo

        return data, repos

    def _verify_data(self, data):
        """Validate that the mandatory top-level settings are present."""
        if not data.get('host') or not data.get('port'):
            raise exceptions.ConfigException('\'host\' and \'port\' are required items in configuration file!')

    def save(self):
        """Serialize the current settings and repos back to the loaded file."""
        # json round-trip deep-copies the data so `self.data` is not mutated
        # by the 'repos' key added below.
        data = json.loads(json.dumps(self.data))
        data['repos'] = {}

        for repo in self.repos.values():
            data['repos'][repo.name] = repo.to_toml_dict()

        with self.loaded_path.open('w') as f:
            toml.dump(data, f)

    def __getitem__(self, item):
        # Unknown keys yield None rather than raising KeyError.
        return self.data.get(item)  # TODO: [Q] Is this good idea? Return None instead of KeyError?

    def __setitem__(self, key, value):
        self.data[key] = value

    @property
    def webhook_base(self):
        """Base URL under which the webhook endpoints are reachable (port omitted for 80)."""
        return 'http://{}{}'.format(self['host'], f':{self["port"]}' if self['port'] != 80 else '')

    @property
    def ipfs(self):  # type: () -> ipfshttpclient.Client
        """
        Lazily created and cached client connected to the IPFS daemon.

        Connection parameters are resolved from environment variables first,
        then from the config file's [ipfs] section; an explicit multiaddr
        takes precedence over host/port.
        """
        if self._ipfs is None:
            if self['ipfs'] is not None:
                host = os.environ.get(ENV_NAME_IPFS_HOST) or self['ipfs'].get('host')
                port = os.environ.get(ENV_NAME_IPFS_PORT) or self['ipfs'].get('port')
                multiaddr = os.environ.get(ENV_NAME_IPFS_MULTIADDR) or self['ipfs'].get('multiaddr')
            else:
                multiaddr = os.environ.get(ENV_NAME_IPFS_MULTIADDR)
                host = os.environ.get(ENV_NAME_IPFS_HOST)
                port = os.environ.get(ENV_NAME_IPFS_PORT)

            # Hack to allow cross-platform Docker to reference the Docker host's machine with $HOST_ADDR
            if host and host.startswith('$'):
                logger.info(f'Resolving host name from environment variable {host}')
                host = os.environ[host[1:]]

            if host == 'localhost':
                host = '127.0.0.1'

            if not multiaddr:
                multiaddr = f'/ip4/{host}/tcp/{port}/http'

            logger.info(f'Connecting and caching to IPFS host \'{multiaddr}\'')
            self._ipfs = ipfshttpclient.connect(multiaddr)

        return self._ipfs

    @classmethod
    def get_instance(cls, path=None):  # type: (typing.Optional[pathlib.Path]) -> Config
        """
        Method that resolves from where the config should be loaded.

        Resolution order: cached instance, explicit ``path`` argument,
        the config-path env variable, then :attr:`DEFAULT_CONFIG_PATH`
        (bootstrapped interactively when missing).

        :return: The process-wide cached Config instance.
        """
        if hasattr(cls, '_instance'):
            instance = cls._instance

            # The cached instance wins even when a different path is requested.
            if path is not None and instance.loaded_path != path:
                logger.warning('Somebody is trying to load config with different path "{}", but we already have cached'
                               'instance with path "{}"'.format(path, instance.loaded_path))

            return instance

        if path is None:
            if ENV_NAME_CONFIG_PATH in os.environ:
                path = pathlib.Path(os.environ[ENV_NAME_CONFIG_PATH])
            else:
                path = pathlib.Path(cls.DEFAULT_CONFIG_PATH)

            # Default config should exist, if not lets create it.
            if not path.exists():
                logger.info(f'Config on the path {path} was not found! Bootstrapping it there!')
                cls.bootstrap(path)

        logger.info('Loading and caching config from file: {}'.format(path))
        cls._instance = cls(path)
        return cls._instance

    @classmethod
    def bootstrap(cls, path):
        """Interactively ask for the basic settings and write them to ``path``."""
        click.echo('Welcome!\nLet\'s bootstrap some basic configuration:')
        host = inquirer.shortcuts.text('Set web server\'s host', default='localhost')
        port = int(inquirer.shortcuts.text('Set web server\'s port', default='8000', validate=lambda _, x: str(x).isdigit()))

        ipfs_multiaddr = inquirer.shortcuts.text('Set IPFS\'s multiaddr', default='/ip4/127.0.0.1/tcp/5001/http')

        with path.open('w') as f:
            toml.dump({'host': host, 'port': port, 'ipfs': {'multiaddr': ipfs_multiaddr }}, f)

        click.echo('Bootstrap successful! Let\'s continue with your original command.\n')
141 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # IPFS Publish
2 |
3 | 
4 |
5 | [](https://badge.fury.io/py/ipfs-publish)
6 | [](https://pypi.org/project/ipfs-publish)
7 | [](https://pypi.org/project/ipfs-publish/)
8 | [](https://hub.docker.com/r/auhau/ipfs-publish)
9 | [](https://codecov.io/gh/AuHau/ipfs-publish)
10 | [](https://app.codacy.com/app/AuHau/ipfs-publish)
11 | [](https://pyup.io/repos/github/AuHau/ipfs-publish/)
12 |
13 |
14 | > Continuous Delivery of static websites from Git to IPFS
15 |
16 | ## About
17 |
18 | This is a tool that aims to enable automatic publishing of static webpages from Git repositories into IPFS.
19 | It consists of two parts: small web server and management CLI.
20 |
21 | Web server exposes an endpoint which you use as your Git's webhook. When the hook is invoked, it clones
22 | your repo, builds it (if needed), adds it to the IPFS node (pins it if configured) and publishes the new IPFS address
23 | under the configured IPNS name.
24 |
25 | CLI is in place to manage the repos.
26 |
27 | ### Features
28 |
29 | * Ignore files - `.ipfs_publish_ignore` file specify entries that should be removed before adding the repo to IPFS
30 | * Publish directory - you can publish only specific sub-directory inside the repo
31 | * Publish specific branch - you can specify which branch should be published from the repo
32 | * Build script - before adding to IPFS you can run script/binary inside the cloned repo
33 | * After publish script - after the publishing to IPFS, this script is run with argument of the created IPFS address
34 | * Direct DNSLink update for CloudFlare DNS provider
35 |
36 | ### Git providers
37 |
38 | Currently the webhook supports generic mode, where the repo's **secret** is passed through as URL's parameter.
39 |
40 | There is also special mode for GitHub, where the **secret** should be configured as part of the Webhook's configuration.
41 |
42 | ## Warning
43 |
44 | **This tool is not meant as public service and only trusted Git repos should be used with it.
45 | It can introduce serious security risk into your system as the runtime environment for the scripts is not
46 | isolated from rest of your machine!**
47 |
48 | ## Install
49 |
50 | ### Requirements
51 |
52 | * Python 3.7 and higher
53 | * Git
54 | * go-ipfs daemon (tested with version 0.4.23)
55 | * UNIX-Like machine with public IP
56 |
57 | ### pip
58 |
59 | You can install ipfs-publish directly on your machine using `pip`:
60 |
61 | ```shell
62 | $ pip install ipfs-publish
63 | ```
64 |
65 | Then you can use the command `ipfs-publish` to manage your repos and/or start the webhook's server.
66 |
67 | ### Docker
68 |
69 | There is an official Docker image built with the name: `auhau/ipfs-publish`
70 |
71 | Easiest way to run ipfs-publish is with docker-compose. Here is example for its configuration:
72 |
73 | ```yaml
74 | version: '3'
75 |
76 | services:
77 | ipfs:
78 | image: ipfs/go-ipfs:v0.4.23
79 | volumes:
80 | - /data/ipfs # or you can mount it directly to some directory on your system
81 | ipfs-publish:
82 | image: auhau/ipfs-publish
83 | environment:
84 | IPFS_PUBLISH_CONFIG: /data/ipfs_publish/config.toml
85 | IPFS_PUBLISH_VERBOSITY: 3
86 | IPFS_PUBLISH_IPFS_HOST: ipfs
87 | IPFS_PUBLISH_IPFS_PORT: 5001
88 | volumes:
89 | - /data/ipfs_publish
90 | depends_on:
91 | - ipfs
92 | ports:
93 | - 8080:8000
94 | ```
95 |
96 | For more information see [documentation](https://ipfs-publish.uhlir.dev/#docker).
97 |
98 | ## Usage
99 |
100 | ```shell
101 | # Add new repo
102 | $ ipfs-publish add
103 | [?] Git URL of the repo: https://github.com/auhau/auhau.github.io
104 | [?] Name of the new repo: github_com_auhau_auhau_github_io
105 | [?] Do you want to publish to IPNS? (Y/n):
106 | [?] Path to build binary, if you want to do some pre-processing before publishing:
107 | [?] Path to after-publish binary, if you want to do some actions after publishing:
108 | [?] Directory to be published inside the repo. Path related to the root of the repo: /
109 |
110 | Successfully added new repo!
111 | Use this URL for you webhook: http://localhost:8080/publish/github_com_auhau_auhau_github_io
112 | Also set this string as your hook's Secret: NIHT4785CVFT358GFE08RDAZG
113 | Your IPNS address: /ipns/QmRTqaW3AJJXmKyiNT7MqqZ4VjGtNNxPyTkgo3Q7pmoCeX/
114 |
115 | # List current enabled repos
116 | $ ipfs-publish list
117 | github_com_auhau_auhau_github_io
118 |
119 | # Show details of repo
120 | $ ipfs-publish show github_com_auhau_auhau_github_io
121 | github_com_auhau_auhau_github_io
122 | Git URL: https://github.com/auhau/auhau.github.io
123 | Secret: EAHJ43UYT7LUEM4QFRZ4IFAXL
124 | IPNS key: ipfs_publishg_github_com_auhau_auhau_github_io
125 | IPNS lifetime: 24h
126 | IPNS ttl: 15m
127 | IPNS address: /ipns/QmRTqaW3AJJXmKyiNT7MqqZ4VjGtNNxPyTkgo3Q7pmoCeX/
128 | Last IPFS address: None
129 | Webhook address: http://localhost:8080/publish/github_com_auhau_auhau_github_io
130 |
131 | # You can manually publish repo
132 | $ ipfs-publish publish github_com_auhau_auhau_github_io
133 |
134 | # Starts HTTP server & IPNS republishing service
135 | $ ipfs-publish server &
136 | Running on http://localhost:8080 (CTRL + C to quit)
137 | ```
138 |
139 | ## Contributing
140 |
141 | Feel free to dive in, contributions are welcomed! [Open an issue](https://github.com/AuHau/ipfs-publish/issues/new) or submit PRs.
142 |
143 | For PRs and tips about development please see [contribution guideline](https://github.com/AuHau/ipfs-publish/blob/master/CONTRIBUTING.md).
144 |
145 | ## License
146 |
147 | [MIT © Adam Uhlir](https://github.com/AuHau/ipfs-publish/blob/master/LICENSE)
--------------------------------------------------------------------------------
/publish/cli.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | import pathlib
4 | import sys
5 | import traceback
6 | import typing
7 |
8 | import click
9 | import click_completion
10 |
11 | from publish import publishing, exceptions, __version__, helpers, config as config_module, \
12 | ENV_NAME_PASS_EXCEPTIONS
13 |
# Module-level logger for the CLI.
logger = logging.getLogger('publish.cli')
# Enables shell tab-completion support for the Click commands.
click_completion.init()
16 |
17 |
def entrypoint(args: typing.Sequence[str], obj: typing.Optional[dict] = None):
    """
    CLI entry point, where exceptions are handled.

    ipfs-publish's own exceptions are always reported and converted to exit
    code 1; other exceptions can be re-raised for debugging when the
    pass-through environment variable is set to 'True'.

    :param args: Command-line arguments, without the program name.
    :param obj: Optional initial Click context object.
    """
    try:
        cli(args, obj=obj or {})
    except exceptions.IpfsPublishException as e:
        logger.error(str(e).strip())
        logger.debug(traceback.format_exc())
        # sys.exit() is always available, unlike the site-injected exit().
        sys.exit(1)
    except Exception as e:
        if os.environ.get(ENV_NAME_PASS_EXCEPTIONS) == 'True':
            raise

        logger.error(str(e).strip())
        logger.debug(traceback.format_exc())
        sys.exit(1)
35 |
36 |
@click.group()
@click.option('--quiet', '-q', is_flag=True, help="Don't print anything")
@click.option('--verbose', '-v', count=True, help="Prints additional info. More Vs, more info! (-vvv...)")
@click.option('--config', '-c', type=click.Path(dir_okay=False), help="Path to specific config file")
@click.version_option(__version__)
@click.pass_context
def cli(ctx, quiet, verbose, config):
    """
    Management interface for ipfs_publish, that allows adding/listing/removing supported repos.

    Currently only public repositories are allowed. There is support for generic Git provider, that has to have at least
    support for webhooks. There is also specific implementation for GitHub provider as it can sign the webhook's request
    with secret.

    The tool ships with HTTP server, that needs to be running to accept the webhook's calls.
    """
    # --quiet wins over any number of -v flags.
    verbosity = -1 if quiet else verbose
    helpers.setup_logging(verbosity)

    # Load (or bootstrap) the config and share it with all sub-commands.
    config_path = pathlib.Path(config) if config else None
    ctx.obj['config'] = config_module.Config.get_instance(config_path)
56 |
57 |
@cli.command(short_help='Add new repo')
@click.option('--name', '-n', help='Name of the repo')
@click.option('--url', '-u', 'git_repo_url', help='URL of the Git repo')
@click.option('--branch', '-r', help='Git branch which should be checked out. Default: default branch')
@click.option('--ipns-key', '-k', help='Key name to be used for signing IPNS link')
@click.option('--ipns-lifetime', '-l', help='For how long IPNS record should be valid (a.k.a. lifetime). Default: 24h')
@click.option('--ipns-ttl', '-t', help='For how long IPNS record should be cached (a.k.a. ttl). Default: 15m')
@click.option('--pin/--no-pin', default=True, help='Whether the files added to IPFS should be pinned. Default: True')
@click.option('--republish/--no-republish', default=True, help='Whether the IPNS record should be periodically '
                                                               'republished. Default: True')
@click.option('--build-bin', '-b',
              help='Binary which should be executed before clean up of ignored files & publishing.')
@click.option('--after-publish-bin', '-a', help='Binary which should be executed after publishing.')
@click.option('--publish-dir', '-d', help='Directory that should be published. Default is root of the repo.')
@click.pass_context
def add(ctx, **kwargs):
    """
    Command that add new repo into the list of publishing repos. The values can be either specified using
    CLI's options, or using interactive bootstrap.

    If there is HTTP server running, it needs to be restarted in order the changes to be applied.
    """
    config: config_module.Config = ctx.obj['config']

    # Create the repo (interactively asking for anything not given as option),
    # register it and persist the config right away.
    repo = publishing.bootstrap_repo(config, **kwargs)
    config.repos[repo.name] = repo
    config.save()

    click.secho('\nSuccessfully added new repo!', fg='green')
    click.echo('Use this URL for you webhook: ' + click.style(str(repo.webhook_url), fg='yellow'))

    # Only GitHub hooks have a dedicated secret field in their settings.
    if isinstance(repo, publishing.GithubRepo):
        secret_styled = click.style(repo.secret, fg='yellow')
        click.echo(f'Also set this string as your hook\'s Secret: {secret_styled}')

    # IPNS details are shown only when IPNS publishing was enabled.
    if repo.ipns_key is not None:
        addr_styled = click.style(repo.ipns_addr, fg='yellow')
        click.echo(f'Your IPNS address: {addr_styled}')
96 |
97 |
@cli.command('list', short_help='List all enabled repos')
@click.pass_context
def listing(ctx):
    """
    List names of all repos
    """
    # One repo name per line.
    repos = ctx.obj['config'].repos
    for repo in repos.values():
        click.echo(repo.name)
108 |
109 |
@cli.command(short_help='Shows details for a repo')
@click.argument('name')
@click.pass_context
def show(ctx, name):
    """
    Displays details for repo with NAME, that is passed as argument.
    """
    config: config_module.Config = ctx.obj['config']
    repo: publishing.GenericRepo = config.repos.get(name)

    if repo is None:
        click.secho('Unknown repo!', fg='red')
        # sys.exit() is always available, unlike the site-injected exit().
        sys.exit(1)

    click.secho(repo.name, fg='green')
    print_attribute('Git URL', repo.git_repo_url)
    print_attribute('Secret', repo.secret)
    print_attribute('IPNS key', repo.ipns_key)
    print_attribute('IPNS lifetime', repo.ipns_lifetime)
    print_attribute('IPNS ttl', repo.ipns_ttl)
    print_attribute('IPNS address', repo.ipns_addr)
    print_attribute('Last IPFS address', repo.last_ipfs_addr)
    print_attribute('Webhook address', repo.webhook_url)
133 |
134 |
@cli.command(short_help='Remove repo')
@click.option('--keep-pinned', is_flag=True, help='Will not remove the repo\'s content from the IPFS node')
@click.option('--keep-ipns', is_flag=True, help='Will not remove the IPNS key from the IPFS node')
@click.argument('name')
@click.pass_context
def rm(ctx, name, keep_pinned=False, keep_ipns=False):
    """
    Removes the repo from the IPFS Publish.

    It will by default cleanup the resources inside IPFS node. Eq. remove IPNS key and unpin the content. You can keep
    the resources if you use the specific options, just be aware that keeping the IPNS key, will resolve in continuation
    of republishing of the IPNS entry. And keeping pinned content will result in still serving the file.
    """
    config: config_module.Config = ctx.obj['config']
    repo: publishing.GenericRepo = config.repos.get(name)

    if repo is None:
        click.secho('Unknown repo!', fg='red')
        # sys.exit() is always available, unlike the site-injected exit().
        sys.exit(1)

    # Repos added without IPNS publishing have no key to remove.
    if not keep_ipns and repo.ipns_key:
        config.ipfs.key_rm(repo.ipns_key)

    if not keep_pinned and repo.last_ipfs_addr:
        config.ipfs.pin_rm(repo.last_ipfs_addr)

    del config.repos[name]
    config.save()

    click.echo('Repo successfully removed!')
165 |
166 |
@cli.command(short_help='Publish repo')
@click.argument('name')
@click.pass_context
def publish(ctx, name):
    """
    Will immediately publish repo based on its configuration.
    """
    config: config_module.Config = ctx.obj['config']
    repo: publishing.GenericRepo = config.repos.get(name)

    if repo is None:
        click.secho('Unknown repo!', fg='red')
        # sys.exit() is always available, unlike the site-injected exit().
        sys.exit(1)

    repo.publish_repo()
    # Persist the new last-published IPFS address.
    config.save()

    click.echo('Repo successfully published!')
185 |
186 |
@cli.command(short_help='Starts HTTP server')
@click.option('--port', '-p', type=int, help='Port number')
@click.option('--host', '-h', help='Hostname on which the server will listen')
@click.pass_context
def server(ctx, host=None, port=None):
    """
    Command that starts webserver and republishing service.

    Webserver expose endpoint for the Webhook calls. Republishing service serves for refreshing IPNS entry, that have
    limited lifetime.

    When any configuration of the ipfs_publish is changed this command needs to be restarted.
    """
    # Imported lazily so the web framework is loaded only for this command.
    from publish import http
    config: config_module.Config = ctx.obj['config']
    app = http.app

    # CLI options take precedence over the config file, then defaults apply.
    host = host or config['host'] or 'localhost'
    port = port or config['port'] or 8080

    logger.info(f'Launching server on {host}:{port}')
    app.run(host, port)
209 |
210 |
def print_attribute(name, value):
    """Echo a ``name: value`` line with the name rendered dimmed."""
    label = click.style(name, fg='white', dim=1)
    click.echo(f'{label}: {value}')
216 |
217 |
def main():
    """Console-script entry point; forwards CLI arguments to :func:`entrypoint`."""
    entrypoint(sys.argv[1:])


if __name__ == '__main__':
    main()
224 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # Welcome to IPFS Publish!
2 |
3 | 
4 |
5 | ## About
6 |
7 | This is a tool that aims to enable automatic publishing of static webpages from Git repositories into IPFS.
8 | It consists of two parts: small web server and management CLI.
9 |
10 | Web server exposes an endpoint which you use as your Git's webhook. When the hook is invoked, it clones
11 | your repo, builds it (if needed), adds it to the IPFS node (pins it if configured) and publishes the new IPFS address
12 | under the configured IPNS name.
13 |
14 | CLI is in place to manage the repos.
15 |
16 | ## Installation
17 |
18 | ### Requirements
19 |
20 | * Python 3.7 and higher
21 | * Git
22 | * go-ipfs daemon
23 | * UNIX-Like machine with public IP
24 |
25 | !!! warning "Web server warning"
26 | This tool is shipped with a basic web server that is mainly meant for a development environment
27 | and is a single-threaded based, hence it is not meant for heavy load. As I am not expecting
28 | that this tool would scale big it should be sufficient to use. If you would have the need you can
29 | deploy it with some production-scale webserver that supports the `ASGI` protocol. The `ASGI` app
30 | can be found in `publish.http:app` package.
31 |
32 | ### pip
33 |
34 | You can install ipfs-publish directly on your machine using `pip`:
35 |
36 | ```shell
37 | $ pip install ipfs-publish
38 | ```
39 |
40 | Then you can use the command `ipfs-publish` to manage your repos and/or start the webhook's server.
41 |
42 | ### Docker
43 |
44 | If you plan to let other users use your ipfs-publish instance, then it might be a good idea to run it inside
45 | Docker, for at least some level of isolation from the rest of your system. **But it is a bit more complicated to set up.**
46 |
47 | There is an automatically built official Docker image: `auhau/ipfs-publish`. The image exposes port 8080, under which the
48 | webhook server is listening for incoming connections. And volume on path `/data/ipfs_publish/` to persist the configuration.
49 | This image does not have IPFS daemon, therefore you have to provide connectivity to the daemon of your choice.
50 |
51 | !!! info "go-ipfs version"
52 | ipfs-publish is tested with go-ipfs version **v0.4.23**! The IPFS HTTP Client should support the version up to v0.7.0, but I have not tested it yet.
53 |
54 | Easiest way to deploy ipfs-publish is using `docker-compose`, together with `go-ipfs` as container.
55 | You can use this YAML configuration for it:
56 |
57 | ```yaml
58 | version: '3'
59 |
60 | services:
61 | ipfs:
62 | image: ipfs/go-ipfs:v0.4.23
63 | volumes:
64 | - /data/ipfs # or you can mount it directly to some directory on your system
65 | ipfs-publish:
66 | image: auhau/ipfs-publish
67 | environment:
68 | IPFS_PUBLISH_CONFIG: /data/ipfs_publish/config.toml
69 | IPFS_PUBLISH_VERBOSITY: 3
70 | IPFS_PUBLISH_IPFS_MULTIADDR: /dns4/ipfs/tcp/5001/http
71 | volumes:
72 | - /data/ipfs_publish
73 | depends_on:
74 | - ipfs
75 | ports:
76 | - 8080:8000
77 | ```
78 |
79 | Also you can deploy it as a standalone image using `docker`, but it requires some more configuration based on your use-case.
80 | If you have running IPFS daemon on the host like this:
81 |
82 | ```shell
83 | $ docker run -e IPFS_PUBLISH_CONFIG=/data/ipfs_publish/config.toml
84 | -e IPFS_PUBLISH_IPFS_MULTIADDR=/ip4/127.0.0.1/tcp/5001/http --network="host" auhau/ipfs_publish
85 | ```
86 |
87 | !!! warning "Host network"
88 | `--network="host"` will bind the container's ports directly to the machine exposing it to the world, so be careful
89 | with that! With this configuration you can use `localhost` address which will address the host machine.
90 |
91 | **Be aware that this mode does not work on macOS!**
92 |
93 | !!! tip "Non-host network approach"
94 | If you don't want to use the `--network="host"` mode, you can achieve similar behaviour if you set
95 | `IPFS_PUBLISH_IPFS_HOST=$HOST_ADDR`. `HOST_ADDR` is a special environment variable, that
96 | is set inside the container and is resolved to IP address under which the host machine is reachable.
97 |
98 | !!! warning "IPFS Daemon API restriction"
99 | By default the Daemon API is listening only for connection from localhost. If you want to run the IPFS Daemon
100 | on the host and connect to it from container as described before, then you have to configure the IPFS Daemon
101 | to listen to correct address.
102 |
103 | ### systemd service
104 | Depending on your OS, you can create a systemd service for running the webhook's server. It will handle restarting
105 | the service, and provides an easy way to manage it.
106 |
107 |
108 | **ipfs-publish.service**
109 |
110 | ```
111 | [Unit]
112 | Description=ipfs-publish webhook server
113 | After=network.target
114 | StartLimitIntervalSec=0
115 |
116 | [Service]
117 | Type=simple
118 | Restart=always
119 | RestartSec=1
120 | User=<>
121 | ExecStart=ipfs-publish server
122 |
123 | [Install]
124 | WantedBy=multi-user.target
125 | ```
126 |
127 | Moreover you can define reloading service which can automatically reload the configuration inside the server on change
128 | and hence mitigate the current limitation of ipfs-publish. You can define it as:
129 |
130 | **ipfs-publish-watcher.service**
131 | ```
132 | [Unit]
133 | Description=ipfs-publish restarter
134 | After=network.target
135 |
136 | [Service]
137 | Type=oneshot
138 | ExecStart=/usr/bin/systemctl restart ipfs-publish.service
139 |
140 | [Install]
141 | WantedBy=multi-user.target
142 | ```
143 |
144 | **ipfs-publish-watcher.path**
145 | ```
146 | [Path]
147 | PathModified=<>
148 |
149 | [Install]
150 | WantedBy=multi-user.target
151 | ```
152 |
153 | Remember that you have to enable&start both `.service` and `.path` units!
154 |
155 | ```shell
156 | $ systemctl enable ipfs-publish-watcher.service && systemctl start ipfs-publish-watcher.service
157 | $ systemctl enable ipfs-publish-watcher.path && systemctl start ipfs-publish-watcher.path
158 | ```
159 |
160 |
161 | ## Usage
162 |
163 | Upon the first invocation of the command `ipfs-publish`, you are asked to specify some general configuration, like
164 | how to connect to the IPFS daemon etc. This process will create the config file.
165 |
166 | !!! info "Default config file placement"
167 | The default placement of the ipfs-publish's config is on path: `~/.ipfs_publish.toml`
168 |
169 | !!! tip "Specific config's placement"
170 | You can use different path where the config's is to be stored using either the environment variable `IPFS_PUBLISH_CONFIG`
171 | or the `ipfs-publish --config ` option.
172 |
173 |
174 | For available CLI commands see the `--help` page. Basic overview of usage of the CLI:
175 |
176 | ```shell
177 | # Add new repo
178 | $ ipfs-publish add
179 | [?] Git URL of the repo: https://github.com/auhau/auhau.github.io
180 | [?] Name of the new repo: github_com_auhau_auhau_github_io
181 | [?] Do you want to check-out specific branch?:
182 | [?] Do you want to publish to IPNS? (Y/n):
183 | [?] Path to build binary, if you want to do some pre-processing before publishing:
184 | [?] Path to after-publish binary, if you want to do some actions after publishing:
185 | [?] Directory to be published inside the repo. Path related to the root of the repo: /
186 |
187 | Successfully added new repo!
188 | Use this URL for you webhook: http://localhost:8080/publish/github_com_auhau_auhau_github_io
189 | Also set this string as your hook's Secret: NIHT4785CVFT358GFE08RDAZG
190 | Your IPNS address: /ipns/QmRTqaW3AJJXmKyiNT7MqqZ4VjGtNNxPyTkgo3Q7pmoCeX/
191 |
192 | # List current enabled repos
193 | $ ipfs-publish list
194 | github_com_auhau_auhau_github_io
195 |
196 | # Show details of repo
197 | $ ipfs-publish show github_com_auhau_auhau_github_io
198 | github_com_auhau_auhau_github_io
199 | Git URL: https://github.com/auhau/auhau.github.io
200 | Secret: EAHJ43UYT7LUEM4QFRZ4IFAXL
201 | IPNS key: ipfs_publishg_github_com_auhau_auhau_github_io
202 | IPNS lifetime: 24h
203 | IPNS ttl: 15m
204 | IPNS address: /ipns/QmRTqaW3AJJXmKyiNT7MqqZ4VjGtNNxPyTkgo3Q7pmoCeX/
205 | Last IPFS address: None
206 | Webhook address: http://localhost:8080/publish/github_com_auhau_auhau_github_io
207 |
208 | # You can manually publish repo
209 | $ ipfs-publish publish github_com_auhau_auhau_github_io
210 |
211 | # Starts HTTP server & IPNS republishing service
212 | $ ipfs-publish server &
213 | Running on http://localhost:8080 (CTRL + C to quit)
214 | ```
215 |
216 | !!! warning "Restarting server after changes"
    If you do any modifications of the ipfs-publish state (e.g. call `add` / `remove` commands) then
218 | the changes will be propagated only after restart of the ipfs-publish server!
219 |
220 | ### Environment variables overview
221 |
222 | * `IPFS_PUBLISH_VERBOSITY` (int) - specifies verbosity level, same like the `-vvv` option.
223 | * `IPFS_PUBLISH_EXCEPTIONS` (bool) - if `True` then any exceptions raised are not handled by the CLI (mostly for testing).
224 | * `IPFS_PUBLISH_CONFIG` (str) - path to where the config file will be looked for.
225 | * `IPFS_PUBLISH_IPFS_HOST` (str) - hostname where IPFS HTTP API will connect to.
226 | * `IPFS_PUBLISH_IPFS_PORT` (int) - port which will be used for IPFS HTTP API connection.
* `IPFS_PUBLISH_IPFS_MULTIADDR` (str) - multiaddr to connect to IPFS HTTP Daemon. Has precedence over IPFS Host & Port.
228 |
229 | ### Publishing flow
230 |
231 | When repo is being published it follows these steps:
232 |
233 | 1. Freshly clone the Git repo into temporary directory, the default branch is checked out.
234 | 1. If `build_bin` is defined, it is executed inside root of the repo.
235 | 1. The `.git` folder is removed and if the `.ipfs_publish_ignore` file is present in root of the repo, the files
236 | specified in the file are removed.
237 | 1. The old pinned version is unpinned.
238 | 1. If `publish_dir` is specified, then this folder is added and pinned (if configured) to IPFS, otherwise the root of the repo is added.
239 | 1. If publishing to IPNS is configured, the IPNS entry is updated.
240 | 1. If CloudFlare DNS publishing is configured, then the latest CID is updated on configured DNS entry.
241 | 1. If `after_publish_bin` is defined, then it is executed inside root of the repo and the added CID is passed as argument.
242 | 1. Cleanup of the repo.
243 |
244 | ### Ignore files
245 |
ipfs-publish can remove files before publishing the repo to IPFS. It works similarly to `.gitignore`, except that it
247 | follows the Python's [glob's syntax](https://docs.python.org/3/library/glob.html), that is similar to UNIX style glob.
248 | Hence usage of `**` is required if you want to remove files from subdirectories.
249 |
250 | The definition of which files should be removed has to be placed in root of the repo with filename `.ipfs_publish_ignore`.
251 |
252 | ### Building binary
253 |
254 | ipfs-publish allows you to run binary before publishing the repo. This feature can be used to build the repo before publishing it.
255 | The binary is invoked in root of the repository and it needs to be installed and accessible to the OS user that is running
256 | the webhook's server. It is invoked with shell, so shell's capabilities are available.
257 |
The binary can be specified during the bootstrapping of the repo using the CLI, or later on added into the config file under "execute" subsection
259 | of the repo's configuration: `[repos..execute]` under name `build_bin`. Example:
260 |
261 | ```toml
262 | [repos.github_com_auhau_auhau_github_io.execute]
263 | build_bin = "jekyll build"
264 | ```
265 |
266 | ### After-publish binary
267 |
268 | Similarly to building binary, there is also support for running a command after publishing to the IPFS. This can be
269 | used for example to directly set the CID to your dns_link TXT record and not depend on IPNS. The published
IPFS address is passed as an argument to the binary.
271 |
The binary can be specified during the bootstrapping of the repo using the CLI, or later on added into the config file under "execute" subsection
273 | of the repo's configuration: `[repos..execute]` under name `after_publish_bin`. Example:
274 |
275 | ```toml
276 | [repos.github_com_auhau_auhau_github_io.execute]
277 | after_publish_bin = "update-dns.sh"
278 | ```
279 |
280 | ### Publishing sub-directory
281 |
282 | ipfs-publish enables you to publish only part of the repo, by specifying the `publish_dir` parameter. This can be used
283 | together with the building binary to publish only the build site sub-folder.
284 |
285 | ### Specific branch to publish
286 |
You can configure a specific branch in your Git repo that should be published. You can do so during adding the
288 | repo, or later on adding `branch=` to the config:
289 |
290 | ```toml
291 | [repos.github_com_auhau_auhau_github_io]
292 | branch = "gh-pages"
293 | ```
294 |
295 | ### CloudFlare
296 |
As IPNS is currently not very performant for resolution, it is best practice to avoid it. In order to overcome this, there
298 | is native support for changing DNSLink DNS record on CloudFlare provider (for other providers you have to write your own
299 | script and use after-publish hook).
300 |
In order for this to work, ipfs-publish has to have access to CloudFlare. You have to provide an API token, for all
302 | possible ways how to do that see [python-cloudflare](https://github.com/cloudflare/python-cloudflare/#providing-cloudflare-username-and-api-key)
303 | documentation.
304 |
305 | !!! danger "DNS Access"
306 | Configure this with security in mind! If somebody stole your API token, they could very effectively attack your website!
307 |
308 | !!! tip "Scoped API tokens"
    Use API Tokens with smallest privileges (e.g. edit DNS entry) and limit them only to the Zone that is needed!
310 |
311 | !!! success "Recommended setting"
312 | It is recommended to use the environment variable `CF_API_KEY` with API Token, preferably configured on the systemd
313 | unit as these files are not readable without `sudo` and the environment variables are not passed to any hooks
314 | (`build` and `after_publish` script), which should provide hopefully satisfying level of security.
315 |
316 | If you want to add support for this later on, you have to specify Zone and DNS ID like so:
317 |
318 | ```toml
319 | [repos.github_com_auhau_auhau_github_io.cloudflare]
320 | zone_id = "fb91814936c9812312aasdfc57ac516e98"
321 | dns_id = "c964dfc80ed523124d1casd513hu0a52"
322 | ```
323 |
--------------------------------------------------------------------------------
/publish/publishing.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import logging
3 | import os
4 | import pathlib
5 | import re
6 | import secrets
7 | import shutil
8 | import string
9 | import subprocess
10 | import tempfile
11 | import typing
12 |
13 | import click
14 | import git
15 | import inquirer
16 | import ipfshttpclient
17 |
18 | from publish import cloudflare
19 | from publish import config as config_module, exceptions, PUBLISH_IGNORE_FILENAME, DEFAULT_LENGTH_OF_SECRET, \
20 | IPNS_KEYS_NAME_PREFIX, IPNS_KEYS_TYPE, helpers
21 |
# Module-level logger for the publishing workflow.
logger = logging.getLogger('publish.publishing')

# Type aliases for the repo classes/instances defined later in this module.
repo_class = typing.Union[typing.Type['GithubRepo'], typing.Type['GenericRepo']]
repo_instance = typing.Union['GithubRepo', 'GenericRepo']

# An empty answer to the branch prompt means "use the repo's default branch"
# (see GenericRepo.bootstrap_repo, which then calls get_default_branch()).
DEFAULT_BRANCH_PLACEHOLDER = ''
28 |
29 |
def get_name_from_url(url: str) -> str:
    """
    Derive a repo name from its URL.

    Strips a leading ``https://`` prefix, replaces every non-word character
    with an underscore and lower-cases the result.

    :param url: Git repo URL.
    :return: Normalized name suitable as a configuration key.
    """
    stripped = url.replace('https://', '')
    return re.sub(r'\W', '_', stripped).lower()
37 |
38 |
def validate_name(name: str, config: config_module.Config) -> bool:
    """
    Validate that name is not already present in the configuration.

    :param name: Candidate repo name; compared case-insensitively.
    :param config: Configuration object holding the already known repos.
    :return: True when the name is still free.
    """
    known_repos = config.repos
    return name.lower() not in known_repos
48 |
49 |
# One lifetime component: an integer amount plus a unit (h/m/s); the negative
# lookahead (?!.*\2) rejects a unit that occurs again later in the string.
LIFETIME_SYNTAX_REGEX = r'(?:(\d+)(h|m|s)(?!.*\2))'
"""
Regex validating lifetime syntax, examples:
1h -> TRUE
1M -> TRUE
1s -> TRUE
5h2m1s -> TRUE
1m2m -> FALSE
1h 2m -> FALSE
"""

# Anchored variant used to validate a complete lifetime string.
LIFETIME_SYNTAX_CHECK_REGEX = f'^{LIFETIME_SYNTAX_REGEX}+?$'
# Maps unit suffixes to the corresponding datetime.timedelta keyword argument.
LIFETIME_MAPPING = {
    'h': 'hours',
    'm': 'minutes',
    's': 'seconds',
}
67 |
68 |
def convert_lifetime(value: str) -> datetime.timedelta:
    """
    Parse a lifetime string (e.g. ``5h2m1s``) into a timedelta.

    :param value: Lifetime definition using h/m/s units; matched case-insensitively.
    :raises exceptions.PublishingException: When the syntax is invalid.
    :return: Sum of all components as a timedelta.
    """
    if re.match(LIFETIME_SYNTAX_CHECK_REGEX, value, re.IGNORECASE) is None:
        raise exceptions.PublishingException('Unknown lifetime syntax!')

    total = datetime.timedelta()
    for amount, unit_char in re.findall(LIFETIME_SYNTAX_REGEX, value, re.IGNORECASE):
        unit_name = LIFETIME_MAPPING[unit_char.lower()]
        total += datetime.timedelta(**{unit_name: int(amount)})

    return total
86 |
87 |
def validate_time_span(lifetime: str):
    """
    Tell whether the given lifetime string has valid syntax.

    :param lifetime: Lifetime definition to check.
    :return: True when parseable by convert_lifetime, False otherwise.
    """
    try:
        convert_lifetime(lifetime)
    except exceptions.PublishingException:
        return False
    return True
99 |
100 |
def validate_url(url):
    """
    Check that the given string looks like a valid HTTP(S) URL.

    Regex attribution goes to the Django project.

    :param url: URL to validate.
    :return: True when the URL matches the pattern.
    """
    pattern = (
        r'^(?:http)s?://'
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$'
    )
    return re.match(pattern, url, re.IGNORECASE) is not None
116 |
117 |
def validate_repo(url: str) -> bool:
    """
    Validate Git repository which is supposed to be placed on passed URL.
    Validated are two points: validity of the URL and being able to access the Git repo.

    Checking accessibility of the repo is done using `git ls-remote`, hence the repo must not be protected with
    password.

    :param url: URL of the Git repo to validate.
    :return: True when the URL is valid and the repo is reachable.
    """
    if not validate_url(url):
        return False

    # Pass the command as an argument list (no shell) so a malicious URL cannot
    # inject shell commands; core.askpass=echo prevents an interactive password
    # prompt from blocking the process.
    result = subprocess.run(['git', '-c', 'core.askpass=echo', 'ls-remote', url], capture_output=True)
    if result.returncode != 0:
        logger.error(f'Error while fetching Git\'s remote refs! {result.stderr.decode("utf-8")}')

    return result.returncode == 0
137 |
138 |
def validate_branch(git_url: str, name: str) -> bool:
    """
    Validate that branch name exists in the Git repository defined by git_url.

    :param git_url: URL of the Git repo.
    :param name: Branch name to look for; the placeholder (empty string) is always accepted.
    :raises exceptions.RepoException: When the remote refs cannot be fetched.
    :return: True when the branch exists on the remote.
    """
    if name == DEFAULT_BRANCH_PLACEHOLDER:
        return True

    # Pass the command as an argument list (no shell) so a malicious URL cannot
    # inject shell commands; core.askpass=echo prevents an interactive password
    # prompt from blocking the process.
    result = subprocess.run(['git', '-c', 'core.askpass=echo', 'ls-remote', git_url], capture_output=True)
    if result.returncode != 0:
        raise exceptions.RepoException(f'Error while fetching Git\'s remote refs! {result.stderr.decode("utf-8")}')

    refs_list = result.stdout.decode("utf-8").split('\n')
    regex = re.compile(r'refs/heads/(.*)')

    for entry in refs_list:
        match = regex.search(entry)

        if match is not None and match.group(1) == name:
            return True

    return False
164 |
165 |
def get_default_branch(gir_url: str) -> str:
    """
    Returns the default branch for Git repo.

    Runs ``git ls-remote --symref <url> HEAD`` and parses the branch name out of the
    symbolic-ref output.

    :param gir_url: URL of the Git repo (parameter name kept for backward compatibility).
    :raises exceptions.RepoException: When the remote refs cannot be fetched, or the
        default branch cannot be determined unambiguously.
    :return: Name of the default branch.
    """
    # Pass the command as an argument list (no shell) so a malicious URL cannot
    # inject shell commands; core.askpass=echo prevents an interactive password
    # prompt from blocking the process.
    result = subprocess.run(['git', '-c', 'core.askpass=echo', 'ls-remote', '--symref', gir_url, 'HEAD'],
                            capture_output=True)
    if result.returncode != 0:
        raise exceptions.RepoException(f'Error while fetching Git\'s remote refs! {result.stderr.decode("utf-8")}')

    refs_list = result.stdout.decode("utf-8")
    match = re.findall(r'refs/heads/([\w_-]*)\t', refs_list, re.MULTILINE)

    if len(match) != 1:
        raise exceptions.RepoException('We can\'t determine which is the default branch, please specify it manually!')

    return match[0]
184 |
185 |
def is_github_url(url: str) -> bool:
    """
    Decide whether the passed URL points to GitHub.

    :param url: URL to inspect; matched case-insensitively.
    :return: True when the URL contains "github".
    """
    lowered = url.lower()
    return 'github' in lowered
194 |
195 |
def get_repo_class(url: str) -> repo_class:
    """
    Pick the repo class appropriate for the passed Git repo URL.

    :param url: Git repo URL.
    :return: GithubRepo for GitHub-hosted repos, GenericRepo otherwise.
    """
    return GithubRepo if is_github_url(url) else GenericRepo
206 |
207 |
def bootstrap_repo(config: config_module.Config, git_repo_url=None, **kwargs) -> repo_instance:
    """
    Initiate the interactive bootstrap process of creating new Repo's instance.

    :param config: Loaded configuration.
    :param git_repo_url: Optional Git URL; when missing, the user is prompted for it.
    :param kwargs: Extra arguments forwarded to the chosen repo class.
    :return: Bootstrapped repo instance.
    """
    if git_repo_url is None:
        git_repo_url = inquirer.shortcuts.text('Git URL of the repo', validate=lambda _, x: validate_repo(x))

    repo_cls = GithubRepo if is_github_url(git_repo_url) else GenericRepo
    return repo_cls.bootstrap_repo(config, git_repo_url=git_repo_url, **kwargs)
224 |
225 |
class GenericRepo(cloudflare.CloudFlareMixin):
    """
    Generic Repo's class that represent and store all information about Git repository that can be placed on any
    Git's provider.

    It allows to publish repo's content to IPFS and IPNS.
    """

    _TOML_MAPPING = {
        'name': None,
        'git_repo_url': None,
        'branch': None,
        'secret': None,
        'publish_dir': None,
        'last_ipfs_addr': None,
        'pin': None,
        'build_bin': 'execute',
        'after_publish_bin': 'execute',
        'republish': 'ipns',
        'ipns_key': 'ipns',
        'ipns_addr': 'ipns',
        'ipns_lifetime': 'ipns',
        'zone_id': 'cloudflare',
        'dns_id': 'cloudflare'
    }
    """
    Mapping that maps the repo's properties into TOML's config sections.
    None means the property lives at the repo's top level; a string names the subsection.
    """

    name: str = None
    """
    Defines name of repo under which it will be represented in the IPFS Publish name/configuration etc.
    """

    git_repo_url: str = None
    """
    Defines where the Git repo is placed and from where it will be cloned.
    """

    branch: typing.Optional[str] = None
    """
    Defines what branch should be checked out.
    """

    secret: str = None
    """
    Defines random string secret, that is used to secure the webhook calls from attacker who would try to trigger publish
    events on its own.
    """

    ipns_addr: str = ''
    """
    IPNS address in format "/ipns/", that defines the address where the repo is published.
    """

    ipns_key: str = ''
    """
    Defines name of the key that will be used for publishing IPNS record
    """

    ipns_lifetime: str = '24h'
    """
    Defines the lifetime of IPNS entries
    """

    pin: bool = True
    """
    Defines if the published content is pinned to the IPFS node
    """

    last_ipfs_addr: typing.Optional[str] = None
    """
    Stores the last IPFS address of the published address in format "/ipfs//"
    """

    publish_dir: str = '/'
    """
    Defines a path inside the repo that will be published. Default is the root of the repo.
    """

    build_bin: typing.Optional[str] = None
    """
    Binary that is invoked prior the publishing to IPFS.
    """

    after_publish_bin: typing.Optional[str] = None
    """
    Binary that is invoked after the content of the repo is published to IPFS. The binary gets as argument
    the IPFS address that it was published under.
    """

    def __init__(self, config: config_module.Config, name: str, git_repo_url: str, secret: str,
                 branch: typing.Optional[str] = None,
                 ipns_addr: typing.Optional[str] = None, ipns_key: typing.Optional[str] = None, ipns_lifetime='24h',
                 republish=False, pin=True, last_ipfs_addr=None, publish_dir: str = '/',
                 build_bin=None, after_publish_bin=None, ipns_ttl='15m', **kwargs):
        """
        :param config: Loaded configuration; provides the IPFS client used for publishing.
        :param name: Name of the repo inside the ipfs-publish configuration.
        :param git_repo_url: URL from which the repo is cloned.
        :param secret: Secret string protecting the webhook endpoint.
        :param branch: Optional branch to check out; None means the repo's default branch.
        :param ipns_addr: IPNS address ("/ipns/<hash>/") the repo is published under.
        :param ipns_key: Name of the IPNS key used for publishing; None disables IPNS.
        :param ipns_lifetime: Lifetime of the published IPNS record.
        :param republish: Republish flag, stored under the "ipns" config section.
        :param pin: Whether published content is pinned on the IPFS node.
        :param last_ipfs_addr: IPFS address of the previously published version.
        :param publish_dir: Path inside the repo to publish; '/' publishes the root.
        :param build_bin: Command invoked before publishing.
        :param after_publish_bin: Command invoked after publishing; gets the CID as argument.
        :param ipns_ttl: TTL of the published IPNS record.
        :param kwargs: Forwarded to the parent CloudFlareMixin (e.g. zone_id / dns_id).
        """
        self.name = name
        self.git_repo_url = git_repo_url
        self.branch = branch
        self.secret = secret
        self.config = config

        # IPFS setting
        self.pin = pin
        self.republish = republish
        self.ipns_key = ipns_key
        self.last_ipfs_addr = last_ipfs_addr
        self.ipns_lifetime = ipns_lifetime
        self.ipns_addr = ipns_addr
        self.ipns_ttl = ipns_ttl

        # Build etc. setting
        self.publish_dir = publish_dir
        self.build_bin = build_bin
        self.after_publish_bin = after_publish_bin

        super().__init__(**kwargs)

    @property
    def webhook_url(self) -> str:
        """
        Returns URL with FQDN for the webhook invocation.
        The secret is carried as a query parameter (GithubRepo overrides this).
        :return:
        """
        return f'{self.config.webhook_base}/publish/{self.name}?secret={self.secret}'

    def _run_bin(self, cwd: pathlib.Path, cmd: str, *args):
        """
        Execute binary with arguments in specified directory.

        :param cwd: Directory in which the binary will be invoked
        :param cmd: Binary definition invoked with shell
        :param args: Extra arguments appended (space-joined) to the command
        :raises exceptions.RepoException: If the binary exited with non-zero status
        :return:
        """
        # NOTE(review): os.chdir changes the process-wide cwd and is never
        # restored afterwards — confirm no caller depends on the previous cwd.
        os.chdir(str(cwd))
        full_cmd = f'{cmd} {" ".join(args)}'
        logger.info(f'Running shell command "{full_cmd}" with cwd={cwd}')

        r = subprocess.run(full_cmd, shell=True, capture_output=True)

        if r.returncode != 0:
            # Surface the hook's output only on failure, for debugging.
            r.stderr and logger.debug(f'STDERR: {r.stderr.decode("utf-8")}')
            r.stdout and logger.debug(f'STDOUT: {r.stdout.decode("utf-8")}')
            raise exceptions.RepoException(f'\'{cmd}\' binary exited with non-zero code!')

    def publish_repo(self) -> None:
        """
        Main method that handles publishing of the repo to IPFS.

        Steps: clone, optional build hook, strip ignored files, unpin previous
        version, add to IPFS, optional IPNS/DNS updates, optional after-publish
        hook, cleanup.

        :return:
        """
        # 1. Fresh clone into a temporary directory.
        path = self._clone_repo()

        # 2. Optional build step executed in the repo's root.
        if self.build_bin:
            self._run_bin(path, self.build_bin)

        # 3. Drop .git and any files listed in the ignore file.
        self._remove_ignored_files(path)

        ipfs = self.config.ipfs
        # 4. Unpin the previous version unless configured to keep it.
        if not self.config['keep_pinned_previous_versions'] and self.last_ipfs_addr is not None:
            logger.info(f'Unpinning hash: {self.last_ipfs_addr}')
            ipfs.pin.rm(self.last_ipfs_addr)

        # 5. Add the configured directory (or repo root) to IPFS; the last
        # entry of the add() result carries the root directory's hash.
        publish_dir = path / (self.publish_dir[1:] if self.publish_dir.startswith('/') else self.publish_dir)
        logger.info(f'Adding directory {publish_dir} to IPFS')
        result = ipfs.add(publish_dir, recursive=True, pin=self.pin)
        cid = f'/ipfs/{result[-1]["Hash"]}/'
        self.last_ipfs_addr = cid
        logger.info(f'Repo successfully added to IPFS with hash: {cid}')

        # 6. Update IPNS when a key is configured.
        if self.ipns_key is not None:
            self.publish_name(cid)

        # 7. Update the CloudFlare DNSLink entry.
        # NOTE(review): ConfigException is treated as "CloudFlare not
        # configured" and silently ignored — confirm in CloudFlareMixin.update_dns.
        try:
            self.update_dns(cid)
        except exceptions.ConfigException:
            pass

        # 8. Optional after-publish hook, invoked with the new CID as argument.
        if self.after_publish_bin:
            self._run_bin(path, self.after_publish_bin, cid)

        # 9. Remove the temporary clone.
        self._cleanup_repo(path)

    def publish_name(self, cid) -> None:
        """
        Main method that handles publishing of the IPFS addr into IPNS.

        :param cid: IPFS address to publish; no-op when None.
        :return:
        """
        if cid is None:
            return

        logger.info('Updating IPNS name')
        ipfs = self.config.ipfs
        ipfs.name.publish(cid, key=self.ipns_key, ttl=self.ipns_ttl)
        logger.info('IPNS successfully published')

    def _clone_repo(self) -> pathlib.Path:
        """
        Method that will clone the repo defined by git_repo_url into temporary directory and returns the path.
        :return: Path to the root of the cloned repo
        """
        path = tempfile.mkdtemp()
        logger.info(f'Cloning repo: \'{self.git_repo_url}\' to {path}')

        # Only pass branch= when one is configured, so Git picks the default otherwise.
        if self.branch:
            git.Repo.clone_from(self.git_repo_url, path, branch=self.branch)
        else:
            git.Repo.clone_from(self.git_repo_url, path)

        return pathlib.Path(path).resolve()

    def _remove_ignored_files(self, path: pathlib.Path):
        """
        Reads the ignore file and removes the ignored files based on glob from the directory and all subdirectories.
        Also removes the ignore file itself and .git folder.

        :param path: Root of the cloned repo.
        :return:
        """
        shutil.rmtree(path / '.git')
        ignore_file = path / PUBLISH_IGNORE_FILENAME

        if not ignore_file.exists():
            return

        # One glob pattern per line.
        entries = ignore_file.read_text()
        for entry in entries.split('\n'):
            self._remove_glob(path, entry)

        ignore_file.unlink()

    def _remove_glob(self, path: pathlib.Path, glob: str):
        """
        Removes all files from path that matches the glob string.

        :param path: Root directory the deletions must stay inside.
        :param glob: Glob pattern (Python glob syntax).
        :return:
        """
        for path_to_delete in path.glob(glob):
            path_to_delete = path_to_delete.resolve()
            if not path_to_delete.exists():
                continue

            # Safety check: refuse to delete anything that resolved outside the
            # temporary clone directory (e.g. via '..' or a symlink in the glob).
            if path not in path_to_delete.parents:
                raise exceptions.RepoException(
                    f'Trying to delete file outside the repo temporary directory! {path_to_delete}')

            if path_to_delete.is_file():
                path_to_delete.unlink()
            else:
                shutil.rmtree(str(path_to_delete))

    @staticmethod
    def _cleanup_repo(path):
        """
        Removes the cloned repo from path.

        :param path: Directory to remove recursively.
        :return:
        """
        logging.info(f'Cleaning up path: {path}')
        shutil.rmtree(path)

    def to_toml_dict(self) -> dict:
        """
        Serialize the instance into dictionary that is saved to TOML config.
        Attributes are placed at the top level or into subsections per _TOML_MAPPING;
        None values are skipped.
        :return:
        """
        out = {}
        for attr, section in self._TOML_MAPPING.items():
            value = getattr(self, attr, None)

            if section is None:
                if value is not None:
                    out[attr] = value
            else:
                if section not in out:
                    out[section] = {}

                if value is not None:
                    out[section][attr] = value

        return out

    @classmethod
    def from_toml_dict(cls, data: dict, config: config_module.Config) -> 'GenericRepo':
        """
        Deserialize the passed data dict of TOML config into instance.
        Nested sections are flattened before being passed as keyword arguments.
        :param data:
        :param config:
        :raises exceptions.RepoException: When the data do not match the constructor's signature.
        :return:
        """

        try:
            return cls(config=config, **helpers.flatten(data))
        except TypeError:
            raise exceptions.RepoException('Passed repo\'s data are not valid for creating valid Repo instance!')

    @classmethod
    def bootstrap_property(cls, name: str, category: str, message: str, value: typing.Any = None,
                           default: typing.Any = None,
                           validate: typing.Callable = None):
        """
        Resolve a single repo property: use the pre-supplied value when given
        (after validating it), otherwise prompt the user interactively.

        :param name: Human-readable property name used in the error message.
        :param category: Name of the inquirer shortcut to use (e.g. 'text', 'confirm').
        :param message: Prompt shown to the user.
        :param value: Pre-supplied value; when not None it is validated and returned without prompting.
        :param default: Default answer offered by the prompt.
        :param validate: Optional inquirer-style validator called as validate(answers, value).
        :raises exceptions.RepoException: When the pre-supplied value fails validation.
        :return: The resolved property value.
        """
        if value is not None:
            if validate is not None and not validate(None, value):
                raise exceptions.RepoException(f'Invalid {name}: {value}!')

            return value

        return getattr(inquirer.shortcuts, category)(message, validate=validate, default=default)

    @classmethod
    def bootstrap_repo(cls, config: config_module.Config, name=None, git_repo_url=None, branch=None, secret=None,
                       ipns_key=None, ipns_lifetime=None, pin=None, republish=None, after_publish_bin=None,
                       build_bin=None, publish_dir: typing.Optional[str] = None, ipns_ttl=None) -> 'GenericRepo':
        """
        Method that interactively bootstraps the repository by asking interactive questions.
        Any parameter passed as non-None skips the corresponding prompt.

        :param ipns_ttl:
        :param config:
        :param name:
        :param git_repo_url:
        :param branch:
        :param secret:
        :param ipns_key:
        :param ipns_lifetime:
        :param pin:
        :param republish:
        :param after_publish_bin:
        :param build_bin:
        :param publish_dir:
        :raises exceptions.RepoException: On invalid values or a configuration that
            publishes nowhere (no IPNS, no CloudFlare, no after-publish command).
        :return:
        """

        git_repo_url = cls.bootstrap_property('Git repo URL', 'text', 'Git URL of the repo', git_repo_url,
                                              validate=lambda _, x: validate_repo(x))

        name = cls.bootstrap_property('Name', 'text', 'Name of the new repo', name,
                                      default=get_name_from_url(git_repo_url),
                                      validate=lambda _, x: validate_name(x, config)).lower()

        branch = cls.bootstrap_property('Branch name', 'text', 'Which branch name should be build?', branch,
                                        default=DEFAULT_BRANCH_PLACEHOLDER,
                                        validate=lambda _, x: validate_branch(git_repo_url, x))
        # Empty answer means "use the repo's default branch" — resolve it now.
        if branch == DEFAULT_BRANCH_PLACEHOLDER:
            branch = get_default_branch(git_repo_url)

        ipns_key, ipns_addr = bootstrap_ipns(config, name, ipns_key)
        zone_id, dns_id = cloudflare.bootstrap_cloudflare()

        # secrets.choice gives a cryptographically strong webhook secret.
        if secret is None:
            secret = ''.join(
                secrets.choice(string.ascii_uppercase + string.digits) for _ in range(DEFAULT_LENGTH_OF_SECRET))

        pin = cls.bootstrap_property('Pin flag', 'confirm', 'Do you want to pin the published IPFS objects?', pin,
                                     default=True)

        if build_bin is None:
            build_bin = inquirer.shortcuts.text('Path to build binary, if you want to do some pre-processing '
                                                'before publishing', default='')

        if after_publish_bin is None:
            after_publish_bin = inquirer.shortcuts.text('Path to after-publish binary, if you want to do some '
                                                        'actions after publishing', default='')

        if publish_dir is None:
            publish_dir = inquirer.shortcuts.text('Directory to be published inside the repo. Path related to the root '
                                                  'of the repo', default='/')

        ipns_lifetime = ipns_lifetime or '24h'
        if not validate_time_span(ipns_lifetime):
            raise exceptions.RepoException('Passed lifetime is not valid! Supported units are: h(our), m(inute), '
                                           's(seconds)!')

        ipns_ttl = ipns_ttl or '15m'
        if not validate_time_span(ipns_ttl):
            raise exceptions.RepoException('Passed ttl is not valid! Supported units are: h(our), m(inute), '
                                           's(seconds)!')

        # A repo with no IPNS, no CloudFlare DNS entry and no after-publish hook
        # would publish into the void — reject such a configuration.
        if ipns_key is None and after_publish_bin is None and zone_id is None:
            raise exceptions.RepoException(
                'You have choose not to use IPNS, not modify DNSLink entry on CloudFlare and you also have not '
                'specified any after publish command. This does not make sense! What do you want to do '
                'with this setting?! I have no idea, so aborting!')

        return cls(config=config, name=name, git_repo_url=git_repo_url, branch=branch, secret=secret, pin=pin,
                   publish_dir=publish_dir,
                   ipns_key=ipns_key, ipns_addr=ipns_addr, build_bin=build_bin, after_publish_bin=after_publish_bin,
                   republish=republish, ipns_lifetime=ipns_lifetime, ipns_ttl=ipns_ttl, dns_id=dns_id,
                   zone_id=zone_id)
618 |
619 |
def bootstrap_ipns(config: config_module.Config, name: str, ipns_key: str = None) -> typing.Tuple[str, str]:
    """
    Handles bootstrapping of the IPNS information.

    When no key name is passed, the user is asked interactively whether IPNS publishing
    is wanted; on yes a key derived from the repo's name is generated (or, on a name
    conflict, reused or created under a user-provided name). When a key name is passed,
    it is looked up on the IPFS node and generated when missing.

    :param config: Loaded configuration providing the IPFS client.
    :param name: Name of the repo, used to derive the default key name.
    :param ipns_key: Optional name of the IPNS key to use.
    :raises exceptions.RepoException: When no key could be generated or fetched.
    :return: Tuple of (key name, IPNS address); both are None when the user declines IPNS.
    """

    ipns_addr = None
    if ipns_key is None:
        wanna_ipns = inquirer.shortcuts.confirm('Do you want to publish to IPNS?', default=True)

        if wanna_ipns:
            ipns_key = f'{IPNS_KEYS_NAME_PREFIX}_{name}'

            try:
                out = config.ipfs.key.gen(ipns_key, IPNS_KEYS_TYPE)
            except ipfshttpclient.exceptions.Error:
                # Key generation failed — most likely a key with this name already exists.
                use_existing = inquirer.shortcuts.confirm(f'There is already IPNS key with name \'{ipns_key}\', '
                                                          f'do you want to use it?', default=True)

                if use_existing:
                    keys = config.ipfs.key.list()
                    out = next((x for x in keys['Keys'] if x['Name'] == ipns_key), None)

                    if out is None:
                        raise exceptions.RepoException('We were not able to generate or fetch the IPNS key')
                else:
                    # Keep asking for a fresh name until key generation succeeds.
                    while True:
                        ipns_key = inquirer.shortcuts.text('Then please provide non-existing name for the IPNS key')

                        try:
                            out = config.ipfs.key.gen(ipns_key, IPNS_KEYS_TYPE)
                            break
                        except ipfshttpclient.exceptions.Error:
                            click.echo('There is already existing key with this name!')
                            continue

            ipns_addr = f'/ipns/{out["Id"]}/'
    else:
        keys = config.ipfs.key.list()
        key_object = next((x for x in keys['Keys'] if x['Name'] == ipns_key), None)
        if key_object is None:
            # Bug fix: the original logged a literal '{}' placeholder without ever
            # interpolating the key name — use an f-string instead.
            logger.info(f'The passed IPNS key name \'{ipns_key}\' was not found, generating new key with this name')
            key_object = config.ipfs.key.gen(ipns_key, IPNS_KEYS_TYPE)

        ipns_addr = f'/ipns/{key_object["Id"]}/'

    return ipns_key, ipns_addr
671 |
672 |
class GithubRepo(GenericRepo):
    """
    Special case of Repo specific to GitHub hosted repos.
    """

    def __init__(self, git_repo_url, **kwargs):
        """
        :param git_repo_url: Git repo URL; must point to GitHub.
        :param kwargs: Forwarded to GenericRepo.
        :raises exceptions.RepoException: When the URL is not a GitHub URL.
        """
        if is_github_url(git_repo_url):
            super().__init__(git_repo_url=git_repo_url, **kwargs)
        else:
            raise exceptions.RepoException('The passed Git repo URL is not related to GitHub!')

    @property
    def webhook_url(self):
        """
        Webhook URL without the secret query parameter used by GenericRepo.
        """
        return f'{self.config.webhook_base}/publish/{self.name}'
687 |
--------------------------------------------------------------------------------