├── ipgit ├── __init__.py ├── git.py └── app.py ├── requirements.txt ├── .dockerignore ├── heroku.yml ├── .github ├── dependabot.yml └── workflows │ └── build.yml ├── Dockerfile ├── sender.py ├── README.md └── .gitignore /ipgit/__init__.py: -------------------------------------------------------------------------------- 1 | from . app import app # noqa 2 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | fastapi==0.116.2 2 | uvicorn==0.31.1 3 | pfluent==0.0.1 4 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .git 2 | __pycache__ 3 | *.pyc 4 | *.pyo 5 | *.pyd 6 | .Python 7 | venv 8 | -------------------------------------------------------------------------------- /heroku.yml: -------------------------------------------------------------------------------- 1 | build: 2 | docker: 3 | web: Dockerfile 4 | 5 | run: 6 | web: ipfs daemon --init & sleep 5 && uvicorn ipgit:app --host 0.0.0.0 --port $PORT 7 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: pip 4 | directory: './' 5 | schedule: 6 | interval: daily 7 | - package-ecosystem: docker 8 | directory: './' 9 | schedule: 10 | interval: daily 11 | - package-ecosystem: github-actions 12 | directory: './' 13 | schedule: 14 | interval: daily 15 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ipfs/go-ipfs:latest as ipfs 2 | FROM python:3.12.4-slim 3 | 4 | WORKDIR /app 5 | 6 | COPY --from=ipfs /usr/local/bin/ipfs /bin/ipfs 7 | 8 | RUN 
apt update \ 9 | && apt install -y git \ 10 | && rm -rf /var/lib/apt/lists/* 11 | 12 | ENV PORT=8000 13 | 14 | COPY ./requirements.txt . 15 | RUN pip install -r requirements.txt 16 | 17 | COPY ./ ./ 18 | 19 | CMD ipfs daemon --init \ 20 | & uvicorn ipgit:app --host 0.0.0.0 --port ${PORT} 21 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: build 2 | 3 | on: 4 | - push 5 | - pull_request 6 | 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v4 12 | - uses: actions/setup-python@v5 13 | with: 14 | python-version: '3.9' 15 | cache: 'pip' 16 | - run: pip install -r requirements.txt 17 | 18 | flake: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - uses: actions/checkout@v4 22 | - uses: actions/setup-python@v5 23 | - run: pip install flake8 24 | - run: flake8 --statistics ipgit/ 25 | 26 | image: 27 | runs-on: ubuntu-latest 28 | steps: 29 | - uses: actions/checkout@v4 30 | - run: docker build . 
class Git(object):
    """Thin wrapper around a bare git repository on disk.

    Exposes just enough of the git smart-HTTP backend (ref advertisement
    and the stateless-rpc services) for the web frontend to proxy pushes
    and fetches.
    """

    def __init__(self, path: str):
        super(Git, self).__init__()
        self.path = Path(path)

    @staticmethod
    def init(path: str) -> 'Git':
        """Create a bare repository at *path* and return a handle to it."""
        runner = Runner('git')
        runner = runner.arg('init').arg('--bare').arg(path)
        runner.run(check=True)
        return Git(path)

    def add_hook(self, name: str, hook: str) -> str:
        """Write *hook* into ``hooks/<name>`` and mark it executable.

        Returns the path of the hook file as a string.
        """
        target = self.path / 'hooks' / name
        target.write_text(hook)
        mode = target.stat().st_mode
        target.chmod(mode | stat.S_IEXEC)
        return str(target)

    def inforefs(self, service: str) -> IO:
        """Return the smart-HTTP ref advertisement for *service*.

        *service* is ``git-upload-pack`` or ``git-receive-pack``.
        """
        completed = Runner(service)\
            .arg('--stateless-rpc')\
            .arg('--advertise-refs')\
            .arg(self.path)\
            .run(check=True, capture_output=True)

        # Prefix the refs with the pkt-line service header the protocol
        # requires: 4 hex digits of length (including themselves), the
        # "# service=..." line, then a flush packet.  Adapted from:
        # https://github.com/schacon/grack/blob/master/lib/grack.rb
        header = b'# service=' + service.encode()
        length = b'%04x' % (len(header) + 4)
        payload = length + header + b'0000' + completed.stdout

        return io.BytesIO(payload)

    def service(self, service: str, data: bytes) -> IO:
        """Pipe the client's request *data* through *service* and return
        the service's raw response as a readable stream."""
        proc = Runner(service)\
            .arg('--stateless-rpc')\
            .arg(self.path)\
            .popen(stdin=PIPE, stdout=PIPE)

        try:
            out, _ = proc.communicate(data)
        finally:
            proc.wait()

        return io.BytesIO(out)
@app.get('/{path}/info/refs')
async def inforefs(path: str, service: Service):
    """Smart-HTTP ref advertisement for the repository named *path*.

    Creates the bare repository on first use and (re)installs a
    post-receive hook that adds and pins the repository to IPFS on
    every push.
    """
    repo_path = Path(TEMPDIR.name, path)
    repo = Git(repo_path) if repo_path.exists() else Git.init(repo_path)

    # FIX: the shebang must be the very first bytes of the file.  The
    # previous triple-quoted literal started with a newline, which put
    # '#!/bin/sh' on line 2, so the kernel would not recognise the
    # interpreter line when git executed the hook.
    hook = (
        '#!/bin/sh\n'
        'echo "IPFS hash:"\n'
        'ipfs add --recursive --quieter --pin $PWD\n'
    )
    repo.add_hook('post-receive', hook)

    data = repo.inforefs(service.value)
    media = f'application/x-{service.value}-advertisement'
    return StreamingResponse(data, media_type=media)
class Sender(object):
    """Walks a local git repository and mirrors it into IPFS via the
    IPFS HTTP API (blobs as files, trees/commits/branches as DAG nodes)."""

    def __init__(self, path: str, url: str = None):
        super(Sender, self).__init__()
        self.repo = Repo(path)
        self.url = url or 'http://localhost:5001/api/v0'
        self.client = httpx.Client(base_url=self.url)
        # FIX(B019): per-instance blob cache keyed by hexsha.  The previous
        # functools.lru_cache on the method keyed on `self` and kept the
        # instance (and every blob object) alive for the cache's lifetime.
        self._blob_cache = {}

    def _add(self, data: IO) -> dict:
        """Upload raw bytes to IPFS; return an IPLD link ``{'/': hash}``."""
        # FIX: annotation was `-> str` but a dict is returned.
        file = {'file': data}
        response = self.client.post('/add', files=file)
        response = response.json()
        return {'/': response['Hash']}

    def _put(self, data: dict) -> dict:
        """Store *data* as an IPLD DAG node; return its CID object."""
        # FIX: annotation was `-> str` but /dag/put returns a CID mapping.
        payload = io.BytesIO(json.dumps(data).encode())
        response = self.client.post('/dag/put', files={'file': payload})
        return response.json()['Cid']

    def _send_blob(self, blob: object) -> dict:
        """Upload one git blob (memoised per content hash)."""
        link = self._blob_cache.get(blob.hexsha)
        if link is None:
            stream = io.BytesIO(blob.data_stream.read())
            link = self._blob_cache[blob.hexsha] = self._add(stream)
        return link

    def _send_tree(self, tree: object) -> dict:
        """Recursively store a git tree (directory) as a DAG node."""
        if tree.type == 'blob':
            return self._send_blob(tree)
        node = {b.name: self._send_blob(b) for b in tree.blobs}
        # FIX: recurse into subdirectories; the previous version iterated
        # only `tree.blobs`, silently dropping every nested directory.
        node.update({t.name: self._send_tree(t) for t in tree.trees})
        return self._put(node)

    def _send_commit(self, commit: object) -> dict:
        """Recursively store a commit, its ancestry and its tree."""
        parents = {p.hexsha: self._send_commit(p) for p in commit.parents}
        tree = {t.name: self._send_tree(t) for t in commit.tree}

        node = {
            'author': {
                'date': commit.authored_datetime.isoformat(),
                'email': commit.author.email,
                'name': commit.author.name,
            },
            'committer': {
                'date': commit.committed_datetime.isoformat(),
                'email': commit.committer.email,
                'name': commit.committer.name,
            },
            'tree': tree,
            'message': commit.message,
            'parents': parents,
        }

        return self._put(node)

    def _send_branch(self, branch: object) -> dict:
        """Store every commit reachable from *branch*; return the CID."""
        commits = self.repo.iter_commits(branch.name)
        commits = {c.hexsha: self._send_commit(c) for c in commits}
        return self._put(commits)

    def send(self):
        """Store every branch of the repository; return the root CID."""
        branches = {b.name: self._send_branch(b) for b in self.repo.branches}
        return self._put({'blobs': branches})


# NOTE(review): this runs at import time and needs a live IPFS daemon;
# consider guarding with `if __name__ == '__main__':`.
sender = Sender('./')
sent = sender.send()

from pprint import pprint as pp  # noqa: E402
pp(sent)
Can't I just use it as a simple tool or service? 21 | 22 | **Yes, you can.** 23 | 24 | This is why I created this, extremely simple and limited, web application. It 25 | is just an endpoint. A git remote, for the tech savvy. You can configure it 26 | with one bash command and just use it. See the (small) example below: 27 | 28 | ```sh 29 | $ git remote add ipfs http://ipgit.herokuapp.com/ 30 | $ git push ipfs 31 | remote: Resolving deltas: 100% (53/53), done. 32 | remote: IPFS hash: 33 | remote: QmU8wwg65D2MpbumSKPTWUhydmAin5jmXbwNhxUWzyeXs1 34 | ``` 35 | 36 | It works the other way around as well. If you want, you can use it to clone git 37 | repositories that are stored in IPFS. 38 | 39 | ```sh 40 | $ git clone https://ipgit.herokuapp.com/QmZUnAU4Vn7DvDHEnJ1dz2uV2dimf79HNXdffgY9MbQGWP 41 | Cloning into 'QmZUnAU4Vn7DvDHEnJ1dz2uV2dimf79HNXdffgY9MbQGWP'... 42 | $ ls QmZUnAU4Vn7DvDHEnJ1dz2uV2dimf79HNXdffgY9MbQGWP/ 43 | Dockerfile Makefile app.py git.py heroku.yml post-update requirements.txt sender.py 44 | ``` 45 | 46 | That is it! No installation, no requirements. Just plain old git. 47 | 48 | Your files will be pinned by default. However, because heroku shuts down the 49 | app after some inactivity time, it is not guaranteed that your files will be 50 | there when you need them. You should try pinning them into some file pinning 51 | service to avoid losing them. 52 | 53 | ## Install 54 | 55 | This project uses [fastapi][1] and [uvicorn][2] for server interactions. 56 | [IPFS][3] needs to be running on the local machine for the server to start. 57 | And, obviously, you will need [git][4] installed. 58 | 59 | ```sh 60 | $ pip install -r requirements.txt 61 | ``` 62 | 63 | ## Usage 64 | 65 | To run a local version of this project, just execute: 66 | 67 | ```sh 68 | $ ipfs daemon --init 69 | $ uvicorn ipgit:app --reload 70 | $ make local # optional 71 | ``` 72 | 73 | `make local` adds a `local` remote on `http://localhost:8000` for development. 
74 | When developing, you can simply test your modification by calling 75 | `git push local`. 76 | 77 | ## Thanks 78 | 79 | This project would not have been possible without the code in the following 80 | repositories. They helped me understand a lot about git http backend. 81 | 82 | - [git_http_backend.py][5] 83 | - [grack][6] 84 | 85 | 86 | [1]: https://fastapi.tiangolo.com/ 87 | [2]: https://uvicorn.org/ 88 | [3]: https://ipfs.io/ 89 | [4]: https://git-scm.com/ 90 | 91 | [5]: https://github.com/dvdotsenko/git_http_backend.py 92 | [6]: https://github.com/schacon/grack 93 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.toptal.com/developers/gitignore/api/git,python,venv,dotenv,linux 3 | # Edit at https://www.toptal.com/developers/gitignore?templates=git,python,venv,dotenv,linux 4 | 5 | ### dotenv ### 6 | .env 7 | 8 | ### Git ### 9 | # Created by git for backups. 
To disable backups in Git: 10 | # $ git config --global mergetool.keepBackup false 11 | *.orig 12 | 13 | # Created by git when using merge tools for conflicts 14 | *.BACKUP.* 15 | *.BASE.* 16 | *.LOCAL.* 17 | *.REMOTE.* 18 | *_BACKUP_*.txt 19 | *_BASE_*.txt 20 | *_LOCAL_*.txt 21 | *_REMOTE_*.txt 22 | 23 | ### Linux ### 24 | *~ 25 | 26 | # temporary files which can be created if a process still has a handle open of a deleted file 27 | .fuse_hidden* 28 | 29 | # KDE directory preferences 30 | .directory 31 | 32 | # Linux trash folder which might appear on any partition or disk 33 | .Trash-* 34 | 35 | # .nfs files are created when an open file is removed but is still being accessed 36 | .nfs* 37 | 38 | ### Python ### 39 | # Byte-compiled / optimized / DLL files 40 | __pycache__/ 41 | *.py[cod] 42 | *$py.class 43 | 44 | # C extensions 45 | *.so 46 | 47 | # Distribution / packaging 48 | .Python 49 | build/ 50 | develop-eggs/ 51 | dist/ 52 | downloads/ 53 | eggs/ 54 | .eggs/ 55 | lib/ 56 | lib64/ 57 | parts/ 58 | sdist/ 59 | var/ 60 | wheels/ 61 | pip-wheel-metadata/ 62 | share/python-wheels/ 63 | *.egg-info/ 64 | .installed.cfg 65 | *.egg 66 | MANIFEST 67 | 68 | # PyInstaller 69 | # Usually these files are written by a python script from a template 70 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
71 | *.manifest 72 | *.spec 73 | 74 | # Installer logs 75 | pip-log.txt 76 | pip-delete-this-directory.txt 77 | 78 | # Unit test / coverage reports 79 | htmlcov/ 80 | .tox/ 81 | .nox/ 82 | .coverage 83 | .coverage.* 84 | .cache 85 | nosetests.xml 86 | coverage.xml 87 | *.cover 88 | *.py,cover 89 | .hypothesis/ 90 | .pytest_cache/ 91 | pytestdebug.log 92 | 93 | # Translations 94 | *.mo 95 | *.pot 96 | 97 | # Django stuff: 98 | *.log 99 | local_settings.py 100 | db.sqlite3 101 | db.sqlite3-journal 102 | 103 | # Flask stuff: 104 | instance/ 105 | .webassets-cache 106 | 107 | # Scrapy stuff: 108 | .scrapy 109 | 110 | # Sphinx documentation 111 | docs/_build/ 112 | doc/_build/ 113 | 114 | # PyBuilder 115 | target/ 116 | 117 | # Jupyter Notebook 118 | .ipynb_checkpoints 119 | 120 | # IPython 121 | profile_default/ 122 | ipython_config.py 123 | 124 | # pyenv 125 | .python-version 126 | 127 | # pipenv 128 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 129 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 130 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 131 | # install all needed dependencies. 132 | #Pipfile.lock 133 | 134 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 135 | __pypackages__/ 136 | 137 | # Celery stuff 138 | celerybeat-schedule 139 | celerybeat.pid 140 | 141 | # SageMath parsed files 142 | *.sage.py 143 | 144 | # Environments 145 | .venv 146 | env/ 147 | venv/ 148 | ENV/ 149 | env.bak/ 150 | venv.bak/ 151 | pythonenv* 152 | 153 | # Spyder project settings 154 | .spyderproject 155 | .spyproject 156 | 157 | # Rope project settings 158 | .ropeproject 159 | 160 | # mkdocs documentation 161 | /site 162 | 163 | # mypy 164 | .mypy_cache/ 165 | .dmypy.json 166 | dmypy.json 167 | 168 | # Pyre type checker 169 | .pyre/ 170 | 171 | # pytype static type analyzer 172 | .pytype/ 173 | 174 | # profiling data 175 | .prof 176 | 177 | ### venv ### 178 | # Virtualenv 179 | # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ 180 | [Bb]in 181 | [Ii]nclude 182 | [Ll]ib 183 | [Ll]ib64 184 | [Ll]ocal 185 | [Ss]cripts 186 | pyvenv.cfg 187 | pip-selfcheck.json 188 | 189 | # End of https://www.toptal.com/developers/gitignore/api/git,python,venv,dotenv,linux 190 | --------------------------------------------------------------------------------