├── my_spaces
│   ├── __init__.py
│   ├── templates
│   │   └── Dockerfile
│   └── main.py
├── MANIFEST.in
├── demo.jpeg
├── header.png
├── setup.py
├── README.md
└── .gitignore

--------------------------------------------------------------------------------
/my_spaces/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-include my_spaces/templates/ *
2 | 
--------------------------------------------------------------------------------
/demo.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrancescoSaverioZuppichini/my-spaces/HEAD/demo.jpeg
--------------------------------------------------------------------------------
/header.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrancescoSaverioZuppichini/my-spaces/HEAD/header.png
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import find_packages, setup
2 | 
3 | setup(
4 |     name="my-spaces",
5 |     description="Self Host Hugging Face Spaces",
6 |     version="0.0.1",
7 |     packages=find_packages(include=["my_spaces", "my_spaces.*"]),
8 |     include_package_data=True,
9 |     install_requires=["jinja2", "typer[all]", "docker"],
10 |     entry_points={
11 |         "console_scripts": ["my-spaces=my_spaces.main:main"],
12 |     },
13 | )
14 | 
--------------------------------------------------------------------------------
/my_spaces/templates/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM nvcr.io/nvidia/pytorch:22.10-py3
2 | # ARG HUGGING_FACE_HUB_TOKEN
3 | ENV DEBIAN_FRONTEND noninteractive
4 | # gradio and streamlit default ports
5 | EXPOSE 7860 8501
6 | RUN apt update && apt install -y git-lfs ffmpeg libsm6 libxext6 cmake libgl1-mesa-glx \
7 |     && rm -rf /var/lib/apt/lists/*
8 | RUN git lfs install
9 | WORKDIR /home/user
10 | WORKDIR /home/user/app
11 | # we will reinstall pillow using pillow-simd, for better performance
12 | RUN pip uninstall -y pillow \
13 |     && pip install -U --force-reinstall pillow-simd
14 | RUN pip install "protobuf<4" "click<8.1" gradio datasets huggingface_hub ftfy GitPython
15 | # clone user stuff
16 | RUN git clone {{ repo_url }} .
17 | RUN if [ -f "requirements.txt" ]; then pip install -r requirements.txt; fi;
18 | RUN if [ -f "packages.txt" ]; then apt-get install $(grep -vE "^\s*#" packages.txt | tr "\n" " "); fi;
19 | # some spaces had this error
20 | # https://stackoverflow.com/questions/72706073/attributeerror-partially-initialized-module-cv2-has-no-attribute-gapi-wip-gs
21 | # so we need to downgrade opencv
22 | RUN pip uninstall -y opencv-python \
23 |     && pip install opencv-python==4.5.5.64
24 | # if hf token was passed
25 | # run the app once for the initial setup
26 | # RUN if [ "$HUGGING_FACE_HUB_TOKEN" ]; then python app.py; fi
27 | ENTRYPOINT ["python", "app.py"]
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # My Spaces: self-hosting 🤗 Spaces
2 | 
3 | ![alt](header.png)
4 | 
5 | My Spaces allows you to quickly self-host almost any [Hugging Face Space](https://huggingface.co/spaces) wherever you want!
6 | 
7 | Under the hood, we are using Docker and [NVIDIA PyTorch containers](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/pytorch).
8 | 
9 | Since Hugging Face doesn't distribute the Docker image used in their Spaces, a lot of Spaces won't work due to broken links and similar issues.
10 | 
11 | Not all Spaces work; this is because I don't know which versions of some packages Hugging Face is using (e.g. `ffmpeg`). If you are from Hugging Face, feel free to comment on this [issue](https://github.com/FrancescoSaverioZuppichini/my-spaces/issues/1).
12 | 
13 | This project aims to provide transparent and truly open machine learning demos to the people. If something doesn't work, please feel free to open an issue :)
14 | 
15 | Contributions are welcome!
16 | 
17 | ## Getting Started
18 | 
19 | You can install the latest version from GitHub
20 | 
21 | ```bash
22 | pip install git+https://github.com/FrancescoSaverioZuppichini/my-spaces
23 | ```
24 | 
25 | TODO
26 | 
27 | ## Run a space!
28 | 
29 | You need a Hugging Face Hub token, exported as `HUGGING_FACE_HUB_TOKEN`.
30 | 
31 | ### From a git repo URL
32 | ```bash
33 | export HUGGING_FACE_HUB_TOKEN=
34 | my-spaces run https://huggingface.co/spaces/Francesco/stable-diffusion
35 | ```
36 | Output
37 | ```
38 | INFO:root:🚀 Running ...
39 | INFO:root:🐋 Log from container: 
40 | Downloading: 100%|██████████| 543/543 [00:00<00:00, 294kB/s]B/s]
41 | Downloading: 100%|██████████| 342/342 [00:00<00:00, 277kB/s] ?it/s]
42 | Downloading: 100%|██████████| 4.63k/4.63k [00:00<00:00, 3.51MB/s] 1.20s/it]
43 | ```
44 | After a while, open up `http://localhost:7860/` if it was a gradio app, or `http://localhost:8501/` if it was a streamlit app.
45 | 
46 | ![alt](demo.jpeg)
47 | 
48 | **The generated Dockerfile will be inside `~/.my-spaces/dockerfiles`**
49 | 
50 | 
51 | For each space, we create a Docker image, build it, and run a container.
52 | 
53 | All images use the `my-spaces` name and the repo name as `tag`. For example, using the following space (which doesn't work on Hugging Face Spaces, gods only know why) `https://huggingface.co/spaces/Francesco/stable-diffusion`, we will create an image named `my-spaces:stable-diffusion`.
54 | 
55 | ### From Docker Hub
56 | 
57 | I've personally [built and distributed the following images](https://hub.docker.com/repository/docker/zuppif/my-spaces); you can run them with
58 | 
59 | ```
60 | export HUGGING_FACE_HUB_TOKEN=
61 | my-spaces run zuppif/my-spaces:stable-diffusion
62 | ```
63 | 
64 | ## Q&A
65 | **Does it work on Windows?**
66 | No idea, please try it and report back :)
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | my-spaces/
2 | # Byte-compiled / optimized / DLL files
3 | __pycache__/
4 | *.py[cod]
5 | *$py.class
6 | 
7 | # C extensions
8 | *.so
9 | 
10 | # Distribution / packaging
11 | .Python
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 | 
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 | 
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 | 
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 | cover/
54 | 
55 | # Translations
56 | *.mo
57 | *.pot
58 | 
59 | # Django stuff:
60 | *.log
61 | local_settings.py
62 | db.sqlite3
63 | db.sqlite3-journal
64 | 
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 | 
69 | # Scrapy stuff:
70 | .scrapy
71 | 
72 | # Sphinx documentation
73 | docs/_build/
74 | 
75 | # PyBuilder
76 | .pybuilder/
77 | target/
78 | 
79 | # Jupyter Notebook
80 | .ipynb_checkpoints
81 | 
82 | # IPython
83 | profile_default/
84 | ipython_config.py
85 | 
86 | # pyenv
87 | # For a library or package, you might want to ignore these files since the code is
88 | # intended to run in multiple environments; otherwise, check them in:
89 | # .python-version
90 | 
91 | # pipenv
92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
95 | # install all needed dependencies.
96 | #Pipfile.lock
97 | 
98 | # poetry
99 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
100 | # This is especially recommended for binary packages to ensure reproducibility, and is more
101 | # commonly ignored for libraries.
102 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
103 | #poetry.lock
104 | 
105 | # pdm
106 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
107 | #pdm.lock
108 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
109 | # in version control.
110 | # https://pdm.fming.dev/#use-with-ide
111 | .pdm.toml
112 | 
113 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
114 | __pypackages__/
115 | 
116 | # Celery stuff
117 | celerybeat-schedule
118 | celerybeat.pid
119 | 
120 | # SageMath parsed files
121 | *.sage.py
122 | 
123 | # Environments
124 | .env
125 | .venv
126 | env/
127 | venv/
128 | ENV/
129 | env.bak/
130 | venv.bak/
131 | 
132 | # Spyder project settings
133 | .spyderproject
134 | .spyproject
135 | 
136 | # Rope project settings
137 | .ropeproject
138 | 
139 | # mkdocs documentation
140 | /site
141 | 
142 | # mypy
143 | .mypy_cache/
144 | .dmypy.json
145 | dmypy.json
146 | 
147 | # Pyre type checker
148 | .pyre/
149 | 
150 | # pytype static type analyzer
151 | .pytype/
152 | 
153 | # Cython debug symbols
154 | cython_debug/
155 | 
156 | # PyCharm
157 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
158 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
159 | # and can be added to the global gitignore or merged into this file. For a more nuclear
160 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
161 | #.idea/
--------------------------------------------------------------------------------
/my_spaces/main.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import sys
3 | from dataclasses import dataclass, field
4 | from os import environ
5 | from pathlib import Path
6 | from typing import List, Optional
7 | 
8 | import docker
9 | import typer
10 | from docker.models.containers import Container
11 | from docker.models.images import Image
12 | from docker.types import DeviceRequest
13 | from jinja2 import Template
14 | 
15 | logging.basicConfig(level=logging.INFO)
16 | 
17 | app = typer.Typer()
18 | ORGANIZATION = "zuppif"
19 | 
20 | 
21 | @dataclass
22 | class LocalSpaceFolder:
23 |     root: Path = Path.home() / ".my-spaces"
24 | 
25 |     def __post_init__(self):
26 |         self.root.mkdir(exist_ok=True)
27 |         self.dockerfiles_root = self.root / "dockerfiles"
28 |         self.dockerfiles_root.mkdir(exist_ok=True)
29 | 
30 | 
31 | @dataclass
32 | class LocalSpace:
33 |     client: docker.DockerClient
34 |     image: str
35 |     tag: str
36 | 
37 |     def __post_init__(self):
38 |         self.container = self.maybe_find_container()
39 | 
40 |     def maybe_find_container(self) -> Optional[Container]:
41 |         containers: List[Container] = self.client.containers.list(all=True)
42 |         for container in containers:
43 |             tags = container.image.tags
44 |             for tag in tags:
45 |                 if tag == f"{self.image}:{self.tag}":
46 |                     return container
47 | 
48 |     def build_dockerfile(
49 |         self, repo_url: str, template_path: Path, out_dir: Path
50 |     ) -> Path:
51 |         with template_path.open("r") as f:
52 |             template = Template(f.read())
53 |         template_out_path = out_dir / f"Dockerfile.{self.tag}"
54 |         template_out_path.write_text(template.render(dict(repo_url=repo_url)))
55 |         return template_out_path
56 | 
57 |     def build(self, repo_url: str, template_path: Path, out_dir: Path):
58 |         dockerfile_path = self.build_dockerfile(repo_url, template_path, out_dir)
59 |         with dockerfile_path.open("rb") as f:
60 |             image, logs = self.client.images.build(
61 |                 path=str(out_dir), fileobj=f, tag=f"{self.image}:{self.tag}"
62 |             )
63 |         return self
64 | 
65 |     def run(self):
66 |         container: Container = self.client.containers.run(
67 |             f"{self.image}:{self.tag}",
68 |             detach=True,
69 |             environment={"HUGGING_FACE_HUB_TOKEN": environ["HUGGING_FACE_HUB_TOKEN"]},
70 |             ipc_mode="host",
71 |             network_mode="host",
72 |             device_requests=[DeviceRequest(capabilities=[["gpu"]])],
73 |             stop_signal="SIGINT",
74 |         )
75 |         return container
76 | 
77 |     def stop(self):
78 |         if self.container:
79 |             self.container.stop()
80 | 
81 |     def start(self, force_run: bool = False):
82 |         if force_run:
83 |             if self.container:
84 |                 self.container.remove()
85 |                 self.container = None
86 |         if self.container:
87 |             self.container.start()
88 |         else:
89 |             self.container = self.run()
90 |         return self.container
91 | 
92 |     @classmethod
93 |     def from_repo_url(cls, repo_url: str, client: docker.DockerClient):
94 |         tag = Path(repo_url).parts[-1]
95 |         image = "my-spaces"
96 |         return cls(client, image, tag)
97 | 
98 | 
99 | @dataclass
100 | class LocalSpaces:
101 |     folder: Optional[LocalSpaceFolder] = None
102 |     template_path: Optional[Path] = None
103 |     spaces: List[LocalSpace] = field(default_factory=list)
104 | 
105 |     def __post_init__(self):
106 |         self.folder = LocalSpaceFolder() if self.folder is None else self.folder
107 |         self.template_path = (
108 |             Path(__file__).parent / "templates" / "Dockerfile"
109 |             if self.template_path is None
110 |             else self.template_path
111 |         )
112 |         self.client = docker.from_env()
113 | 
114 |     def run(self, identifier: str, force_run: bool = False):
115 |         is_image_link = "zuppif/" in identifier
116 |         if is_image_link:
117 |             # in this case, we just pull it
118 |             image, tag = identifier.split(":")
119 |             self.client.images.pull(image, tag=tag)
120 |             self.space = LocalSpace(self.client, image, tag)
121 |         else:
122 |             # identifier must be a link to a git repo, so we create the image
123 |             self.space = LocalSpace.from_repo_url(identifier, self.client)
124 |             images: dict[str, Image] = {}
125 |             # let's check if we had built it before
126 |             for image in self.client.images.list():
127 |                 for tag in image.tags:
128 |                     images[tag] = image
129 |             if f"{self.space.image}:{self.space.tag}" not in images:
130 |                 logging.info(f"🔨 Building {self.space.image}:{self.space.tag} ...")
131 |                 self.space.build(
132 |                     identifier, self.template_path, self.folder.dockerfiles_root
133 |                 )
134 |                 logging.info("🔨 Done! ")
135 |         logging.info("🚀 Running ...")
136 |         container = self.space.start(force_run)
137 |         logging.info("🐋 Log from container: ")
138 |         for line in container.logs(stream=True):
139 |             print("\t>", line.strip().decode("utf-8"))
140 | 
141 |     def stop(self):
142 |         logging.info("🛑 Stopping container ... ")
143 |         self.space.stop()
144 |         logging.info("👋 Done! ")
145 | 
146 |     def list(self) -> List[str]:
147 |         images: List[Image] = self.client.images.list(name=f"{ORGANIZATION}/my-spaces")
148 |         local_images: List[Image] = self.client.images.list(name="my-spaces")
149 |         images += local_images
150 |         # tags look like my-spaces:<name>, keep only the <name> part
151 |         return [image.tags[0].split(":")[-1] for image in images]
152 | 
153 | 
154 | @app.command()
155 | def list():
156 |     local_spaces = LocalSpaces(LocalSpaceFolder(root=Path("./my-spaces")))
157 |     logging.info("👇 Current spaces:")
158 |     logging.info("\n".join(f"- {name}" for name in local_spaces.list()))
159 | 
160 | 
161 | @app.command()
162 | def run(
163 |     identifier: str,
164 |     force_run: bool = typer.Option(
165 |         default=False,
166 |         help="Will remove the previous container and re-run it from scratch. Useful if something went wrong (e.g. you hit ctrl+c while it was downloading stuff).",
167 |     ),
168 | ):
169 |     try:
170 |         local_spaces = LocalSpaces(LocalSpaceFolder())
171 |         local_spaces.run(identifier, force_run)
172 |     except KeyboardInterrupt:
173 |         local_spaces.stop()
174 |         sys.exit()
175 | 
176 | 
177 | def main():
178 |     app()
179 | 
180 | 
181 | if __name__ == "__main__":
182 |     main()
183 | 
--------------------------------------------------------------------------------
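Note (not part of the repository files): besides the `my-spaces` CLI, the classes in `my_spaces/main.py` can also be driven from Python. Below is a minimal sketch of that, assuming the package is installed from GitHub as shown in the README, Docker is running with the NVIDIA runtime available, and `HUGGING_FACE_HUB_TOKEN` is set in the environment; the space URL is the example from the README.

```python
# Hypothetical programmatic usage of my-spaces (a sketch, not an official API).
# Assumes: pip install git+https://github.com/FrancescoSaverioZuppichini/my-spaces,
# a running Docker daemon with GPU support, and HUGGING_FACE_HUB_TOKEN exported.
from my_spaces.main import LocalSpaceFolder, LocalSpaces

# LocalSpaceFolder() creates ~/.my-spaces and ~/.my-spaces/dockerfiles if needed.
spaces = LocalSpaces(LocalSpaceFolder())

# Renders the Dockerfile template, builds the image (if not built before),
# starts the container, and streams its logs — same as `my-spaces run <identifier>`.
spaces.run("https://huggingface.co/spaces/Francesco/stable-diffusion")
```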