├── Open_Sora_jupyter.ipynb
└── README.md

/Open_Sora_jupyter.ipynb:
--------------------------------------------------------------------------------
{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "view-in-github"
   },
   "source": [
    "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/camenduru/Open-Sora-jupyter/blob/main/Open_Sora_jupyter.ipynb)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "id": "VjYy0F2gZIPR"
   },
   "outputs": [],
   "source": [
    "!pip install ninja colossalai mmengine\n",
    "!pip install https://download.pytorch.org/whl/cu121/torch-2.2.1%2Bcu121-cp310-cp310-linux_x86_64.whl\n",
    "!pip install https://download.pytorch.org/whl/cu121/xformers-0.0.25-cp310-cp310-manylinux2014_x86_64.whl\n",
    "\n",
    "%cd /content\n",
    "!git clone -b 23.05-devel https://github.com/NVIDIA/apex\n",
    "%cd /content/apex\n",
    "!pip install -v --disable-pip-version-check --no-cache-dir --global-option=\"--cpp_ext\" --global-option=\"--cuda_ext\" ./\n",
    "\n",
    "%cd /content\n",
    "!git clone https://github.com/Dao-AILab/flash-attention\n",
    "%cd /content/flash-attention\n",
    "!pip install -v -e .\n",
    "\n",
    "%cd /content\n",
    "!git clone -b dev https://github.com/camenduru/Open-Sora\n",
    "%cd /content/Open-Sora\n",
    "!pip install -v -e .\n",
    "\n",
    "!apt -y install -qq aria2\n",
    "!aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/hpcai-tech/Open-Sora/resolve/main/OpenSora-v1-16x256x256.pth -d /content/Open-Sora/models -o OpenSora-v1-16x256x256.pth\n",
    "!aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/hpcai-tech/Open-Sora/resolve/main/OpenSora-v1-HQ-16x256x256.pth -d /content/Open-Sora/models -o OpenSora-v1-HQ-16x256x256.pth\n",
    "!aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/hpcai-tech/Open-Sora/resolve/main/OpenSora-v1-HQ-16x512x512.pth -d /content/Open-Sora/models -o OpenSora-v1-HQ-16x512x512.pth\n",
    "\n",
    "!git clone https://huggingface.co/DeepFloyd/t5-v1_1-xxl /content/Open-Sora/pretrained_models/t5_ckpts/t5-v1_1-xxl"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "%cd /content/Open-Sora\n",
    "# Sample 16x256x256 (5s/sample)\n",
    "!torchrun --standalone --nproc_per_node 1 scripts/inference.py configs/opensora/inference/16x256x256.py --ckpt-path /content/Open-Sora/models/OpenSora-v1-16x256x256.pth"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "%cd /content/Open-Sora\n",
    "# Sample 16x512x512 (20s/sample, 100 time steps)\n",
    "!torchrun --standalone --nproc_per_node 1 scripts/inference.py configs/opensora/inference/16x512x512.py --ckpt-path /content/Open-Sora/models/OpenSora-v1-HQ-16x512x512.pth"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "%cd /content/Open-Sora\n",
    "# Sample 64x512x512 (40s/sample, 100 time steps)\n",
    "!torchrun --standalone --nproc_per_node 1 scripts/inference.py configs/opensora/inference/64x512x512.py --ckpt-path /content/Open-Sora/models/OpenSora-v1-HQ-16x512x512.pth"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "%cd /content/Open-Sora\n",
    "# Sample 64x512x512 with sequence parallelism (30s/sample, 100 time steps)\n",
    "# sequence parallelism is enabled automatically when nproc_per_node is larger than 1\n",
    "!torchrun --standalone --nproc_per_node 2 scripts/inference.py configs/opensora/inference/64x512x512.py --ckpt-path /content/Open-Sora/models/OpenSora-v1-HQ-16x512x512.pth"
   ]
  }
 ],
 "metadata": {
  "accelerator": "GPU",
  "colab": {
   "gpuType": "T4",
   "provenance": []
  },
  "kernelspec": {
   "display_name": "Python 3",
   "name": "python3"
  },
  "language_info": {
   "name": "python"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}
--------------------------------------------------------------------------------

/README.md:
--------------------------------------------------------------------------------
🐣 Please follow me for new updates https://twitter.com/camenduru<br />
🔥 Please join our discord server https://discord.gg/k5BwmmvJJU<br />
🥳 Please join my patreon community https://patreon.com/camenduru<br />

### 🍊 Jupyter Notebook

- 🚦 Important: Open-Sora is a work-in-progress model.
- 🚦 This Jupyter notebook is only for showing the current progress.
- 🚦 This is not the final model.

| Notebook | Info |
| --- | --- |
| [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/camenduru/Open-Sora-jupyter/blob/main/Open_Sora_jupyter.ipynb) | Open_Sora_jupyter (😭 Pro Colab ~22GB VRAM) |

### 🧬 Code
https://github.com/hpcaitech/Open-Sora

### 🌐 Page
https://hpc-ai.com/blog/open-sora-v1.0

### 🖼 Output v1
https://hpcaitech.github.io/Open-Sora/

https://github.com/camenduru/Open-Sora-jupyter/assets/54370274/ae4afbe4-5b02-44db-8036-881c33fd3705

https://github.com/camenduru/Open-Sora-jupyter/assets/54370274/02c19b8f-7dd4-4462-b854-be34b097a14d

https://github.com/camenduru/Open-Sora-jupyter/assets/54370274/3ca302c9-f859-4769-a670-e68f3473b2d4

https://github.com/camenduru/Open-Sora-jupyter/assets/54370274/30658109-5c6a-44a8-92da-32f4c8c677e3

### 🏢 Sponsor
https://modelslab.com
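
### 🧪 GPU Check (Optional)

The table above notes that the samples were generated on a Pro Colab GPU with roughly 22 GB of VRAM. The snippet below is a minimal sketch, using only standard `torch.cuda` calls, that reports which GPU the current runtime was given and how much memory it has, so you can check before starting a long run.

```python
# Minimal sketch: report the assigned GPU and its total VRAM before running
# the larger Open-Sora configs (~22 GB per the notebook table above).
import torch

if not torch.cuda.is_available():
    print("No CUDA GPU detected -- set the Colab runtime type to GPU first.")
else:
    props = torch.cuda.get_device_properties(0)
    total_gb = props.total_memory / 1024**3
    print(f"GPU: {props.name} | total VRAM: {total_gb:.1f} GB")
    if total_gb < 20:
        print("This is probably only enough for the 16x256x256 config.")
```

--------------------------------------------------------------------------------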