├── README.md
└── bria_rmbg_jupyter.ipynb
/README.md:
--------------------------------------------------------------------------------
🐣 Please follow me for new updates https://twitter.com/camenduru
🔥 Please join our discord server https://discord.gg/k5BwmmvJJU
🥳 Please join my patreon community https://patreon.com/camenduru

### 🍊 Jupyter Notebook

| Notebook | Info |
| --- | --- |
| [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/camenduru/bria-rmbg-jupyter/blob/main/bria_rmbg_jupyter.ipynb) | bria_rmbg_jupyter |

### 🧬 Code
https://huggingface.co/spaces/briaai/BRIA-RMBG-1.4/tree/main

### 🌐 Page
https://bria.ai/

### 📦 Model
https://huggingface.co/briaai/RMBG-1.4

### 🖼 Output

### 🏢 Sponsor
https://replicate.com

--------------------------------------------------------------------------------
/bria_rmbg_jupyter.ipynb:
--------------------------------------------------------------------------------
{
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github"
},
"source": [
"[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/camenduru/bria-rmbg-jupyter/blob/main/bria_rmbg_jupyter.ipynb)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "VjYy0F2gZIPR"
},
"outputs": [],
"source": [
20 | "!pip install -q gradio\n",
21 | "!wget https://github.com/camenduru/BRIA-RMBG-1.4-hf/raw/main/briarmbg.py -O briarmbg.py\n",
22 | "!wget https://github.com/camenduru/BRIA-RMBG-1.4-hf/raw/main/input.jpg -O input.jpg\n",
23 | "\n",
24 | "import numpy as np\n",
25 | "import torch\n",
26 | "import torch.nn.functional as F\n",
27 | "from torchvision.transforms.functional import normalize\n",
28 | "from huggingface_hub import hf_hub_download\n",
29 | "import gradio as gr\n",
30 | "from briarmbg import BriaRMBG\n",
31 | "import PIL\n",
32 | "from PIL import Image\n",
33 | "from typing import Tuple\n",
34 | "\n",
35 | "net=BriaRMBG()\n",
36 | "model_path = hf_hub_download(\"briaai/RMBG-1.4\", 'model.pth')\n",
37 | "if torch.cuda.is_available():\n",
38 | " net.load_state_dict(torch.load(model_path))\n",
39 | " net=net.cuda()\n",
40 | "else:\n",
41 | " net.load_state_dict(torch.load(model_path,map_location=\"cpu\"))\n",
42 | "net.eval() \n",
43 | " \n",
44 | "def resize_image(image):\n",
45 | " image = image.convert('RGB')\n",
46 | " model_input_size = (1024, 1024)\n",
47 | " image = image.resize(model_input_size, Image.BILINEAR)\n",
48 | " return image\n",
49 | "\n",
50 | "def process(image):\n",
51 | " orig_image = Image.fromarray(image)\n",
52 | " w,h = orig_im_size = orig_image.size\n",
53 | " image = resize_image(orig_image)\n",
54 | " im_np = np.array(image)\n",
55 | " im_tensor = torch.tensor(im_np, dtype=torch.float32).permute(2,0,1)\n",
56 | " im_tensor = torch.unsqueeze(im_tensor,0)\n",
57 | " im_tensor = torch.divide(im_tensor,255.0)\n",
58 | " im_tensor = normalize(im_tensor,[0.5,0.5,0.5],[1.0,1.0,1.0])\n",
59 | " if torch.cuda.is_available():\n",
60 | " im_tensor=im_tensor.cuda()\n",
61 | "\n",
62 | " result=net(im_tensor)\n",
63 | " result = torch.squeeze(F.interpolate(result[0][0], size=(h,w), mode='bilinear') ,0)\n",
64 | " ma = torch.max(result)\n",
65 | " mi = torch.min(result)\n",
66 | " result = (result-mi)/(ma-mi) \n",
67 | " im_array = (result*255).cpu().data.numpy().astype(np.uint8)\n",
68 | " pil_im = Image.fromarray(np.squeeze(im_array))\n",
69 | " new_im = Image.new(\"RGBA\", pil_im.size, (0,0,0,0))\n",
70 | " new_im.paste(orig_image, mask=pil_im)\n",
71 | " return new_im\n",
72 | "\n",
73 | "gr.Markdown(\"## BRIA RMBG 1.4\")\n",
74 | "gr.HTML('''\n",
75 | "
\n", 76 | " This is a demo for BRIA RMBG 1.4 that using\n", 77 | " BRIA RMBG-1.4 image matting model as backbone. \n", 78 | "
\n", 79 | "''')\n", 80 | "title = \"Background Removal\"\n", 81 | "description = r\"\"\"Background removal model developed by BRIA.AI, trained on a carefully selected dataset and is available as an open-source model for non-commercial use.