├── .gitignore ├── preload.py ├── cd_modules ├── compression.py ├── extra_networks_deltas.py ├── ui_extra_networks_deltas.py ├── deltas.py └── custom_diffusion.py ├── README.md ├── cli_scripts └── make_reg.py ├── scripts └── main.py └── LICENSE /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | -------------------------------------------------------------------------------- /preload.py: -------------------------------------------------------------------------------- 1 | from modules.paths import models_path 2 | import os 3 | 4 | def preload(parser): 5 | parser.add_argument("--deltas-dir", type=str, help="finetuned delta weights directory", default=os.path.join(models_path, 'deltas')) 6 | -------------------------------------------------------------------------------- /cd_modules/compression.py: -------------------------------------------------------------------------------- 1 | import torch 2 | 3 | def decompose(A, top_sum=0.5): 4 | '''Low rank approximation of a 2D tensor, keeping only 5 | largest singular values that sums to topsum * (original sum)''' 6 | U, S, Vh = torch.linalg.svd(A.float(), full_matrices=False) 7 | r = max(1, sum(torch.cumsum(S, 0) < sum(S) * top_sum * 1.0001)) # 1.0001 because floats 8 | return U[:, :r] @ torch.diag(S)[:r, :r], Vh[:r] 9 | -------------------------------------------------------------------------------- /cd_modules/extra_networks_deltas.py: -------------------------------------------------------------------------------- 1 | from modules import extra_networks, sd_hijack 2 | from cd_modules import deltas 3 | 4 | # Swapping in and out weights for every Processing 5 | # Not the most efficient method, might change later 6 | class ExtraNetworkDelta(extra_networks.ExtraNetwork): 7 | def __init__(self): 8 | super().__init__('delta') 9 | self.backup = {} 10 | 11 | 12 | def activate(self, p, params_list): 13 | deltas.Delta.refresh() 14 | for params in params_list: 15 | params.items[1:] = params.items[1:] or [1] 16 | delta_name, strength = params.items 17 | strength = float(strength) 18 | delta = deltas.Delta(path=deltas.Delta.deltas[delta_name]) 19 | delta.apply(strength, p.sd_model, self.backup) 20 | 21 | 22 | def deactivate(self, p): 23 | deltas.Delta.restore(p.sd_model, self.backup) 24 | self.backup = {} 25 | -------------------------------------------------------------------------------- /cd_modules/ui_extra_networks_deltas.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | 4 | from modules import ui_extra_networks, shared 5 | from cd_modules import deltas 6 | 7 | class ExtraNetworksPageDeltas(ui_extra_networks.ExtraNetworksPage): 8 | def __init__(self): 9 | super().__init__('Deltas') 10 | 11 | def refresh(self): 12 | deltas.Delta.refresh() 13 | 14 | def list_items(self): 15 | for name, path in deltas.Delta.deltas.items(): 16 | path, ext = os.path.splitext(path) 17 | previews = [path + ".png", path + ".preview.png"] 18 | 19 | preview = None 20 | for file in previews: 21 | if os.path.isfile(file): 22 | preview = "./file=" + file.replace('\\', '/') + "?mtime=" + str(os.path.getmtime(file)) 23 | break 24 | 25 | yield { 26 | "name": name, 27 | "filename": path, 28 | "preview": preview, 29 | "prompt": json.dumps(f""), 30 | "local_preview": path + ".png", 31 | } 32 | 33 | def allowed_directories_for_previews(self): 34 | return [shared.cmd_opts.deltas_dir] 35 | 36 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Custom Diffusion WebUI 2 | 3 | An unofficial extension that implements [Custom Diffusion](https://www.cs.cmu.edu/~custom-diffusion/) for [Automatic1111's WebUI](https://github.com/AUTOMATIC1111/stable-diffusion-webui). 4 | 5 | ## What is Custom Diffusion 6 | 7 | [Custom Diffusion](https://www.cs.cmu.edu/~custom-diffusion/) is, in short, finetuning-lite with TI. Instead of tuning the whole model, only the K and V matrices of the cross-attention blocks are tuned simultaneously with token embedding(s). It has similar speed and memory requirements to TI and supposedly gives better results in less steps. 8 | 9 | ## How to use this 10 | 11 | ### Training 12 | You can find the UI in the `Train/Train Custom Diffusion` tab. Just train as you would a normal TI embedding. Under the training log directory, alongside with `name-steps.pt` you should also see `name-steps.delta.safetensors`, which contain finetuned delta weights (~50MB at half precision uncompressed). 13 | 14 | #### Regularization images 15 | Custom Diffusion proper includes regularization. To generate regularization images, go to `Custom Diffusion Utils/Make regularization images`. You can then optionally supply the generated images directory as regularization when training. 16 | 17 | 18 | ### Using trained weights 19 | The trained deltas will be under `models/deltas` (`--deltas-dir`); you can also copy over logged `.safetensors` versions. The delta weights can be used in txt2img/img2img as an Extra Network. You can select them under the extra networks tab like hypernets. Use the token embedding like a normal TI embedding. 20 | 21 | ## Disclaimer 22 | This is an unofficial implementation based on [the paper](https://arxiv.org/abs/2212.04488) and the features and implementation details may be different to [the original](https://github.com/adobe-research/custom-diffusion). 23 | 24 | ## Todo (roughly ordered by priority) 25 | - [x] UI/UX 26 | - [ ] More testing and demo 27 | - [x] Separate lr for embedding and model weights 28 | - [x] Blending (simple linear combination of deltas) 29 | - [ ] Merging (optimization based from paper) 30 | - [x] Compression 31 | - [ ] Let users choose what weights to finetune 32 | - [x] Regularization 33 | - [ ] Multi-concept training 34 | -------------------------------------------------------------------------------- /cd_modules/deltas.py: -------------------------------------------------------------------------------- 1 | import os 2 | import glob 3 | from modules.paths import models_path 4 | from safetensors.torch import safe_open, save_file 5 | from cd_modules.compression import decompose 6 | import torch 7 | import json 8 | 9 | class Delta(dict): 10 | deltas = {} 11 | 12 | @classmethod 13 | def refresh(cls): 14 | cls.deltas = cls.list_deltas() 15 | 16 | @staticmethod 17 | def list_deltas(): 18 | res = {} 19 | for filename in sorted( 20 | glob.iglob( 21 | os.path.join(models_path, "deltas", "**/*.safetensors"), recursive=True 22 | ) 23 | ): 24 | name = os.path.splitext(os.path.basename(filename))[0] 25 | res[name] = filename 26 | return res 27 | 28 | @staticmethod 29 | def restore(model, backup): 30 | with torch.no_grad(): 31 | for k, v in model.named_parameters(): 32 | if k in backup: 33 | v[:] = backup[k] 34 | 35 | 36 | def __init__(self, *, path=None, tensors=None): 37 | assert (path is None) != (tensors is None) 38 | if path is not None: 39 | st = safe_open(path, 'pt') 40 | metadata = json.loads(st.metadata()['json']) 41 | entries = metadata['entries'] 42 | self.metadata = metadata['meta'] 43 | def get_entry(k): 44 | if entries[k] == 'delta': 45 | d = st.get_tensor(k) 46 | elif entries[k] == 'delta_factors': 47 | d = st.get_tensor(k+'.US').float() @ st.get_tensor(k+'.Vh').float() 48 | else: 49 | raise ValueError(f'Unknown format: {entries[k]}') 50 | return d 51 | self.update(((k, get_entry(k)) for k in entries)) 52 | else: 53 | self.update(tensors) 54 | self.metadata = {'version': '0.2.0'} 55 | 56 | def apply(self, strength, model, backup): 57 | for k, v in model.named_parameters(): 58 | if k not in self: 59 | continue 60 | if k not in backup: 61 | backup[k] = v.detach().clone() 62 | with torch.no_grad(): 63 | v[:] = v.detach() + self[k].to(v.device) * strength 64 | 65 | def save(self, path, fmt='delta', top_sum=None): 66 | metadata = {'meta': self.metadata, 'entries': {k: fmt for k in self}} 67 | if fmt == 'delta': 68 | tensors = self 69 | elif fmt == 'delta_factors': 70 | tensors = {} 71 | for k, v in self.items(): 72 | tensors[k+'.US'], tensors[k+'.Vh'] = map(lambda a: a.half().contiguous(), 73 | decompose(v, top_sum)) 74 | else: 75 | raise ValueError(f'unknown storage format: {fmt}') 76 | save_file(tensors, path, {'json': json.dumps(metadata)}) 77 | 78 | -------------------------------------------------------------------------------- /cli_scripts/make_reg.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from pathlib import Path 3 | 4 | if __name__ == "__main__": 5 | sys.path.extend(str(Path(__file__).parent / x) for x in ["../", "../../../"]) 6 | argv = [*sys.argv] 7 | sys.argv = ["webui.py", "--disable-console-progressbars"] 8 | 9 | import os 10 | import tqdm 11 | from modules import shared, processing 12 | from PIL import Image 13 | import re 14 | import random 15 | 16 | 17 | def _get_gen_params( 18 | *, 19 | n_images: str, 20 | data_root: str, 21 | template_file: str, 22 | placeholder_token: str, 23 | shuffle_tags: bool, 24 | ): 25 | with open(template_file, "r") as file: 26 | lines = [x.strip() for x in file.readlines()] 27 | assert data_root, "dataset directory not specified" 28 | assert os.path.isdir(data_root), "Dataset directory doesn't exist" 29 | assert os.listdir(data_root), "Dataset directory is empty" 30 | 31 | image_paths = [ 32 | os.path.join(data_root, file_path) for file_path in os.listdir(data_root) 33 | ] 34 | print("Preparing dataset...") 35 | ds = [] 36 | re_numbers_at_start = re.compile(r"^[-\d]+\s*") 37 | re_word = ( 38 | re.compile(shared.opts.dataset_filename_word_regex) 39 | if len(shared.opts.dataset_filename_word_regex) > 0 40 | else None 41 | ) 42 | for path in tqdm.tqdm(image_paths): 43 | if shared.state.interrupted: 44 | raise Exception("interrupted") 45 | try: 46 | w, h = Image.open(path).size 47 | except Exception: 48 | continue 49 | 50 | text_filename = os.path.splitext(path)[0] + ".txt" 51 | filename = os.path.basename(path) 52 | if os.path.exists(text_filename): 53 | with open(text_filename, "r", encoding="utf8") as file: 54 | filename_text = file.read() 55 | else: 56 | filename_text = os.path.splitext(filename)[0] 57 | filename_text = re.sub(re_numbers_at_start, "", filename_text) 58 | if re_word: 59 | tokens = re_word.findall(filename_text) 60 | filename_text = (shared.opts.dataset_filename_join_string or "").join( 61 | tokens 62 | ) 63 | ds.append((w, h, filename_text)) 64 | 65 | def create_text(filename_text): 66 | text = random.choice(lines) 67 | tags = filename_text.split(",") 68 | if shuffle_tags: 69 | random.shuffle(tags) 70 | text = text.replace("[filewords]", ",".join(tags)) 71 | text = text.replace("[name]", placeholder_token) 72 | return text 73 | 74 | n_rep = int(n_images[:-1]) if n_images[-1] == "x" else -(-int(n_images) // len(ds)) 75 | print(f"To generate {n_rep} * {len(ds)} = {n_rep * len(ds)} images") 76 | return [(w, h, create_text(t)) for w, h, t in ds for _ in range(n_rep)] 77 | 78 | 79 | def make_reg_images( 80 | data_root: str, 81 | n_images: str, 82 | output_path: str, 83 | template_file: str, 84 | shuffle_tags: bool, 85 | placeholder_token: str = "", 86 | ): 87 | """Generate regularization images 88 | 89 | Args: 90 | data_root: Dataset root 91 | n_images: Total number of images, rounded up to the next multiple of the 92 | dataset size. Supply "Nx" for N times the original dataset size. 93 | output_path: Destination of the generated images. 94 | template_file: Texual inversion template file 95 | shuffle_tags: Shuffle comma-delimitted tags 96 | placeholder_token: String to replace [name] with in templates 97 | """ 98 | assert ( 99 | data_root and n_images and output_path and template_file 100 | ), "Missing required input(s)" 101 | if "/" not in template_file and "\\" not in template_file: 102 | from modules.textual_inversion.textual_inversion import ( 103 | textual_inversion_templates, 104 | ) 105 | 106 | template_file = textual_inversion_templates.get(template_file, None).path 107 | params = _get_gen_params( 108 | data_root=data_root, 109 | placeholder_token=placeholder_token, 110 | template_file=template_file, 111 | shuffle_tags=shuffle_tags, 112 | n_images=n_images, 113 | ) 114 | print("generating images") 115 | os.makedirs(output_path, exist_ok=True) 116 | for i, (w, h, prompt) in tqdm.tqdm(enumerate(params)): 117 | p = processing.StableDiffusionProcessingTxt2Img( 118 | sd_model=shared.sd_model, 119 | do_not_save_grid=True, 120 | do_not_save_samples=True, 121 | do_not_reload_embeddings=True, 122 | prompt=prompt, 123 | width=w, 124 | height=h, 125 | steps=20, 126 | sampler_name="DPM++ 2M", 127 | ) 128 | processed = processing.process_images(p) 129 | image = processed.images[0] 130 | stem = f"{i+1:05d}" 131 | image.save(str(Path(output_path) / f"{stem}.png")) 132 | with open(Path(output_path) / f"{stem}.txt", "w") as f: 133 | f.write(prompt) 134 | print("done") 135 | return f"{len(params)} images saved to {output_path}", "" 136 | 137 | 138 | if __name__ == "__main__": 139 | from launch import run_pip 140 | 141 | run_pip("install fire", "fire") 142 | import webui 143 | 144 | webui.initialize() 145 | sys.argv = argv 146 | import fire 147 | 148 | fire.Fire(make_reg_images) 149 | -------------------------------------------------------------------------------- /scripts/main.py: -------------------------------------------------------------------------------- 1 | import modules.scripts as scripts 2 | import gradio as gr 3 | from modules.ui import create_refresh_button 4 | from modules.paths import models_path 5 | from modules.script_callbacks import on_ui_train_tabs, on_ui_tabs 6 | from modules.ui_components import FormRow 7 | import modules.sd_models as sd_models 8 | import glob 9 | import os 10 | from modules import shared, sd_hijack, extra_networks, ui_extra_networks 11 | from modules.textual_inversion import textual_inversion 12 | from modules.call_queue import wrap_gradio_gpu_call 13 | from safetensors import safe_open 14 | import cd_modules 15 | import torch 16 | import modules 17 | import cd_modules.custom_diffusion 18 | import cd_modules.ui_extra_networks_deltas 19 | import cd_modules.extra_networks_deltas 20 | import cd_modules.deltas 21 | import cli_scripts.make_reg 22 | 23 | 24 | class Script(scripts.Script): 25 | def title(self): 26 | return "Custom Diffusion" 27 | 28 | def show(self, is_img2img): 29 | return False 30 | 31 | # monkeypatch initialize's as there's currently no API to register new networks 32 | ui_extra_networks_initialize_bak = ui_extra_networks.intialize 33 | def ui_extra_networks_initialize_patched(): 34 | ui_extra_networks_initialize_bak() 35 | ui_extra_networks.register_page(cd_modules.ui_extra_networks_deltas.ExtraNetworksPageDeltas()) 36 | print('patched in extra network ui page: deltas') 37 | ui_extra_networks.intialize = ui_extra_networks_initialize_patched 38 | 39 | extra_networks_initialize_bak = extra_networks.initialize 40 | def extra_networks_initialize_patched(): 41 | extra_networks_initialize_bak() 42 | extra_networks.register_extra_network(cd_modules.extra_networks_deltas.ExtraNetworkDelta()) 43 | print('patched in extra network: deltas') 44 | extra_networks.initialize = extra_networks_initialize_patched 45 | 46 | 47 | def get_textual_inversion_template_names(): 48 | return sorted([x for x in textual_inversion.textual_inversion_templates]) 49 | 50 | 51 | def train_tabs_callback(ui_train_tab_params): 52 | with gr.Tab(label="Train Custom Diffusion"): 53 | with FormRow(): 54 | train_embedding_name = gr.Dropdown( 55 | label="Embedding", 56 | elem_id="train_embedding", 57 | choices=sorted( 58 | sd_hijack.model_hijack.embedding_db.word_embeddings.keys() 59 | ), 60 | ) 61 | create_refresh_button( 62 | train_embedding_name, 63 | sd_hijack.model_hijack.embedding_db.load_textual_inversion_embeddings, 64 | lambda: { 65 | "choices": sorted( 66 | sd_hijack.model_hijack.embedding_db.word_embeddings.keys() 67 | ) 68 | }, 69 | "refresh_train_embedding_name", 70 | ) 71 | 72 | with FormRow(): 73 | embedding_learn_rate = gr.Textbox( 74 | label="Embedding Learning rate", 75 | placeholder="Embedding Learning rate", 76 | value="0.005", 77 | elem_id="train_embedding_learn_rate", 78 | ) 79 | kv_learn_rate = gr.Textbox(label='KV Learning rate', placeholder="KV Learning rate", value="1e-5", elem_id="train_kv_learn_rate") 80 | 81 | with FormRow(): 82 | clip_grad_mode = gr.Dropdown( 83 | value="disabled", 84 | label="Gradient Clipping", 85 | choices=["disabled", "value", "norm"], 86 | ) 87 | clip_grad_value = gr.Textbox( 88 | placeholder="Gradient clip value", value="0.1", show_label=False 89 | ) 90 | 91 | with FormRow(): 92 | batch_size = gr.Number( 93 | label="Batch size", value=1, precision=0, elem_id="train_batch_size" 94 | ) 95 | gradient_step = gr.Number( 96 | label="Gradient accumulation steps", 97 | value=1, 98 | precision=0, 99 | elem_id="train_gradient_step", 100 | ) 101 | 102 | dataset_directory = gr.Textbox( 103 | label="Dataset directory", 104 | placeholder="Path to directory with input images", 105 | elem_id="train_dataset_directory", 106 | ) 107 | with FormRow(): 108 | reg_dataset_directory = gr.Textbox( 109 | label="Regularization dataset directory (optional)", 110 | placeholder="Path to directory reg images", 111 | ) 112 | prior_loss_weight = gr.Slider( 113 | label="Prior-preservation loss weight", 114 | value=1., 115 | minimum=0., 116 | maximum=10., 117 | ) 118 | log_directory = gr.Textbox( 119 | label="Log directory", 120 | placeholder="Path to directory where to write outputs", 121 | value="textual_inversion", 122 | elem_id="train_log_directory", 123 | ) 124 | 125 | with FormRow(): 126 | template_file = gr.Dropdown( 127 | label="Prompt template", 128 | value="style_filewords.txt", 129 | elem_id="train_template_file", 130 | choices=get_textual_inversion_template_names(), 131 | ) 132 | create_refresh_button( 133 | template_file, 134 | textual_inversion.list_textual_inversion_templates, 135 | lambda: {"choices": get_textual_inversion_template_names()}, 136 | "refrsh_train_template_file", 137 | ) 138 | 139 | training_width = gr.Slider( 140 | minimum=64, 141 | maximum=2048, 142 | step=8, 143 | label="Width", 144 | value=512, 145 | elem_id="train_training_width", 146 | ) 147 | training_height = gr.Slider( 148 | minimum=64, 149 | maximum=2048, 150 | step=8, 151 | label="Height", 152 | value=512, 153 | elem_id="train_training_height", 154 | ) 155 | varsize = gr.Checkbox( 156 | label="Do not resize images", value=False, elem_id="train_varsize" 157 | ) 158 | steps = gr.Number( 159 | label="Max steps", value=100000, precision=0, elem_id="train_steps" 160 | ) 161 | 162 | with FormRow(): 163 | create_image_every = gr.Number( 164 | label="Save an image to log directory every N steps, 0 to disable", 165 | value=500, 166 | precision=0, 167 | elem_id="train_create_image_every", 168 | ) 169 | save_embedding_every = gr.Number( 170 | label="Save a copy of embedding to log directory every N steps, 0 to disable", 171 | value=500, 172 | precision=0, 173 | elem_id="train_save_embedding_every", 174 | ) 175 | 176 | save_image_with_stored_embedding = gr.Checkbox( 177 | label="Save images with embedding in PNG chunks", 178 | value=True, 179 | elem_id="train_save_image_with_stored_embedding", 180 | ) 181 | preview_from_txt2img = gr.Checkbox( 182 | label="Read parameters (prompt, etc...) from txt2img tab when making previews", 183 | value=False, 184 | elem_id="train_preview_from_txt2img", 185 | ) 186 | 187 | shuffle_tags = gr.Checkbox( 188 | label="Shuffle tags by ',' when creating prompts.", 189 | value=False, 190 | elem_id="train_shuffle_tags", 191 | ) 192 | tag_drop_out = gr.Slider( 193 | minimum=0, 194 | maximum=1, 195 | step=0.1, 196 | label="Drop out tags when creating prompts.", 197 | value=0, 198 | elem_id="train_tag_drop_out", 199 | ) 200 | 201 | latent_sampling_method = gr.Radio( 202 | label="Choose latent sampling method", 203 | value="once", 204 | choices=["once", "deterministic", "random"], 205 | elem_id="train_latent_sampling_method", 206 | ) 207 | 208 | top_sum = gr.Slider( 209 | minimum=0, 210 | maximum=1, 211 | step=0.01, 212 | label="Low-rank approximation sum threshold (lower value means smaller file size, 1 to disable)", 213 | value=0.5, 214 | elem_id="train_top_sum", 215 | ) 216 | 217 | with gr.Row(): 218 | train_embedding = gr.Button( 219 | value="Train Embedding", 220 | variant="primary", 221 | elem_id="train_train_embedding", 222 | ) 223 | interrupt_training = gr.Button( 224 | value="Interrupt", elem_id="train_interrupt_training" 225 | ) 226 | dummy_component = gr.Label(visible=False) 227 | with gr.Column(elem_id='ti_gallery_container'): 228 | ti_output = gr.Text(value="", show_label=False) 229 | ti_outcome = gr.HTML(value="") 230 | 231 | train_embedding.click( 232 | fn=wrap_gradio_gpu_call(cd_modules.custom_diffusion.train_embedding, extra_outputs=[gr.update()]), 233 | _js="start_training_textual_inversion", 234 | inputs=[ 235 | dummy_component, 236 | train_embedding_name, 237 | embedding_learn_rate, 238 | batch_size, 239 | gradient_step, 240 | dataset_directory, 241 | reg_dataset_directory, 242 | prior_loss_weight, 243 | log_directory, 244 | training_width, 245 | training_height, 246 | varsize, 247 | steps, 248 | clip_grad_mode, 249 | clip_grad_value, 250 | shuffle_tags, 251 | tag_drop_out, 252 | latent_sampling_method, 253 | create_image_every, 254 | save_embedding_every, 255 | template_file, 256 | save_image_with_stored_embedding, 257 | preview_from_txt2img, 258 | kv_learn_rate, 259 | top_sum, 260 | *ui_train_tab_params.txt2img_preview_params, 261 | ], 262 | outputs=[ 263 | ti_output, 264 | ti_outcome, 265 | ] 266 | ) 267 | 268 | interrupt_training.click( 269 | fn=lambda: shared.state.interrupt(), 270 | inputs=[], 271 | outputs=[], 272 | ) 273 | 274 | 275 | on_ui_train_tabs(train_tabs_callback) 276 | 277 | def btn_compress_click(delta_name, top_sum, custom_name): 278 | if not delta_name: 279 | return "Error: delta not selected" 280 | from safetensors import safe_open 281 | from safetensors.torch import save_file 282 | from cd_modules.compression import decompose 283 | import json 284 | from pathlib import Path 285 | orig_path = cd_modules.deltas.Delta.deltas[delta_name] 286 | st = safe_open(orig_path, 'pt') 287 | metadata = json.loads(st.metadata()['json']) 288 | entries = metadata['entries'] 289 | tensors = {} 290 | for k, v in entries.items(): 291 | if v == 'delta': 292 | d = st.get_tensor(k) 293 | elif v == 'delta_factors': 294 | print('Warning: compressing already factored delta') 295 | d = st.get_tensor(k+'.US').float() @ st.get_tensor(k+'.Vh').float() 296 | else: 297 | return 'Error: Unknown format: {v}' 298 | tensors[k+'.US'], tensors[k+'.Vh'] = map(lambda a: a.half().contiguous(), 299 | decompose(d, top_sum)) 300 | metadata = {'meta': {'version': '0.2.0'}, 'entries': {k: 'delta_factors' for k in entries}} 301 | p = Path(orig_path) 302 | new_path = str(p.parent / ((custom_name or p.stem + f'.lora{int(100 * top_sum)}') + p.suffix)) 303 | save_file(tensors, new_path, {'json': json.dumps(metadata)}) 304 | return f'Compressed delta saved to {new_path}' 305 | 306 | 307 | def ui_tabs_callback(): 308 | with gr.Blocks() as cd: 309 | with gr.Row().style(equal_height=False): 310 | with gr.Column(variant='compact'): 311 | with gr.Blocks(): 312 | with gr.Tab("Compress"): 313 | with gr.Row(): 314 | delta_name = gr.Dropdown( 315 | list(cd_modules.deltas.Delta.deltas.keys()), label="Delta" 316 | ) 317 | create_refresh_button(delta_name, cd_modules.deltas.Delta.refresh, 318 | lambda: dict(choices=list(cd_modules.deltas.Delta.deltas.keys())), 'refresh_deltas') 319 | top_sum = gr.Slider( 320 | minimum=0, 321 | maximum=1, 322 | step=0.01, 323 | label="Low-rank approximation sum threshold (lower value means smaller file size, 1 to disable)", 324 | value=0.5, 325 | elem_id="train_top_sum", 326 | ) 327 | custom_name = gr.Textbox(label="Custom Name (Optional)") 328 | btn_compress = gr.Button( 329 | value="Compress", 330 | variant="primary", 331 | elem_id="btn_compress", 332 | ) 333 | with gr.Tab("Merge"): 334 | gr.Markdown("Coming soon") 335 | with gr.Tab("Make regularization images"): 336 | with FormRow(): 337 | data_root = gr.Textbox(label="Dataset root") 338 | output_path = gr.Textbox(label="Destination of the generated images") 339 | n_images = gr.Textbox(label='Total number of images. Will be rounded up to the next multiple of the dataset size', placeholder='A number or "Nx" for N times the original dataset size.') 340 | shuffle_tags = gr.Checkbox(label="Shuffle comma-delimitted tags") 341 | 342 | with FormRow(): 343 | template_file = gr.Dropdown( 344 | label="Prompt template", 345 | value="style_filewords.txt", 346 | elem_id="train_template_file2", 347 | choices=get_textual_inversion_template_names(), 348 | ) 349 | create_refresh_button( 350 | template_file, 351 | textual_inversion.list_textual_inversion_templates, 352 | lambda: {"choices": get_textual_inversion_template_names()}, 353 | "refrsh_train_template_file2", 354 | ) 355 | placeholder_token = gr.Textbox(label="String to replace [name] with in templates") 356 | btn_make_reg = gr.Button(value="Generate images", variant="primary") 357 | 358 | with gr.Column(variant='compact'): 359 | cd_output = gr.Text(show_label=False) 360 | cd_outcome = gr.HTML() 361 | btn_compress.click(btn_compress_click, [delta_name, top_sum, custom_name], [cd_output]) 362 | btn_make_reg.click(wrap_gradio_gpu_call(cli_scripts.make_reg.make_reg_images, extra_outputs=[gr.update()]), [data_root, n_images, output_path, template_file, shuffle_tags, placeholder_token], [cd_output, cd_outcome]) 363 | 364 | return [(cd, 'Custom Diffusion Utils', 'cdblock')] 365 | 366 | 367 | on_ui_tabs(ui_tabs_callback) 368 | -------------------------------------------------------------------------------- /cd_modules/custom_diffusion.py: -------------------------------------------------------------------------------- 1 | from modules.textual_inversion.textual_inversion import * 2 | 3 | import os 4 | import sys 5 | import traceback 6 | import datetime 7 | 8 | import torch 9 | import tqdm 10 | import html 11 | import safetensors.torch 12 | 13 | import numpy as np 14 | from PIL import PngImagePlugin 15 | from torch.utils.tensorboard import SummaryWriter 16 | 17 | from modules import shared, devices, sd_hijack, processing, sd_models, images, sd_samplers, sd_hijack_checkpoint, errors 18 | import modules.textual_inversion.dataset 19 | from modules.textual_inversion.learn_schedule import LearnRateScheduler 20 | 21 | from modules.textual_inversion.image_embedding import embedding_to_b64, embedding_from_b64, insert_image_data_embed, extract_image_data_embed, caption_image_overlay 22 | from modules.textual_inversion.logging import save_settings_to_file 23 | 24 | from cd_modules.deltas import Delta 25 | 26 | def train_embedding(*args): 27 | assert not shared.cmd_opts.lowvram, 'Training models with lowvram not possible' 28 | 29 | apply_optimizations = shared.opts.training_xattention_optimizations 30 | try: 31 | if not apply_optimizations: 32 | sd_hijack.undo_optimizations() 33 | 34 | embedding, filename = _train_embedding(*args) 35 | 36 | res = f""" 37 | Training {'interrupted' if shared.state.interrupted else 'finished'} at {embedding.step} steps. 38 | Embedding saved to {html.escape(filename)} 39 | """ 40 | return res, "" 41 | except Exception: 42 | raise 43 | finally: 44 | if not apply_optimizations: 45 | sd_hijack.apply_optimizations() 46 | 47 | def _train_embedding(id_task, embedding_name, learn_rate, batch_size, gradient_step, data_root, reg_root, prior_loss_weight, log_directory, training_width, training_height, varsize, steps, clip_grad_mode, clip_grad_value, shuffle_tags, tag_drop_out, latent_sampling_method, create_image_every, save_embedding_every, template_filename, save_image_with_stored_embedding, preview_from_txt2img, kv_learn_rate, top_sum, preview_prompt, preview_negative_prompt, preview_steps, preview_sampler_index, preview_cfg_scale, preview_seed, preview_width, preview_height): 48 | save_embedding_every = save_embedding_every or 0 49 | create_image_every = create_image_every or 0 50 | template_file = textual_inversion_templates.get(template_filename, None) 51 | validate_train_inputs(embedding_name, learn_rate, batch_size, gradient_step, data_root, template_file, template_filename, steps, save_embedding_every, create_image_every, log_directory, name="embedding") 52 | template_file = template_file.path 53 | 54 | shared.state.job = "train-embedding" 55 | shared.state.textinfo = "Initializing textual inversion training..." 56 | shared.state.job_count = steps 57 | 58 | filename = os.path.join(shared.cmd_opts.embeddings_dir, f'{embedding_name}.pt') 59 | os.makedirs(shared.cmd_opts.deltas_dir, exist_ok=True) 60 | kv_filename = os.path.join(shared.cmd_opts.deltas_dir, f'{embedding_name}.delta.safetensors') 61 | 62 | log_directory = os.path.join(log_directory, datetime.datetime.now().strftime("%Y-%m-%d"), embedding_name) 63 | unload = shared.opts.unload_models_when_training 64 | 65 | if save_embedding_every > 0: 66 | embedding_dir = os.path.join(log_directory, "embeddings") 67 | os.makedirs(embedding_dir, exist_ok=True) 68 | else: 69 | embedding_dir = None 70 | 71 | if create_image_every > 0: 72 | images_dir = os.path.join(log_directory, "images") 73 | os.makedirs(images_dir, exist_ok=True) 74 | else: 75 | images_dir = None 76 | 77 | if create_image_every > 0 and save_image_with_stored_embedding: 78 | images_embeds_dir = os.path.join(log_directory, "image_embeddings") 79 | os.makedirs(images_embeds_dir, exist_ok=True) 80 | else: 81 | images_embeds_dir = None 82 | 83 | hijack = sd_hijack.model_hijack 84 | 85 | embedding = hijack.embedding_db.word_embeddings[embedding_name] 86 | checkpoint = sd_models.select_checkpoint() 87 | 88 | initial_step = embedding.step or 0 89 | if initial_step >= steps: 90 | shared.state.textinfo = "Model has already been trained beyond specified max steps" 91 | return embedding, filename 92 | 93 | scheduler = LearnRateScheduler(learn_rate, steps, initial_step) 94 | kv_scheduler = LearnRateScheduler(kv_learn_rate, steps, initial_step) 95 | clip_grad = torch.nn.utils.clip_grad_value_ if clip_grad_mode == "value" else \ 96 | torch.nn.utils.clip_grad_norm_ if clip_grad_mode == "norm" else \ 97 | None 98 | if clip_grad: 99 | clip_grad_sched = LearnRateScheduler(clip_grad_value, steps, initial_step, verbose=False) 100 | # dataset loading may take a while, so input validations and early returns should be done before this 101 | shared.state.textinfo = f"Preparing dataset from {html.escape(data_root)}..." 102 | old_parallel_processing_allowed = shared.parallel_processing_allowed 103 | 104 | if shared.opts.training_enable_tensorboard: 105 | tensorboard_writer = tensorboard_setup(log_directory) 106 | 107 | pin_memory = shared.opts.pin_memory 108 | 109 | ds = modules.textual_inversion.dataset.PersonalizedBase(data_root=data_root, width=training_width, height=training_height, repeats=shared.opts.training_image_repeats_per_epoch, placeholder_token=embedding_name, model=shared.sd_model, cond_model=shared.sd_model.cond_stage_model, device=devices.device, template_file=template_file, batch_size=batch_size, gradient_step=gradient_step, shuffle_tags=shuffle_tags, tag_drop_out=tag_drop_out, latent_sampling_method=latent_sampling_method, varsize=varsize) 110 | ds_reg = None 111 | if reg_root: 112 | import tempfile 113 | with tempfile.TemporaryDirectory() as d: 114 | with open(os.path.join(d, 'tmp.txt'), 'w') as f: 115 | f.write('[filewords]') 116 | ds_reg = modules.textual_inversion.dataset.PersonalizedBase(data_root=reg_root, width=training_width, height=training_height, repeats=shared.opts.training_image_repeats_per_epoch, placeholder_token='', model=shared.sd_model, cond_model=shared.sd_model.cond_stage_model, device=devices.device, template_file=f.name, batch_size=batch_size, gradient_step=gradient_step, shuffle_tags=shuffle_tags, tag_drop_out=tag_drop_out, latent_sampling_method=latent_sampling_method, varsize=varsize) 117 | 118 | if shared.opts.save_training_settings_to_txt: 119 | save_settings_to_file(log_directory, {**dict(model_name=checkpoint.model_name, model_hash=checkpoint.shorthash, num_of_dataset_images=len(ds), num_vectors_per_token=len(embedding.vec)), **locals()}) 120 | 121 | latent_sampling_method = ds.latent_sampling_method 122 | 123 | dl = modules.textual_inversion.dataset.PersonalizedDataLoader(ds, latent_sampling_method=latent_sampling_method, batch_size=ds.batch_size, pin_memory=pin_memory) 124 | dl_reg = ds_reg and modules.textual_inversion.dataset.PersonalizedDataLoader(ds_reg, latent_sampling_method=latent_sampling_method, batch_size=ds_reg.batch_size, pin_memory=pin_memory) 125 | from itertools import chain, count 126 | reg_batch_iter = dl_reg and chain.from_iterable(dl_reg for _ in count()) 127 | 128 | if unload: 129 | shared.parallel_processing_allowed = False 130 | shared.sd_model.first_stage_model.to(devices.cpu) 131 | 132 | embedding.vec.requires_grad = True 133 | kvs = {n: p for n, p in shared.sd_model.named_parameters() if '2.to_k' in n or '2.to_v' in n} 134 | kvs_bak = {n: p.detach().clone() for n, p in kvs.items()} 135 | 136 | optimizer = torch.optim.AdamW([embedding.vec], lr=scheduler.learn_rate, weight_decay=0.0) 137 | kv_optimzer = torch.optim.AdamW(kvs.values(), lr=kv_scheduler.learn_rate, weight_decay=0.0, eps=1e-5) 138 | if shared.opts.save_optimizer_state: 139 | optimizer_state_dict = None 140 | if os.path.exists(filename + '.optim'): 141 | optimizer_saved_dict = torch.load(filename + '.optim', map_location='cpu') 142 | if embedding.checksum() == optimizer_saved_dict.get('hash', None): 143 | optimizer_state_dict = optimizer_saved_dict.get('optimizer_state_dict', None) 144 | 145 | if optimizer_state_dict is not None: 146 | optimizer.load_state_dict(optimizer_state_dict) 147 | print("Loaded existing optimizer from checkpoint") 148 | else: 149 | print("No saved optimizer exists in checkpoint") 150 | 151 | scaler = torch.cuda.amp.GradScaler() 152 | 153 | # force allow_fp16 because pytorch doesn't like scaling fp16 154 | scaler._unscale_grads_bak = scaler._unscale_grads_ 155 | scaler._unscale_grads_ = (lambda optimizer, inv_scale, found_inf, allow_fp16: 156 | scaler._unscale_grads_bak(optimizer, inv_scale, found_inf, True)) 157 | 158 | batch_size = ds.batch_size 159 | gradient_step = ds.gradient_step 160 | # n steps = batch_size * gradient_step * n image processed 161 | steps_per_epoch = len(ds) // batch_size // gradient_step 162 | max_steps_per_epoch = len(ds) // batch_size - (len(ds) // batch_size) % gradient_step 163 | loss_step = 0 164 | _loss_step = 0 #internal 165 | 166 | last_saved_file = "" 167 | last_saved_delta = "" 168 | last_saved_image = "" 169 | forced_filename = "" 170 | embedding_yet_to_be_embedded = False 171 | 172 | is_training_inpainting_model = shared.sd_model.model.conditioning_key in {'hybrid', 'concat'} 173 | img_c = None 174 | 175 | def save_deltas(path): 176 | delta = Delta(tensors={k: kvs[k] - kvs_bak[k] for k in kvs}) 177 | delta.save(path, *([] if top_sum == 1 else ['delta_factors', top_sum])) 178 | 179 | pbar = tqdm.tqdm(total=steps - initial_step) 180 | try: 181 | for i in range((steps-initial_step) * gradient_step): 182 | if scheduler.finished: 183 | break 184 | if shared.state.interrupted: 185 | break 186 | for j, batch in enumerate(dl): 187 | # works as a drop_last=True for gradient accumulation 188 | if j == max_steps_per_epoch: 189 | break 190 | scheduler.apply(optimizer, embedding.step) 191 | kv_scheduler.apply(kv_optimzer, embedding.step) 192 | if scheduler.finished: 193 | break 194 | if shared.state.interrupted: 195 | break 196 | 197 | if clip_grad: 198 | clip_grad_sched.step(embedding.step) 199 | 200 | with devices.autocast(): 201 | def get_loss(batch): 202 | x = batch.latent_sample.to(devices.device, non_blocking=pin_memory) 203 | c = shared.sd_model.cond_stage_model(batch.cond_text) 204 | 205 | if is_training_inpainting_model: 206 | if img_c is None: 207 | img_c = processing.txt2img_image_conditioning(shared.sd_model, c, training_width, training_height) 208 | 209 | cond = {"c_concat": [img_c], "c_crossattn": [c]} 210 | else: 211 | cond = c 212 | 213 | return shared.sd_model(x, cond)[0] / gradient_step 214 | 215 | loss = get_loss(batch) 216 | if reg_batch_iter: 217 | loss += get_loss(next(reg_batch_iter)) * prior_loss_weight 218 | 219 | _loss_step += loss.item() 220 | scaler.scale(loss).backward() 221 | 222 | # go back until we reach gradient accumulation steps 223 | if (j + 1) % gradient_step != 0: 224 | continue 225 | 226 | if clip_grad: 227 | clip_grad(embedding.vec, clip_grad_sched.learn_rate) 228 | 229 | scaler.step(optimizer) 230 | scaler.step(kv_optimzer) 231 | scaler.update() 232 | embedding.step += 1 233 | pbar.update() 234 | optimizer.zero_grad(set_to_none=True) 235 | kv_optimzer.zero_grad(set_to_none=True) 236 | loss_step = _loss_step 237 | _loss_step = 0 238 | 239 | steps_done = embedding.step + 1 240 | 241 | epoch_num = embedding.step // steps_per_epoch 242 | epoch_step = embedding.step % steps_per_epoch 243 | 244 | description = f"Training textual inversion [Epoch {epoch_num}: {epoch_step+1}/{steps_per_epoch}] loss: {loss_step:.7f}" 245 | pbar.set_description(description) 246 | if embedding_dir is not None and steps_done % save_embedding_every == 0: 247 | # Before saving, change name to match current checkpoint. 248 | embedding_name_every = f'{embedding_name}-{steps_done}' 249 | last_saved_file = os.path.join(embedding_dir, f'{embedding_name_every}.pt') 250 | last_saved_delta = os.path.join(embedding_dir, f'{embedding_name_every}.delta.safetensors') 251 | save_embedding(embedding, optimizer, checkpoint, embedding_name_every, last_saved_file, remove_cached_checksum=True) 252 | embedding_yet_to_be_embedded = True 253 | save_deltas(last_saved_delta) 254 | 255 | write_loss(log_directory, "textual_inversion_loss.csv", embedding.step, steps_per_epoch, { 256 | "loss": f"{loss_step:.7f}", 257 | "learn_rate": scheduler.learn_rate, 258 | "kv_learn_rate": kv_scheduler.learn_rate, 259 | }) 260 | 261 | if images_dir is not None and steps_done % create_image_every == 0: 262 | forced_filename = f'{embedding_name}-{steps_done}' 263 | last_saved_image = os.path.join(images_dir, forced_filename) 264 | 265 | shared.sd_model.first_stage_model.to(devices.device) 266 | 267 | p = processing.StableDiffusionProcessingTxt2Img( 268 | sd_model=shared.sd_model, 269 | do_not_save_grid=True, 270 | do_not_save_samples=True, 271 | do_not_reload_embeddings=True, 272 | ) 273 | 274 | if preview_from_txt2img: 275 | p.prompt = preview_prompt 276 | p.negative_prompt = preview_negative_prompt 277 | p.steps = preview_steps 278 | p.sampler_name = sd_samplers.samplers[preview_sampler_index].name 279 | p.cfg_scale = preview_cfg_scale 280 | p.seed = preview_seed 281 | p.width = preview_width 282 | p.height = preview_height 283 | else: 284 | p.prompt = batch.cond_text[0] 285 | p.steps = 20 286 | p.width = training_width 287 | p.height = training_height 288 | 289 | preview_text = p.prompt 290 | 291 | processed = processing.process_images(p) 292 | image = processed.images[0] if len(processed.images) > 0 else None 293 | 294 | if unload: 295 | shared.sd_model.first_stage_model.to(devices.cpu) 296 | 297 | if image is not None: 298 | shared.state.assign_current_image(image) 299 | 300 | last_saved_image, last_text_info = images.save_image(image, images_dir, "", p.seed, p.prompt, shared.opts.samples_format, processed.infotexts[0], p=p, forced_filename=forced_filename, save_to_dirs=False) 301 | last_saved_image += f", prompt: {preview_text}" 302 | 303 | if shared.opts.training_enable_tensorboard and shared.opts.training_tensorboard_save_images: 304 | tensorboard_add_image(tensorboard_writer, f"Validation at epoch {epoch_num}", image, embedding.step) 305 | 306 | if save_image_with_stored_embedding and os.path.exists(last_saved_file) and embedding_yet_to_be_embedded: 307 | 308 | last_saved_image_chunks = os.path.join(images_embeds_dir, f'{embedding_name}-{steps_done}.png') 309 | 310 | info = PngImagePlugin.PngInfo() 311 | data = torch.load(last_saved_file) 312 | info.add_text("sd-ti-embedding", embedding_to_b64(data)) 313 | 314 | title = "<{}>".format(data.get('name', '???')) 315 | 316 | try: 317 | vectorSize = list(data['string_to_param'].values())[0].shape[0] 318 | except Exception as e: 319 | vectorSize = '?' 320 | 321 | checkpoint = sd_models.select_checkpoint() 322 | footer_left = checkpoint.model_name 323 | footer_mid = '[{}]'.format(checkpoint.shorthash) 324 | footer_right = '{}v {}s'.format(vectorSize, steps_done) 325 | 326 | captioned_image = caption_image_overlay(image, title, footer_left, footer_mid, footer_right) 327 | captioned_image = insert_image_data_embed(captioned_image, data) 328 | 329 | captioned_image.save(last_saved_image_chunks, "PNG", pnginfo=info) 330 | embedding_yet_to_be_embedded = False 331 | 332 | last_saved_image, last_text_info = images.save_image(image, images_dir, "", p.seed, p.prompt, shared.opts.samples_format, processed.infotexts[0], p=p, forced_filename=forced_filename, save_to_dirs=False) 333 | last_saved_image += f", prompt: {preview_text}" 334 | 335 | shared.state.job_no = embedding.step 336 | 337 | shared.state.textinfo = f""" 338 |

339 | Loss: {loss_step:.7f}
340 | Step: {steps_done}
341 | Last prompt: {html.escape(batch.cond_text[0])}
342 | Last saved embedding: {html.escape(last_saved_file)}
343 | Last saved delta weights: {html.escape(last_saved_delta)}
344 | Last saved image: {html.escape(last_saved_image)}
345 |

346 | """ 347 | filename = os.path.join(shared.cmd_opts.embeddings_dir, f'{embedding_name}.pt') 348 | save_embedding(embedding, optimizer, checkpoint, embedding_name, filename, remove_cached_checksum=True) 349 | save_deltas(kv_filename) 350 | except Exception: 351 | print(traceback.format_exc(), file=sys.stderr) 352 | pass 353 | finally: 354 | with torch.no_grad(): 355 | for k, v in kvs.items(): 356 | v[:] = kvs_bak[k] 357 | 358 | pbar.leave = False 359 | pbar.close() 360 | shared.sd_model.first_stage_model.to(devices.device) 361 | shared.parallel_processing_allowed = old_parallel_processing_allowed 362 | 363 | return embedding, filename 364 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU AFFERO GENERAL PUBLIC LICENSE 2 | Version 3, 19 November 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU Affero General Public License is a free, copyleft license for 11 | software and other kinds of works, specifically designed to ensure 12 | cooperation with the community in the case of network server software. 13 | 14 | The licenses for most software and other practical works are designed 15 | to take away your freedom to share and change the works. By contrast, 16 | our General Public Licenses are intended to guarantee your freedom to 17 | share and change all versions of a program--to make sure it remains free 18 | software for all its users. 19 | 20 | When we speak of free software, we are referring to freedom, not 21 | price. Our General Public Licenses are designed to make sure that you 22 | have the freedom to distribute copies of free software (and charge for 23 | them if you wish), that you receive source code or can get it if you 24 | want it, that you can change the software or use pieces of it in new 25 | free programs, and that you know you can do these things. 26 | 27 | Developers that use our General Public Licenses protect your rights 28 | with two steps: (1) assert copyright on the software, and (2) offer 29 | you this License which gives you legal permission to copy, distribute 30 | and/or modify the software. 31 | 32 | A secondary benefit of defending all users' freedom is that 33 | improvements made in alternate versions of the program, if they 34 | receive widespread use, become available for other developers to 35 | incorporate. Many developers of free software are heartened and 36 | encouraged by the resulting cooperation. However, in the case of 37 | software used on network servers, this result may fail to come about. 38 | The GNU General Public License permits making a modified version and 39 | letting the public access it on a server without ever releasing its 40 | source code to the public. 41 | 42 | The GNU Affero General Public License is designed specifically to 43 | ensure that, in such cases, the modified source code becomes available 44 | to the community. It requires the operator of a network server to 45 | provide the source code of the modified version running there to the 46 | users of that server. Therefore, public use of a modified version, on 47 | a publicly accessible server, gives the public access to the source 48 | code of the modified version. 49 | 50 | An older license, called the Affero General Public License and 51 | published by Affero, was designed to accomplish similar goals. This is 52 | a different license, not a version of the Affero GPL, but Affero has 53 | released a new version of the Affero GPL which permits relicensing under 54 | this license. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | TERMS AND CONDITIONS 60 | 61 | 0. Definitions. 62 | 63 | "This License" refers to version 3 of the GNU Affero General Public License. 64 | 65 | "Copyright" also means copyright-like laws that apply to other kinds of 66 | works, such as semiconductor masks. 67 | 68 | "The Program" refers to any copyrightable work licensed under this 69 | License. Each licensee is addressed as "you". "Licensees" and 70 | "recipients" may be individuals or organizations. 71 | 72 | To "modify" a work means to copy from or adapt all or part of the work 73 | in a fashion requiring copyright permission, other than the making of an 74 | exact copy. The resulting work is called a "modified version" of the 75 | earlier work or a work "based on" the earlier work. 76 | 77 | A "covered work" means either the unmodified Program or a work based 78 | on the Program. 79 | 80 | To "propagate" a work means to do anything with it that, without 81 | permission, would make you directly or secondarily liable for 82 | infringement under applicable copyright law, except executing it on a 83 | computer or modifying a private copy. Propagation includes copying, 84 | distribution (with or without modification), making available to the 85 | public, and in some countries other activities as well. 86 | 87 | To "convey" a work means any kind of propagation that enables other 88 | parties to make or receive copies. Mere interaction with a user through 89 | a computer network, with no transfer of a copy, is not conveying. 90 | 91 | An interactive user interface displays "Appropriate Legal Notices" 92 | to the extent that it includes a convenient and prominently visible 93 | feature that (1) displays an appropriate copyright notice, and (2) 94 | tells the user that there is no warranty for the work (except to the 95 | extent that warranties are provided), that licensees may convey the 96 | work under this License, and how to view a copy of this License. If 97 | the interface presents a list of user commands or options, such as a 98 | menu, a prominent item in the list meets this criterion. 99 | 100 | 1. Source Code. 101 | 102 | The "source code" for a work means the preferred form of the work 103 | for making modifications to it. "Object code" means any non-source 104 | form of a work. 105 | 106 | A "Standard Interface" means an interface that either is an official 107 | standard defined by a recognized standards body, or, in the case of 108 | interfaces specified for a particular programming language, one that 109 | is widely used among developers working in that language. 110 | 111 | The "System Libraries" of an executable work include anything, other 112 | than the work as a whole, that (a) is included in the normal form of 113 | packaging a Major Component, but which is not part of that Major 114 | Component, and (b) serves only to enable use of the work with that 115 | Major Component, or to implement a Standard Interface for which an 116 | implementation is available to the public in source code form. A 117 | "Major Component", in this context, means a major essential component 118 | (kernel, window system, and so on) of the specific operating system 119 | (if any) on which the executable work runs, or a compiler used to 120 | produce the work, or an object code interpreter used to run it. 121 | 122 | The "Corresponding Source" for a work in object code form means all 123 | the source code needed to generate, install, and (for an executable 124 | work) run the object code and to modify the work, including scripts to 125 | control those activities. However, it does not include the work's 126 | System Libraries, or general-purpose tools or generally available free 127 | programs which are used unmodified in performing those activities but 128 | which are not part of the work. For example, Corresponding Source 129 | includes interface definition files associated with source files for 130 | the work, and the source code for shared libraries and dynamically 131 | linked subprograms that the work is specifically designed to require, 132 | such as by intimate data communication or control flow between those 133 | subprograms and other parts of the work. 134 | 135 | The Corresponding Source need not include anything that users 136 | can regenerate automatically from other parts of the Corresponding 137 | Source. 138 | 139 | The Corresponding Source for a work in source code form is that 140 | same work. 141 | 142 | 2. Basic Permissions. 143 | 144 | All rights granted under this License are granted for the term of 145 | copyright on the Program, and are irrevocable provided the stated 146 | conditions are met. This License explicitly affirms your unlimited 147 | permission to run the unmodified Program. The output from running a 148 | covered work is covered by this License only if the output, given its 149 | content, constitutes a covered work. This License acknowledges your 150 | rights of fair use or other equivalent, as provided by copyright law. 151 | 152 | You may make, run and propagate covered works that you do not 153 | convey, without conditions so long as your license otherwise remains 154 | in force. You may convey covered works to others for the sole purpose 155 | of having them make modifications exclusively for you, or provide you 156 | with facilities for running those works, provided that you comply with 157 | the terms of this License in conveying all material for which you do 158 | not control copyright. Those thus making or running the covered works 159 | for you must do so exclusively on your behalf, under your direction 160 | and control, on terms that prohibit them from making any copies of 161 | your copyrighted material outside their relationship with you. 162 | 163 | Conveying under any other circumstances is permitted solely under 164 | the conditions stated below. Sublicensing is not allowed; section 10 165 | makes it unnecessary. 166 | 167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 168 | 169 | No covered work shall be deemed part of an effective technological 170 | measure under any applicable law fulfilling obligations under article 171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 172 | similar laws prohibiting or restricting circumvention of such 173 | measures. 174 | 175 | When you convey a covered work, you waive any legal power to forbid 176 | circumvention of technological measures to the extent such circumvention 177 | is effected by exercising rights under this License with respect to 178 | the covered work, and you disclaim any intention to limit operation or 179 | modification of the work as a means of enforcing, against the work's 180 | users, your or third parties' legal rights to forbid circumvention of 181 | technological measures. 182 | 183 | 4. Conveying Verbatim Copies. 184 | 185 | You may convey verbatim copies of the Program's source code as you 186 | receive it, in any medium, provided that you conspicuously and 187 | appropriately publish on each copy an appropriate copyright notice; 188 | keep intact all notices stating that this License and any 189 | non-permissive terms added in accord with section 7 apply to the code; 190 | keep intact all notices of the absence of any warranty; and give all 191 | recipients a copy of this License along with the Program. 192 | 193 | You may charge any price or no price for each copy that you convey, 194 | and you may offer support or warranty protection for a fee. 195 | 196 | 5. Conveying Modified Source Versions. 197 | 198 | You may convey a work based on the Program, or the modifications to 199 | produce it from the Program, in the form of source code under the 200 | terms of section 4, provided that you also meet all of these conditions: 201 | 202 | a) The work must carry prominent notices stating that you modified 203 | it, and giving a relevant date. 204 | 205 | b) The work must carry prominent notices stating that it is 206 | released under this License and any conditions added under section 207 | 7. This requirement modifies the requirement in section 4 to 208 | "keep intact all notices". 209 | 210 | c) You must license the entire work, as a whole, under this 211 | License to anyone who comes into possession of a copy. This 212 | License will therefore apply, along with any applicable section 7 213 | additional terms, to the whole of the work, and all its parts, 214 | regardless of how they are packaged. This License gives no 215 | permission to license the work in any other way, but it does not 216 | invalidate such permission if you have separately received it. 217 | 218 | d) If the work has interactive user interfaces, each must display 219 | Appropriate Legal Notices; however, if the Program has interactive 220 | interfaces that do not display Appropriate Legal Notices, your 221 | work need not make them do so. 222 | 223 | A compilation of a covered work with other separate and independent 224 | works, which are not by their nature extensions of the covered work, 225 | and which are not combined with it such as to form a larger program, 226 | in or on a volume of a storage or distribution medium, is called an 227 | "aggregate" if the compilation and its resulting copyright are not 228 | used to limit the access or legal rights of the compilation's users 229 | beyond what the individual works permit. Inclusion of a covered work 230 | in an aggregate does not cause this License to apply to the other 231 | parts of the aggregate. 232 | 233 | 6. Conveying Non-Source Forms. 234 | 235 | You may convey a covered work in object code form under the terms 236 | of sections 4 and 5, provided that you also convey the 237 | machine-readable Corresponding Source under the terms of this License, 238 | in one of these ways: 239 | 240 | a) Convey the object code in, or embodied in, a physical product 241 | (including a physical distribution medium), accompanied by the 242 | Corresponding Source fixed on a durable physical medium 243 | customarily used for software interchange. 244 | 245 | b) Convey the object code in, or embodied in, a physical product 246 | (including a physical distribution medium), accompanied by a 247 | written offer, valid for at least three years and valid for as 248 | long as you offer spare parts or customer support for that product 249 | model, to give anyone who possesses the object code either (1) a 250 | copy of the Corresponding Source for all the software in the 251 | product that is covered by this License, on a durable physical 252 | medium customarily used for software interchange, for a price no 253 | more than your reasonable cost of physically performing this 254 | conveying of source, or (2) access to copy the 255 | Corresponding Source from a network server at no charge. 256 | 257 | c) Convey individual copies of the object code with a copy of the 258 | written offer to provide the Corresponding Source. This 259 | alternative is allowed only occasionally and noncommercially, and 260 | only if you received the object code with such an offer, in accord 261 | with subsection 6b. 262 | 263 | d) Convey the object code by offering access from a designated 264 | place (gratis or for a charge), and offer equivalent access to the 265 | Corresponding Source in the same way through the same place at no 266 | further charge. You need not require recipients to copy the 267 | Corresponding Source along with the object code. If the place to 268 | copy the object code is a network server, the Corresponding Source 269 | may be on a different server (operated by you or a third party) 270 | that supports equivalent copying facilities, provided you maintain 271 | clear directions next to the object code saying where to find the 272 | Corresponding Source. Regardless of what server hosts the 273 | Corresponding Source, you remain obligated to ensure that it is 274 | available for as long as needed to satisfy these requirements. 275 | 276 | e) Convey the object code using peer-to-peer transmission, provided 277 | you inform other peers where the object code and Corresponding 278 | Source of the work are being offered to the general public at no 279 | charge under subsection 6d. 280 | 281 | A separable portion of the object code, whose source code is excluded 282 | from the Corresponding Source as a System Library, need not be 283 | included in conveying the object code work. 284 | 285 | A "User Product" is either (1) a "consumer product", which means any 286 | tangible personal property which is normally used for personal, family, 287 | or household purposes, or (2) anything designed or sold for incorporation 288 | into a dwelling. In determining whether a product is a consumer product, 289 | doubtful cases shall be resolved in favor of coverage. For a particular 290 | product received by a particular user, "normally used" refers to a 291 | typical or common use of that class of product, regardless of the status 292 | of the particular user or of the way in which the particular user 293 | actually uses, or expects or is expected to use, the product. A product 294 | is a consumer product regardless of whether the product has substantial 295 | commercial, industrial or non-consumer uses, unless such uses represent 296 | the only significant mode of use of the product. 297 | 298 | "Installation Information" for a User Product means any methods, 299 | procedures, authorization keys, or other information required to install 300 | and execute modified versions of a covered work in that User Product from 301 | a modified version of its Corresponding Source. The information must 302 | suffice to ensure that the continued functioning of the modified object 303 | code is in no case prevented or interfered with solely because 304 | modification has been made. 305 | 306 | If you convey an object code work under this section in, or with, or 307 | specifically for use in, a User Product, and the conveying occurs as 308 | part of a transaction in which the right of possession and use of the 309 | User Product is transferred to the recipient in perpetuity or for a 310 | fixed term (regardless of how the transaction is characterized), the 311 | Corresponding Source conveyed under this section must be accompanied 312 | by the Installation Information. But this requirement does not apply 313 | if neither you nor any third party retains the ability to install 314 | modified object code on the User Product (for example, the work has 315 | been installed in ROM). 316 | 317 | The requirement to provide Installation Information does not include a 318 | requirement to continue to provide support service, warranty, or updates 319 | for a work that has been modified or installed by the recipient, or for 320 | the User Product in which it has been modified or installed. Access to a 321 | network may be denied when the modification itself materially and 322 | adversely affects the operation of the network or violates the rules and 323 | protocols for communication across the network. 324 | 325 | Corresponding Source conveyed, and Installation Information provided, 326 | in accord with this section must be in a format that is publicly 327 | documented (and with an implementation available to the public in 328 | source code form), and must require no special password or key for 329 | unpacking, reading or copying. 330 | 331 | 7. Additional Terms. 332 | 333 | "Additional permissions" are terms that supplement the terms of this 334 | License by making exceptions from one or more of its conditions. 335 | Additional permissions that are applicable to the entire Program shall 336 | be treated as though they were included in this License, to the extent 337 | that they are valid under applicable law. If additional permissions 338 | apply only to part of the Program, that part may be used separately 339 | under those permissions, but the entire Program remains governed by 340 | this License without regard to the additional permissions. 341 | 342 | When you convey a copy of a covered work, you may at your option 343 | remove any additional permissions from that copy, or from any part of 344 | it. (Additional permissions may be written to require their own 345 | removal in certain cases when you modify the work.) You may place 346 | additional permissions on material, added by you to a covered work, 347 | for which you have or can give appropriate copyright permission. 348 | 349 | Notwithstanding any other provision of this License, for material you 350 | add to a covered work, you may (if authorized by the copyright holders of 351 | that material) supplement the terms of this License with terms: 352 | 353 | a) Disclaiming warranty or limiting liability differently from the 354 | terms of sections 15 and 16 of this License; or 355 | 356 | b) Requiring preservation of specified reasonable legal notices or 357 | author attributions in that material or in the Appropriate Legal 358 | Notices displayed by works containing it; or 359 | 360 | c) Prohibiting misrepresentation of the origin of that material, or 361 | requiring that modified versions of such material be marked in 362 | reasonable ways as different from the original version; or 363 | 364 | d) Limiting the use for publicity purposes of names of licensors or 365 | authors of the material; or 366 | 367 | e) Declining to grant rights under trademark law for use of some 368 | trade names, trademarks, or service marks; or 369 | 370 | f) Requiring indemnification of licensors and authors of that 371 | material by anyone who conveys the material (or modified versions of 372 | it) with contractual assumptions of liability to the recipient, for 373 | any liability that these contractual assumptions directly impose on 374 | those licensors and authors. 375 | 376 | All other non-permissive additional terms are considered "further 377 | restrictions" within the meaning of section 10. If the Program as you 378 | received it, or any part of it, contains a notice stating that it is 379 | governed by this License along with a term that is a further 380 | restriction, you may remove that term. If a license document contains 381 | a further restriction but permits relicensing or conveying under this 382 | License, you may add to a covered work material governed by the terms 383 | of that license document, provided that the further restriction does 384 | not survive such relicensing or conveying. 385 | 386 | If you add terms to a covered work in accord with this section, you 387 | must place, in the relevant source files, a statement of the 388 | additional terms that apply to those files, or a notice indicating 389 | where to find the applicable terms. 390 | 391 | Additional terms, permissive or non-permissive, may be stated in the 392 | form of a separately written license, or stated as exceptions; 393 | the above requirements apply either way. 394 | 395 | 8. Termination. 396 | 397 | You may not propagate or modify a covered work except as expressly 398 | provided under this License. Any attempt otherwise to propagate or 399 | modify it is void, and will automatically terminate your rights under 400 | this License (including any patent licenses granted under the third 401 | paragraph of section 11). 402 | 403 | However, if you cease all violation of this License, then your 404 | license from a particular copyright holder is reinstated (a) 405 | provisionally, unless and until the copyright holder explicitly and 406 | finally terminates your license, and (b) permanently, if the copyright 407 | holder fails to notify you of the violation by some reasonable means 408 | prior to 60 days after the cessation. 409 | 410 | Moreover, your license from a particular copyright holder is 411 | reinstated permanently if the copyright holder notifies you of the 412 | violation by some reasonable means, this is the first time you have 413 | received notice of violation of this License (for any work) from that 414 | copyright holder, and you cure the violation prior to 30 days after 415 | your receipt of the notice. 416 | 417 | Termination of your rights under this section does not terminate the 418 | licenses of parties who have received copies or rights from you under 419 | this License. If your rights have been terminated and not permanently 420 | reinstated, you do not qualify to receive new licenses for the same 421 | material under section 10. 422 | 423 | 9. Acceptance Not Required for Having Copies. 424 | 425 | You are not required to accept this License in order to receive or 426 | run a copy of the Program. Ancillary propagation of a covered work 427 | occurring solely as a consequence of using peer-to-peer transmission 428 | to receive a copy likewise does not require acceptance. However, 429 | nothing other than this License grants you permission to propagate or 430 | modify any covered work. These actions infringe copyright if you do 431 | not accept this License. Therefore, by modifying or propagating a 432 | covered work, you indicate your acceptance of this License to do so. 433 | 434 | 10. Automatic Licensing of Downstream Recipients. 435 | 436 | Each time you convey a covered work, the recipient automatically 437 | receives a license from the original licensors, to run, modify and 438 | propagate that work, subject to this License. You are not responsible 439 | for enforcing compliance by third parties with this License. 440 | 441 | An "entity transaction" is a transaction transferring control of an 442 | organization, or substantially all assets of one, or subdividing an 443 | organization, or merging organizations. If propagation of a covered 444 | work results from an entity transaction, each party to that 445 | transaction who receives a copy of the work also receives whatever 446 | licenses to the work the party's predecessor in interest had or could 447 | give under the previous paragraph, plus a right to possession of the 448 | Corresponding Source of the work from the predecessor in interest, if 449 | the predecessor has it or can get it with reasonable efforts. 450 | 451 | You may not impose any further restrictions on the exercise of the 452 | rights granted or affirmed under this License. For example, you may 453 | not impose a license fee, royalty, or other charge for exercise of 454 | rights granted under this License, and you may not initiate litigation 455 | (including a cross-claim or counterclaim in a lawsuit) alleging that 456 | any patent claim is infringed by making, using, selling, offering for 457 | sale, or importing the Program or any portion of it. 458 | 459 | 11. Patents. 460 | 461 | A "contributor" is a copyright holder who authorizes use under this 462 | License of the Program or a work on which the Program is based. The 463 | work thus licensed is called the contributor's "contributor version". 464 | 465 | A contributor's "essential patent claims" are all patent claims 466 | owned or controlled by the contributor, whether already acquired or 467 | hereafter acquired, that would be infringed by some manner, permitted 468 | by this License, of making, using, or selling its contributor version, 469 | but do not include claims that would be infringed only as a 470 | consequence of further modification of the contributor version. For 471 | purposes of this definition, "control" includes the right to grant 472 | patent sublicenses in a manner consistent with the requirements of 473 | this License. 474 | 475 | Each contributor grants you a non-exclusive, worldwide, royalty-free 476 | patent license under the contributor's essential patent claims, to 477 | make, use, sell, offer for sale, import and otherwise run, modify and 478 | propagate the contents of its contributor version. 479 | 480 | In the following three paragraphs, a "patent license" is any express 481 | agreement or commitment, however denominated, not to enforce a patent 482 | (such as an express permission to practice a patent or covenant not to 483 | sue for patent infringement). To "grant" such a patent license to a 484 | party means to make such an agreement or commitment not to enforce a 485 | patent against the party. 486 | 487 | If you convey a covered work, knowingly relying on a patent license, 488 | and the Corresponding Source of the work is not available for anyone 489 | to copy, free of charge and under the terms of this License, through a 490 | publicly available network server or other readily accessible means, 491 | then you must either (1) cause the Corresponding Source to be so 492 | available, or (2) arrange to deprive yourself of the benefit of the 493 | patent license for this particular work, or (3) arrange, in a manner 494 | consistent with the requirements of this License, to extend the patent 495 | license to downstream recipients. "Knowingly relying" means you have 496 | actual knowledge that, but for the patent license, your conveying the 497 | covered work in a country, or your recipient's use of the covered work 498 | in a country, would infringe one or more identifiable patents in that 499 | country that you have reason to believe are valid. 500 | 501 | If, pursuant to or in connection with a single transaction or 502 | arrangement, you convey, or propagate by procuring conveyance of, a 503 | covered work, and grant a patent license to some of the parties 504 | receiving the covered work authorizing them to use, propagate, modify 505 | or convey a specific copy of the covered work, then the patent license 506 | you grant is automatically extended to all recipients of the covered 507 | work and works based on it. 508 | 509 | A patent license is "discriminatory" if it does not include within 510 | the scope of its coverage, prohibits the exercise of, or is 511 | conditioned on the non-exercise of one or more of the rights that are 512 | specifically granted under this License. You may not convey a covered 513 | work if you are a party to an arrangement with a third party that is 514 | in the business of distributing software, under which you make payment 515 | to the third party based on the extent of your activity of conveying 516 | the work, and under which the third party grants, to any of the 517 | parties who would receive the covered work from you, a discriminatory 518 | patent license (a) in connection with copies of the covered work 519 | conveyed by you (or copies made from those copies), or (b) primarily 520 | for and in connection with specific products or compilations that 521 | contain the covered work, unless you entered into that arrangement, 522 | or that patent license was granted, prior to 28 March 2007. 523 | 524 | Nothing in this License shall be construed as excluding or limiting 525 | any implied license or other defenses to infringement that may 526 | otherwise be available to you under applicable patent law. 527 | 528 | 12. No Surrender of Others' Freedom. 529 | 530 | If conditions are imposed on you (whether by court order, agreement or 531 | otherwise) that contradict the conditions of this License, they do not 532 | excuse you from the conditions of this License. If you cannot convey a 533 | covered work so as to satisfy simultaneously your obligations under this 534 | License and any other pertinent obligations, then as a consequence you may 535 | not convey it at all. For example, if you agree to terms that obligate you 536 | to collect a royalty for further conveying from those to whom you convey 537 | the Program, the only way you could satisfy both those terms and this 538 | License would be to refrain entirely from conveying the Program. 539 | 540 | 13. Remote Network Interaction; Use with the GNU General Public License. 541 | 542 | Notwithstanding any other provision of this License, if you modify the 543 | Program, your modified version must prominently offer all users 544 | interacting with it remotely through a computer network (if your version 545 | supports such interaction) an opportunity to receive the Corresponding 546 | Source of your version by providing access to the Corresponding Source 547 | from a network server at no charge, through some standard or customary 548 | means of facilitating copying of software. This Corresponding Source 549 | shall include the Corresponding Source for any work covered by version 3 550 | of the GNU General Public License that is incorporated pursuant to the 551 | following paragraph. 552 | 553 | Notwithstanding any other provision of this License, you have 554 | permission to link or combine any covered work with a work licensed 555 | under version 3 of the GNU General Public License into a single 556 | combined work, and to convey the resulting work. The terms of this 557 | License will continue to apply to the part which is the covered work, 558 | but the work with which it is combined will remain governed by version 559 | 3 of the GNU General Public License. 560 | 561 | 14. Revised Versions of this License. 562 | 563 | The Free Software Foundation may publish revised and/or new versions of 564 | the GNU Affero General Public License from time to time. Such new versions 565 | will be similar in spirit to the present version, but may differ in detail to 566 | address new problems or concerns. 567 | 568 | Each version is given a distinguishing version number. If the 569 | Program specifies that a certain numbered version of the GNU Affero General 570 | Public License "or any later version" applies to it, you have the 571 | option of following the terms and conditions either of that numbered 572 | version or of any later version published by the Free Software 573 | Foundation. If the Program does not specify a version number of the 574 | GNU Affero General Public License, you may choose any version ever published 575 | by the Free Software Foundation. 576 | 577 | If the Program specifies that a proxy can decide which future 578 | versions of the GNU Affero General Public License can be used, that proxy's 579 | public statement of acceptance of a version permanently authorizes you 580 | to choose that version for the Program. 581 | 582 | Later license versions may give you additional or different 583 | permissions. However, no additional obligations are imposed on any 584 | author or copyright holder as a result of your choosing to follow a 585 | later version. 586 | 587 | 15. Disclaimer of Warranty. 588 | 589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 597 | 598 | 16. Limitation of Liability. 599 | 600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 608 | SUCH DAMAGES. 609 | 610 | 17. Interpretation of Sections 15 and 16. 611 | 612 | If the disclaimer of warranty and limitation of liability provided 613 | above cannot be given local legal effect according to their terms, 614 | reviewing courts shall apply local law that most closely approximates 615 | an absolute waiver of all civil liability in connection with the 616 | Program, unless a warranty or assumption of liability accompanies a 617 | copy of the Program in return for a fee. 618 | 619 | END OF TERMS AND CONDITIONS 620 | 621 | How to Apply These Terms to Your New Programs 622 | 623 | If you develop a new program, and you want it to be of the greatest 624 | possible use to the public, the best way to achieve this is to make it 625 | free software which everyone can redistribute and change under these terms. 626 | 627 | To do so, attach the following notices to the program. It is safest 628 | to attach them to the start of each source file to most effectively 629 | state the exclusion of warranty; and each file should have at least 630 | the "copyright" line and a pointer to where the full notice is found. 631 | 632 | 633 | Copyright (C) 634 | 635 | This program is free software: you can redistribute it and/or modify 636 | it under the terms of the GNU Affero General Public License as published 637 | by the Free Software Foundation, either version 3 of the License, or 638 | (at your option) any later version. 639 | 640 | This program is distributed in the hope that it will be useful, 641 | but WITHOUT ANY WARRANTY; without even the implied warranty of 642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 643 | GNU Affero General Public License for more details. 644 | 645 | You should have received a copy of the GNU Affero General Public License 646 | along with this program. If not, see . 647 | 648 | Also add information on how to contact you by electronic and paper mail. 649 | 650 | If your software can interact with users remotely through a computer 651 | network, you should also make sure that it provides a way for users to 652 | get its source. For example, if your program is a web application, its 653 | interface could display a "Source" link that leads users to an archive 654 | of the code. There are many ways you could offer source, and different 655 | solutions will be better for different programs; see section 13 for the 656 | specific requirements. 657 | 658 | You should also get your employer (if you work as a programmer) or school, 659 | if any, to sign a "copyright disclaimer" for the program, if necessary. 660 | For more information on this, and how to apply and follow the GNU AGPL, see 661 | . 662 | --------------------------------------------------------------------------------