├── requirements.txt
├── test
│   ├── zero_len_file.safetensors
│   ├── header_size_too_big.safetensors
│   └── duplicate_keys_in_header.safetensors
├── .gitignore
├── safetensors_tags.py
├── safetensors_object.py
├── safetensors_cf.py
├── readme.md
├── safetensors_file.py
├── safetensors_util.py
├── safetensors_worker.py
└── lora_keys_sd15.py

/requirements.txt:
--------------------------------------------------------------------------------
1 | click==8.1.3
2 | numpy  # used by the cf command (safetensors_cf.py)
--------------------------------------------------------------------------------
/test/zero_len_file.safetensors:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | **/__pycache__/
2 | /.venv/
--------------------------------------------------------------------------------
/test/header_size_too_big.safetensors:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/by321/safetensors_util/HEAD/test/header_size_too_big.safetensors
--------------------------------------------------------------------------------
/test/duplicate_keys_in_header.safetensors:
--------------------------------------------------------------------------------
1 | `{"key01":{"dtype":"U8","shape":[2,3],"data_offsets":[0,6]},"key01":{"dtype":"U8","shape":[1],"data_offsets":[6,7]},"key01":{"dtype":"U8","shape":[2,1],"data_offsets":[7,9]},"key03":{"dtype":"U8","shape":[1,3],"data_offsets":[9,12]},"key04":{"dtype":"U8","shape":[1],"data_offsets":[12,13]},"key03":{"dtype":"U8","shape":[1],"data_offsets":[13,14]}}
--------------------------------------------------------------------------------
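Why the duplicate-keys test file matters: Python's json.loads() silently keeps only the last value when a key repeats, so a corrupt or malicious header could slip overlapping tensors past a naive parser. A minimal sketch of the detection technique used by _CheckDuplicateHeaderKeys in safetensors_file.py:

    import json

    raw = '{"k":{"dtype":"U8"},"k":{"dtype":"F16"}}'
    print(json.loads(raw))  # {'k': {'dtype': 'F16'}} -- the duplicate silently wins

    # object_pairs_hook sees every (key, value) pair before de-duplication
    keys = json.loads(raw, object_pairs_hook=lambda pairs: [k for k, _ in pairs])
    print(keys)  # ['k', 'k'] -- repeated keys are now visible and can be counted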
/safetensors_tags.py:
--------------------------------------------------------------------------------
1 | import json, sys
2 | from safetensors_file import SafeTensorsFile
3 | from safetensors_worker import _ParseMore
4 | """
5 | This script extracts the JSON header's ss_tag_frequency from a safetensors file, then outputs it.
6 | """
7 |
8 | def get_tags(tensorsfile: str) -> str:
9 |     s = SafeTensorsFile.open_file(tensorsfile, quiet=True) # quiet=True suppresses the informational print
10 |     js = s.get_header()
11 |     s.close_file()
12 |     md = js.get("__metadata__")
13 |     if md is None or "ss_tag_frequency" not in md:
14 |         sys.exit(f'{tensorsfile}: header does not contain ["__metadata__"]["ss_tag_frequency"]')
15 |     _ParseMore(md) # JSON-decode string values in place
16 |     stf = md["ss_tag_frequency"]
17 |     return json.dumps(stf, ensure_ascii=False, separators=(', ', ': '), indent=4)
18 |
19 | if len(sys.argv) != 2:
20 |     sys.exit(f"usage: {sys.argv[0]} input_file.safetensors")
21 | tensorsfile = sys.argv[1]
22 | hdata = get_tags(tensorsfile)
23 | print(hdata)
--------------------------------------------------------------------------------
/safetensors_object.py:
--------------------------------------------------------------------------------
1 | import json, sys
2 | from safetensors_file import SafeTensorsFile
3 | from safetensors_worker import _ParseMore
4 |
5 | """
6 | $ python3 safetensors_object.py ss_network_module /path/to/file/summer_dress.safetensors
7 | "networks.lora"
8 |
9 | $ python3 safetensors_object.py nonexistent_module /path/to/file/summer_dress.safetensors
10 | Error: Metadata does not contain a `nonexistent_module` item, did you spell it right?
11 |
12 | $ python3 safetensors_object.py ss_network_module /path/to/file/Joanne.safetensors
13 | Error: File is embedding/textual inversion, not a LoRA/LyCORIS training set
14 |
15 | $ python3 safetensors_object.py ss_network_module /path/to/file/weird_file.safetensors
16 | Error: File header does not contain a `__metadata__` item
17 |
18 | $ python3 safetensors_object.py ss_tag_frequency /path/to/file/trina.safetensors
19 | {
20 |     "6_trina": {
21 |         "trina": 26,
22 |         " black hair": 20,
23 |         " hands on hips": 1,
24 |         " looking at viewer": 7
25 |     }
26 | }
27 |
28 | """
29 |
30 | def get_object(tensorsfile: str, md_object: str) -> str:
31 |     s = SafeTensorsFile.open_file(tensorsfile, quiet=True)
32 |     js = s.get_header()
33 |     s.close_file()
34 |
35 |     if "emb_params" in js: # textual inversion embeddings store their tensor under this key
36 |         return "Error: File is embedding/textual inversion, not a LoRA/LyCORIS training set"
37 |     if "__metadata__" not in js:
38 |         return "Error: File header does not contain a `__metadata__` item"
39 |     md = js["__metadata__"]
40 |     if md_object not in md:
41 |         return f'Error: Metadata does not contain a `{md_object}` item, did you spell it right?'
42 |     _ParseMore(md) # JSON-decode string values in place
43 |     stf = md[md_object]
44 |     return json.dumps(stf, ensure_ascii=False, separators=(', ', ': '), indent=4)
45 |
46 | if len(sys.argv) != 3:
47 |     sys.exit(f"usage: {sys.argv[0]} metadata_key input_file.safetensors")
48 | md_object = sys.argv[1]
49 | tensorsfile = sys.argv[2]
50 | hdata = get_object(tensorsfile, md_object)
51 |
52 | print(hdata)
--------------------------------------------------------------------------------
/safetensors_cf.py:
--------------------------------------------------------------------------------
1 | import os, sys, json, copy
2 | from safetensors_file import SafeTensorsFile
3 | import safetensors_worker
4 | import numpy as np
5 |
6 | def adjust_new_header(keys:list[str],newhdr:dict)->int: #convert F32/F64 entries to F16 in place, recompute offsets, return bytes saved
7 |     cur_offset=0; savedbytes=0
8 |     for k in keys:
9 |         v=newhdr[k]
10 |         dl=v['data_offsets'][1]-v['data_offsets'][0]
11 |         if v['dtype']=='F32':
12 |             dl=dl//2; savedbytes+=dl
13 |             v['dtype']='F16'
14 |         elif v['dtype']=='F64':
15 |             dl=dl//4; savedbytes+=dl*3
16 |             v['dtype']='F16'
17 |         v['data_offsets'][0]=cur_offset
18 |         cur_offset+=dl
19 |         v['data_offsets'][1]=cur_offset
20 |         newhdr[k]=v
21 |     return savedbytes
22 |
23 | def convert_to_float16_clamped(float_array): #cast to F16, replacing +/-inf overflows with the largest finite F16 values
24 |     f16_info = np.finfo(np.float16)
25 |     result = float_array.astype(np.float16)
26 |     result = np.where(np.isposinf(result), f16_info.max, result)
27 |     result = np.where(np.isneginf(result), f16_info.min, result)
28 |     return result
29 |
30 | def CompactFloat(cmdLine:dict,input_file:str,output_file:str)->int:
31 |     if safetensors_worker._need_force_overwrite(output_file,cmdLine): return -1
32 |
33 |     s=SafeTensorsFile.open_file(input_file,cmdLine['quiet'])
34 |     if s.error!=0: return s.error
35 |
36 |     hdr=s.get_header()
37 |     newhdr=copy.deepcopy(hdr) #create a new header
38 |     if "__metadata__" in hdr:
39 |         newhdr["__metadata__"]=hdr["__metadata__"]
40 |         hdr.pop("__metadata__") #keep metadata in the output header, but exclude it from the tensor key list below
41 |
42 |     keys=list(hdr.keys())
43 |     keys.sort(key=lambda x:hdr[x]['data_offsets'][0]) #sort keys by starting offset
44 |     tensorsaves=adjust_new_header(keys,newhdr)
45 |
46 |     #for k in keys:
47 |     #    if hdr[k]['dtype']=='F32' or hdr[k]['dtype']=='F64':
48 |     #        print("---",hdr[k],'->',newhdr[k])
49 |     #print(newhdr)
50 |     print("size reduction from converting F32 and F64 to F16:",tensorsaves)
51 |
52 |     newhdrbuf=json.dumps(newhdr,separators=(',',':'),ensure_ascii=False).encode('utf-8')
53 |     newhdrlen:int=int(len(newhdrbuf))
54 |     pad:int=((newhdrlen+7)&(~7))-newhdrlen #pad to multiple of 8
55 |
56 |     with open(output_file,"wb") as fo:
57 |         fo.write(int(newhdrlen+pad).to_bytes(8,'little'))
58 |         fo.write(newhdrbuf)
59 |         if pad>0: fo.write(bytearray([32]*pad)) #pad with spaces
60 |         for k in keys:
61 |             buf=s.load_one_tensor(k)
62 |             #print(hdr[k],len(buf))
63 |             if hdr[k]['dtype']=='F32' or hdr[k]['dtype']=='F64':
64 |                 intype=np.float32 if hdr[k]['dtype']=='F32' else np.float64
65 |                 data=np.frombuffer(buf,dtype=intype)
66 |                 df16=convert_to_float16_clamped(data)
67 |                 df16.tofile(fo)
68 |             else:
69 |                 fo.write(buf)
70 |         print(f"final file size: {fo.tell()} vs {s.st.st_size}, dif={s.st.st_size-fo.tell()}")
71 |
72 |     return 0
--------------------------------------------------------------------------------
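What the clamping buys: a plain astype(np.float16) turns anything outside the F16 range into +/-inf, while convert_to_float16_clamped pins such values to the largest finite F16 instead. A standalone sketch (numpy may print an overflow warning for the plain cast):

    import numpy as np
    from safetensors_cf import convert_to_float16_clamped

    a = np.array([1e6, -1e6, 1.5, 70000.0], dtype=np.float32)
    print(a.astype(np.float16))           # [ inf -inf 1.5 inf]
    print(convert_to_float16_clamped(a))  # [ 65504. -65504. 1.5 65504.]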
/readme.md:
--------------------------------------------------------------------------------
1 | ### Features
2 |
3 | This is a lightweight utility program for [safetensors files](https://github.com/huggingface/safetensors "safetensors files") written in Python. Its only external dependencies are click (command line parsing) and, for the **cf** command, numpy. Currently it can do the following:
4 |
5 |     Usage: safetensors_util.py [OPTIONS] COMMAND [ARGS]...
6 |
7 |     Options:
8 |       --version        Show the version and exit.
9 |       -q, --quiet      quiet mode, don't print informational stuff
10 |       --help           Show this message and exit.
11 |
12 |     Commands:
13 |       cf           compact F32 and F64 tensors to F16
14 |       checkhdr     check header for possible errors
15 |       checklora    see if input file is a SD 1.x LoRA file
16 |       extractdata  extract one tensor and save to file
17 |       extracthdr   extract file header and save to file
18 |       header       print file header
19 |       listkeys     print header key names (except __metadata__) as a Python list
20 |       metadata     print only __metadata__ in file header
21 |       writemd      read __metadata__ from json and write to safetensors file
22 |
23 |
24 | The most useful features are probably the metadata read and write commands. To read metadata:
25 |
26 |     python safetensors_util.py metadata input_file.safetensors -pm
27 |
28 | Many safetensors files, for example LoRA files, have a \_\_metadata\_\_ field that records metadata such as learning rates during training, number of epochs, number of images used, etc.
29 |
30 | The optional **-pm** flag is meant to make \_\_metadata\_\_ more readable. Because the safetensors format allows only string-to-string key-value pairs in metadata, non-string values must be stored as strings, typically JSON-encoded, for example:
31 |
32 |     "ss_dataset_dirs":"{\"abc\": {\"n_repeats\": 2, \"img_count\": 60}}",
33 |
34 | The **-pm** flag tries to turn the above into this:
35 |
36 |     "ss_dataset_dirs" : {
37 |         "abc":{
38 |             "n_repeats":2,
39 |             "img_count":60
40 |         }
41 |     }
42 |
43 | You can also create a JSON file containing a \_\_metadata\_\_ entry:
44 |
45 |     {
46 |       "__metadata__":{
47 |         "Description": "Stable Diffusion 1.5 LoRA trained on cat pictures",
48 |         "Trigger Words":["cat from hell","killer kitten"],
49 |         "Base Model": "Stable Diffusion 1.5",
50 |         "Training Info": {
51 |           "trainer": "modified Kohya SS",
52 |           "resolution":[512,512],
53 |           "lr":1e-6,
54 |           "text_lr":1e-6,
55 |           "schedule": "linear",
56 |           "text_scheduler": "linear",
57 |           "clip_skip": 0,
58 |           "regularization_images": "none"
59 |         },
60 |         "ss_network_alpha":16,
61 |         "ss_network_dim":16
62 |       }
63 |     }
64 |
65 | and write it to a safetensors file header using the **writemd** command:
66 |
67 |     python safetensors_util.py writemd input.safetensors input.json output.safetensors
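Non-string values in the JSON file (like the lists and numbers above) are converted with str() before being written, since the format only permits string values in \_\_metadata\_\_.

The output file keeps the standard safetensors layout: an 8-byte little-endian header length, then the UTF-8 JSON header padded with spaces to a multiple of 8 bytes, then the unchanged tensor data. A minimal sketch of reading the header back, using only the standard library and the output file from the example above:

    import json, struct

    with open("output.safetensors", "rb") as f:
        (hdrlen,) = struct.unpack("<Q", f.read(8))
        header = json.loads(f.read(hdrlen))
    print(header["__metadata__"])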
--------------------------------------------------------------------------------
/safetensors_file.py:
--------------------------------------------------------------------------------
1 | import os, sys, json
2 |
3 | class SafeTensorsException(Exception):
4 |     def __init__(self, msg:str):
5 |         self.msg=msg
6 |         super().__init__(msg)
7 |
8 |     @staticmethod
9 |     def invalid_file(filename:str,whatiswrong:str):
10 |         s=f"{filename} is not a valid .safetensors file: {whatiswrong}"
11 |         return SafeTensorsException(msg=s)
12 |
13 |     def __str__(self):
14 |         return self.msg
15 |
16 | class SafeTensorsChunk:
17 |     def __init__(self,name:str,dtype:str,shape:list[int],offset0:int,offset1:int):
18 |         self.name=name
19 |         self.dtype=dtype
20 |         self.shape=shape
21 |         self.offset0=offset0
22 |         self.offset1=offset1
23 |
24 | class SafeTensorsFile:
25 |     def __init__(self):
26 |         self.f=None #file handle
27 |         self.hdrbuf=None #header byte buffer
28 |         self.header=None #parsed header as a dict
29 |         self.error=0
30 |
31 |     def __del__(self):
32 |         self.close_file()
33 |
34 |     def __enter__(self):
35 |         return self
36 |
37 |     def __exit__(self, exc_type, exc_value, traceback):
38 |         self.close_file()
39 |
40 |     def close_file(self):
41 |         if self.f is not None:
42 |             self.f.close()
43 |             self.f=None
44 |             self.filename=""
45 |
46 |     #test file: duplicate_keys_in_header.safetensors
47 |     def _CheckDuplicateHeaderKeys(self):
48 |         def parse_object_pairs(pairs):
49 |             return [k for k,_ in pairs]
50 |
51 |         keys=json.loads(self.hdrbuf,object_pairs_hook=parse_object_pairs)
52 |         #print(keys)
53 |         d={}
54 |         for k in keys:
55 |             if k in d: d[k]=d[k]+1
56 |             else: d[k]=1
57 |         hasError=False
58 |         for k,v in d.items():
59 |             if v>1:
60 |                 print(f"key {k} used {v} times in header",file=sys.stderr)
61 |                 hasError=True
62 |         if hasError:
63 |             raise SafeTensorsException.invalid_file(self.filename,"duplicate keys in header")
64 |
65 |     @staticmethod
66 |     def open_file(filename:str,quiet=False,parseHeader=True):
67 |         s=SafeTensorsFile()
68 |         s.open(filename,quiet,parseHeader)
69 |         return s
70 |
71 |     def open(self,fn:str,quiet=False,parseHeader=True)->int:
72 |         st=os.stat(fn)
73 |         if st.st_size<8: #test file: zero_len_file.safetensors
74 |             raise SafeTensorsException.invalid_file(fn,"length less than 8 bytes")
75 |
76 |         f=open(fn,"rb")
77 |         b8=f.read(8) #read header size
78 |         if len(b8)!=8:
79 |             raise SafeTensorsException.invalid_file(fn,f"read only {len(b8)} bytes at start of file")
80 |         headerlen=int.from_bytes(b8,'little',signed=False)
81 |
82 |         if (8+headerlen>st.st_size): #test file: header_size_too_big.safetensors
83 |             raise SafeTensorsException.invalid_file(fn,"header extends past end of file")
84 |
85 |         if not quiet:
86 |             print(f"{fn}: length={st.st_size}, header length={headerlen}")
87 |         hdrbuf=f.read(headerlen)
88 |         if len(hdrbuf)!=headerlen:
89 |             raise SafeTensorsException.invalid_file(fn,f"header size is {headerlen}, but read {len(hdrbuf)} bytes")
90 |         self.filename=fn
91 |         self.f=f
92 |         self.st=st
93 |         self.hdrbuf=hdrbuf
94 |         self.error=0
95 |         self.headerlen=headerlen
96 |         if parseHeader:
97 |             self._CheckDuplicateHeaderKeys()
98 |             self.header=json.loads(self.hdrbuf)
99 |         return 0
100 |
101 |     def get_header(self):
102 |         return self.header
103 |
104 |     def load_one_tensor(self,tensor_name:str):
105 |         self.get_header()
106 |         if tensor_name not in self.header: return None
107 |
108 |         t=self.header[tensor_name]
109 |         self.f.seek(8+self.headerlen+t['data_offsets'][0])
110 |         bytesToRead=t['data_offsets'][1]-t['data_offsets'][0]
111 |         buf=self.f.read(bytesToRead)
112 |         if len(buf)!=bytesToRead:
113 |             print(f"{tensor_name}: length={bytesToRead}, only read {len(buf)} bytes",file=sys.stderr)
114 |         return buf
115 |
116 |     def copy_data_to_file(self,file_handle) -> int:
117 |
118 |         self.f.seek(8+self.headerlen)
119 |         bytesLeft:int=self.st.st_size - 8 - self.headerlen
120 |         while bytesLeft>0:
121 |             chunklen:int=min(bytesLeft,int(16*1024*1024)) #copy in blocks of 16 MB
122 |             file_handle.write(self.f.read(chunklen))
123 |             bytesLeft-=chunklen
124 |
125 |         return 0
--------------------------------------------------------------------------------
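Typical programmatic use of SafeTensorsFile (a sketch; model.safetensors is a placeholder path):

    from safetensors_file import SafeTensorsFile

    with SafeTensorsFile.open_file("model.safetensors", quiet=True) as s:
        hdr = s.get_header()
        for name, info in hdr.items():
            if name == "__metadata__": continue
            raw = s.load_one_tensor(name)  # raw little-endian tensor bytes
            print(name, info["dtype"], info["shape"], len(raw))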
/safetensors_util.py:
--------------------------------------------------------------------------------
1 | import sys, click
2 |
3 | import safetensors_worker
4 | # This file deals with the command line only. If the command line is parsed successfully,
5 | # we will call one of the functions in safetensors_worker.py.
6 |
7 | readonly_input_file=click.argument("input_file", metavar='input_file',
8 |     type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True))
9 | output_file=click.argument("output_file", metavar='output_file',
10 |     type=click.Path(file_okay=True, dir_okay=False, writable=True))
11 |
12 | force_overwrite_flag=click.option("-f","--force-overwrite",default=False,is_flag=True, show_default=True,
13 |     help="overwrite existing files")
14 | fix_ued_flag=click.option("-pm","--parse-more",default=False,is_flag=True, show_default=True,
15 |     help="when printing metadata, JSON-decode quoted values to make text more readable" )
16 | quiet_flag=click.option("-q","--quiet",default=False,is_flag=True, show_default=True,
17 |     help="Quiet mode, don't print informational stuff" )
18 |
19 | @click.group()
20 | @click.version_option(version=8)
21 | @quiet_flag
22 | @click.pass_context
23 | def cli(ctx,quiet:bool):
24 |     # ensure that ctx.obj exists and is a dict (in case `cli()` is called
25 |     # by means other than the `if` block below)
26 |     ctx.ensure_object(dict)
27 |     ctx.obj['quiet'] = quiet
28 |
29 |
30 | @cli.command(name="header",short_help="print file header")
31 | @readonly_input_file
32 | @click.pass_context
33 | def cmd_header(ctx,input_file:str) -> int:
34 |     sys.exit( safetensors_worker.PrintHeader(ctx.obj,input_file) )
35 |
36 |
37 | @cli.command(name="metadata",short_help="print only __metadata__ in file header")
38 | @readonly_input_file
39 | @fix_ued_flag
40 | @click.pass_context
41 | def cmd_meta(ctx,input_file:str,parse_more:bool)->int:
42 |     ctx.obj['parse_more'] = parse_more
43 |     sys.exit( safetensors_worker.PrintMetadata(ctx.obj,input_file) )
44 |
45 |
46 | @cli.command(name="listkeys",short_help="print header key names (except __metadata__) as a Python list")
47 | @readonly_input_file
48 | @click.pass_context
49 | def cmd_keyspy(ctx,input_file:str) -> int:
50 |     sys.exit( safetensors_worker.HeaderKeysToLists(ctx.obj,input_file) )
51 |
52 |
53 | @cli.command(name="writemd",short_help="read __metadata__ from json and write to safetensors file")
54 | @click.argument("in_st_file", metavar='input_st_file',
55 |     type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True))
56 | @click.argument("in_json_file", metavar='input_json_file',
57 |     type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True))
58 | @output_file
59 | @force_overwrite_flag
60 | @click.pass_context
61 | def cmd_writemd(ctx,in_st_file:str,in_json_file:str,output_file:str,force_overwrite:bool) -> int:
62 |     """Read "__metadata__" from json file and write to safetensors header"""
63 |     ctx.obj['force_overwrite'] = force_overwrite
64 |     sys.exit( safetensors_worker.WriteMetadataToHeader(ctx.obj,in_st_file,in_json_file,output_file) )
65 |
66 |
67 | @cli.command(name="extracthdr",short_help="extract file header and save to file")
68 | @readonly_input_file
69 | @output_file
70 | @force_overwrite_flag
71 | @click.pass_context
72 | def cmd_extractheader(ctx,input_file:str,output_file:str,force_overwrite:bool) -> int:
73 |     ctx.obj['force_overwrite'] = force_overwrite
74 |     sys.exit( safetensors_worker.ExtractHeader(ctx.obj,input_file,output_file) )
75 |
76 |
77 | @cli.command(name="extractdata",short_help="extract one tensor and save to file")
78 | @readonly_input_file
79 | @click.argument("key_name", metavar='key_name',type=click.STRING)
80 | @output_file
81 | @force_overwrite_flag
82 | @click.pass_context
83 | def cmd_extractdata(ctx,input_file:str,key_name:str,output_file:str,force_overwrite:bool) -> int:
84 |     ctx.obj['force_overwrite'] = force_overwrite
85 |     sys.exit( safetensors_worker.ExtractData(ctx.obj,input_file,key_name,output_file) )
86 |
87 |
88 | @cli.command(name="checklora",short_help="see if input file is a SD 1.x LoRA file")
89 | @readonly_input_file
90 | @click.pass_context
91 | def cmd_checklora(ctx,input_file:str)->int:
92 |     sys.exit( safetensors_worker.CheckLoRA(ctx.obj,input_file) )
93 |
94 |
95 | @cli.command(name="cf",short_help="compact F32 and F64 tensors to F16")
96 | @readonly_input_file
97 | @output_file
98 | @force_overwrite_flag
99 | @click.pass_context
100 | def cmd_compactfloat(ctx,input_file:str,output_file:str,force_overwrite:bool) -> int:
101 |     import safetensors_cf #deferred import, so numpy is only required for this command
102 |     ctx.obj['force_overwrite'] = force_overwrite
103 |     sys.exit( safetensors_cf.CompactFloat(ctx.obj,input_file,output_file) )
104 |
105 |
106 | @cli.command(name="checkhdr",short_help="check header for possible errors")
107 | @readonly_input_file
108 | @click.pass_context
109 | def cmd_checkhdr(ctx,input_file:str) -> int:
110 |     sys.exit( safetensors_worker.CheckHeader(ctx.obj,input_file) )
111 |
112 |
113 | if __name__ == '__main__':
114 |     sys.stdout.reconfigure(encoding='utf-8')
115 |     cli(obj={},max_content_width=96)
--------------------------------------------------------------------------------
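Since the shared arguments and flags above are ordinary click decorators, a new subcommand only has to stack them. A hypothetical example, not part of the tool:

    @cli.command(name="hexhdr", short_help="hypothetical: dump raw header bytes as hex")
    @readonly_input_file
    @click.pass_context
    def cmd_hexhdr(ctx, input_file: str) -> int:
        from safetensors_file import SafeTensorsFile
        s = SafeTensorsFile.open_file(input_file, ctx.obj['quiet'], parseHeader=False)
        print(s.hdrbuf.hex(" "))
        sys.exit(0)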
/safetensors_worker.py:
--------------------------------------------------------------------------------
1 | import os, sys, json
2 | from safetensors_file import SafeTensorsFile
3 |
4 | def _need_force_overwrite(output_file:str,cmdLine:dict) -> bool:
5 |     if not cmdLine["force_overwrite"]:
6 |         if os.path.exists(output_file):
7 |             print(f'output file "{output_file}" already exists, use -f flag to force overwrite',file=sys.stderr)
8 |             return True
9 |     return False
10 |
11 | def WriteMetadataToHeader(cmdLine:dict,in_st_file:str,in_json_file:str,output_file:str) -> int:
12 |     if _need_force_overwrite(output_file,cmdLine): return -1
13 |
14 |     with open(in_json_file,"rt") as f:
15 |         inmeta=json.load(f)
16 |     if "__metadata__" not in inmeta:
17 |         print(f"file {in_json_file} does not contain a top-level __metadata__ item",file=sys.stderr)
18 |         #json.dump(inmeta,fp=sys.stdout,indent=2)
19 |         return -2
20 |     inmeta=inmeta["__metadata__"] #keep only metadata
21 |     #json.dump(inmeta,fp=sys.stdout,indent=2)
22 |
23 |     s=SafeTensorsFile.open_file(in_st_file)
24 |     js=s.get_header()
25 |
26 |     if inmeta==[]: #an empty list in the json file means: remove __metadata__
27 |         js.pop("__metadata__",None)
28 |         print("loaded __metadata__ is an empty list, output file will not contain __metadata__ in header")
29 |     else:
30 |         print("adding __metadata__ to header:")
31 |         json.dump(inmeta,fp=sys.stdout,indent=2)
32 |         if isinstance(inmeta,dict):
33 |             for k in inmeta: #the safetensors format only allows string values, so coerce everything with str()
34 |                 inmeta[k]=str(inmeta[k])
35 |         else:
36 |             inmeta=str(inmeta)
37 |         #js["__metadata__"]=json.dumps(inmeta,ensure_ascii=False)
38 |         js["__metadata__"]=inmeta
39 |         print()
40 |
41 |     newhdrbuf=json.dumps(js,separators=(',',':'),ensure_ascii=False).encode('utf-8')
42 |     newhdrlen:int=int(len(newhdrbuf))
43 |     pad:int=((newhdrlen+7)&(~7))-newhdrlen #pad to multiple of 8
44 |
45 |     with open(output_file,"wb") as f:
46 |         f.write(int(newhdrlen+pad).to_bytes(8,'little'))
47 |         f.write(newhdrbuf)
48 |         if pad>0: f.write(bytearray([32]*pad))
49 |         i:int=s.copy_data_to_file(f)
50 |     if i==0:
51 |         print(f"file {output_file} saved successfully")
52 |     else:
53 |         print(f"error {i} occurred when writing to file {output_file}")
54 |     return i
55 |
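# A worked illustration of the padding math above (comments only, not executed):
#   newhdrlen=13 -> ((13+7) & ~7) = 16, so pad=3 and the length field stores 16
#   newhdrlen=16 -> ((16+7) & ~7) = 16, so pad=0
# Padding uses byte 32 (ASCII space), which json.loads() ignores as whitespace,
# so the JSON header always ends on an 8-byte boundary.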
56 | def PrintHeader(cmdLine:dict,input_file:str) -> int:
57 |     s=SafeTensorsFile.open_file(input_file,cmdLine['quiet'])
58 |     js=s.get_header()
59 |
60 |     # All the .safetensors files I've seen have long key names, and as a result,
61 |     # neither the json nor the pprint package prints the text in a very readable format,
62 |     # so we print it ourselves, putting each key name & value on one long line.
63 |     # Note the printout is Python-style, not necessarily valid JSON.
64 |     firstKey=True
65 |     print("{")
66 |     for key in js:
67 |         if firstKey:
68 |             firstKey=False
69 |         else:
70 |             print(",")
71 |         json.dump(key,fp=sys.stdout,ensure_ascii=False,separators=(',',':'))
72 |         print(": ",end='')
73 |         json.dump(js[key],fp=sys.stdout,ensure_ascii=False,separators=(',',':'))
74 |     print("\n}")
75 |     return 0
76 |
77 | def _ParseMore(d:dict):
78 |     '''Basically try to turn this:
79 |
80 |     "ss_dataset_dirs":"{\"abc\": {\"n_repeats\": 2, \"img_count\": 60}}",
81 |
82 |     into this:
83 |
84 |     "ss_dataset_dirs":{
85 |         "abc":{
86 |             "n_repeats":2,
87 |             "img_count":60
88 |         }
89 |     },
90 |
91 |     '''
92 |     for key in d:
93 |         value=d[key]
94 |         #print("+++",key,value,type(value),"+++",sep='|')
95 |         if isinstance(value,str):
96 |             try:
97 |                 v2=json.loads(value)
98 |                 d[key]=v2
99 |                 value=v2
100 |             except json.JSONDecodeError:
101 |                 pass
102 |         if isinstance(value,dict):
103 |             _ParseMore(value)
104 |
105 | def PrintMetadata(cmdLine:dict,input_file:str) -> int:
106 |     with SafeTensorsFile.open_file(input_file,cmdLine['quiet']) as s:
107 |         js=s.get_header()
108 |
109 |         if "__metadata__" not in js:
110 |             print("file header does not contain a __metadata__ item",file=sys.stderr)
111 |             return -2
112 |
113 |         md=js["__metadata__"]
114 |         if cmdLine['parse_more']:
115 |             _ParseMore(md)
116 |         json.dump({"__metadata__":md},fp=sys.stdout,ensure_ascii=False,separators=(',',':'),indent=1)
117 |     return 0
118 |
119 | def HeaderKeysToLists(cmdLine:dict,input_file:str) -> int:
120 |     s=SafeTensorsFile.open_file(input_file,cmdLine['quiet'])
121 |     js=s.get_header()
122 |
123 |     _lora_keys:list[tuple[str,bool]]=[] # use list to sort by name
124 |     for key in js:
125 |         if key=='__metadata__': continue
126 |         v=js[key]
127 |         isScalar=False
128 |         if isinstance(v,dict):
129 |             if 'shape' in v:
130 |                 if 0==len(v['shape']):
131 |                     isScalar=True
132 |         _lora_keys.append((key,isScalar))
133 |     _lora_keys.sort(key=lambda x:x[0])
134 |
135 |     def printkeylist(kl):
136 |         firstKey=True
137 |         for key in kl:
138 |             if firstKey: firstKey=False
139 |             else: print(",")
140 |             print(key,end='')
141 |         print()
142 |
143 |     print("# use list to keep insertion order")
144 |     print("_lora_keys:list[tuple[str,bool]]=[")
145 |     printkeylist(_lora_keys)
146 |     print("]")
147 |
148 |     return 0
149 |
150 |
151 | def ExtractHeader(cmdLine:dict,input_file:str,output_file:str)->int:
152 |     if _need_force_overwrite(output_file,cmdLine): return -1
153 |
154 |     s=SafeTensorsFile.open_file(input_file,parseHeader=False)
155 |     if s.error!=0: return s.error
156 |
157 |     hdrbuf=s.hdrbuf
158 |     s.close_file() #close it in case user wants to write back to input_file itself
159 |     with open(output_file,"wb") as fo:
160 |         wn=fo.write(hdrbuf)
161 |     if wn!=len(hdrbuf):
162 |         print(f"write output file failed, tried to write {len(hdrbuf)} bytes, only wrote {wn} bytes",file=sys.stderr)
163 |         return -1
164 |     print(f"raw header saved to file {output_file}")
165 |     return 0
166 |
167 |
168 | def _CheckLoRA_internal(s:SafeTensorsFile)->int:
169 |     import lora_keys_sd15 as lora_keys
170 |     js=s.get_header()
171 |     set_scalar=set()
172 |     set_nonscalar=set()
173 |     for x in lora_keys._lora_keys:
174 |         if x[1]: set_scalar.add(x[0])
175 |         else: set_nonscalar.add(x[0])
176 |
177 |     bad_unknowns:list[str]=[] # unrecognized keys
178 |     bad_scalars:list[str]=[] # keys that should be scalar but are not
179 |     bad_nonscalars:list[str]=[] # keys that should be nonscalar but are scalar
180 |     for key in js:
181 |         if key in set_nonscalar:
182 |             if js[key]['shape']==[]: bad_nonscalars.append(key)
183 |             set_nonscalar.remove(key)
184 |         elif key in set_scalar:
185 |             if js[key]['shape']!=[]: bad_scalars.append(key)
186 |             set_scalar.remove(key)
187 |         else:
188 |             if "__metadata__"!=key:
189 |                 bad_unknowns.append(key)
190 |
191 |     hasError=False
192 |
193 |     if len(bad_unknowns)!=0:
194 |         print("INFO: unrecognized items:")
195 |         for x in bad_unknowns: print(" ",x)
196 |         #hasError=True
197 |
198 |     if len(set_scalar)>0:
199 |         print("missing scalar keys:")
200 |         for x in set_scalar: print(" ",x)
201 |         hasError=True
202 |     if len(set_nonscalar)>0:
203 |         print("missing nonscalar keys:")
204 |         for x in set_nonscalar: print(" ",x)
205 |         hasError=True
206 |
207 |     if len(bad_scalars)!=0:
208 |         print("keys expected to be scalar but are nonscalar:")
209 |         for x in bad_scalars: print(" ",x)
210 |         hasError=True
211 |
212 |     if len(bad_nonscalars)!=0:
213 |         print("keys expected to be nonscalar but are scalar:")
214 |         for x in bad_nonscalars: print(" ",x)
215 |         hasError=True
216 |
217 |     return (1 if hasError else 0)
218 |
219 | def CheckLoRA(cmdLine:dict,input_file:str)->int:
220 |     s=SafeTensorsFile.open_file(input_file)
221 |     i:int=_CheckLoRA_internal(s)
222 |     if i==0: print("looks like an OK SD 1.x LoRA file")
223 |     else: print("input file is not a valid SD 1.x LoRA file")
224 |     return i #propagate the result so the CLI exit code reflects it
225 |
226 | def ExtractData(cmdLine:dict,input_file:str,key_name:str,output_file:str)->int:
227 |     if _need_force_overwrite(output_file,cmdLine): return -1
228 |
229 |     s=SafeTensorsFile.open_file(input_file,cmdLine['quiet'])
230 |     if s.error!=0: return s.error
231 |
232 |     bindata=s.load_one_tensor(key_name)
233 |     s.close_file() #close it just in case user wants to write back to input_file itself
234 |     if bindata is None:
235 |         print(f'key "{key_name}" not found in header (key names are case-sensitive)',file=sys.stderr)
236 |         return -1
237 |
238 |     with open(output_file,"wb") as fo:
239 |         wn=fo.write(bindata)
240 |     if wn!=len(bindata):
241 |         print(f"write output file failed, tried to write {len(bindata)} bytes, only wrote {wn} bytes",file=sys.stderr)
242 |         return -1
243 |     if not cmdLine['quiet']: print(f"{key_name} saved to {output_file}, len={wn}")
244 |     return 0
245 |
246 | def CheckHeader(cmdLine:dict,input_file:str)->int:
247 |     rv:int=0
248 |     s=SafeTensorsFile.open_file(input_file)
249 |     maxoffset=int(s.st.st_size-8-s.headerlen)
250 |     h=s.get_header()
251 |     for k,v in h.items():
252 |         if k=='__metadata__': continue
253 |         #print(k,v)
254 |         msgs=[]
255 |         if v['data_offsets'][0]>maxoffset or v['data_offsets'][1]>maxoffset:
256 |             msgs.append("data past end of file")
257 |         lenv=int(v['data_offsets'][1]-v['data_offsets'][0])
258 |         items=int(1)
259 |         for i in v['shape']: items*=int(i)
260 |
261 |         if v['dtype']=="F16" or v['dtype']=="BF16":
262 |             item_size=int(2)
263 |         elif v['dtype']=="F32":
264 |             item_size=int(4)
265 |         elif v['dtype']=="F64":
266 |             item_size=int(8)
267 |         else:
268 |             item_size=int(0) #unknown dtype, fall back to the divisibility check below
269 |
270 |         if item_size==0:
271 |             if (lenv % items)!=0:
272 |                 msgs.append("length is not an integral multiple of item count")
273 |         else:
274 |             len2=item_size*items
275 |             if len2!=lenv:
276 |                 msgs.append(f"length should be {len2}, actual length is {lenv}")
277 |
278 |         if len(msgs) > 0:
279 |             print(f"error in {k}:{v}:")
280 |             for m in msgs:
281 |                 print(" * ",m,sep='')
282 |             rv=1
283 |
284 |     if rv==0: print("no error found")
285 |     return rv
--------------------------------------------------------------------------------
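A worked example of the size rule CheckHeader enforces (a standalone sketch, not part of the tool):

    # an F16 tensor of shape [2,3] has 2*3 = 6 elements at 2 bytes each,
    # so its data_offsets must span exactly 12 bytes
    entry = {"dtype": "F16", "shape": [2, 3], "data_offsets": [0, 12]}
    items = 1
    for n in entry["shape"]:
        items *= n
    assert entry["data_offsets"][1] - entry["data_offsets"][0] == items * 2

For a dtype the checker does not recognize, it can only require that the byte length be divisible by the element count.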
/lora_keys_sd15.py:
--------------------------------------------------------------------------------
1 | # SD 1.5 LoRA keys
2 | _lora_keys:list[tuple[str,bool]]=[
3 | ('lora_te_text_model_encoder_layers_0_mlp_fc1.alpha', True),
4 | ('lora_te_text_model_encoder_layers_0_mlp_fc1.lora_down.weight', False),
5 | ('lora_te_text_model_encoder_layers_0_mlp_fc1.lora_up.weight', False),
6 | ('lora_te_text_model_encoder_layers_0_mlp_fc2.alpha', True),
7 | ('lora_te_text_model_encoder_layers_0_mlp_fc2.lora_down.weight', False),
8 | ('lora_te_text_model_encoder_layers_0_mlp_fc2.lora_up.weight', False),
9 | ('lora_te_text_model_encoder_layers_0_self_attn_k_proj.alpha', True),
10 | ('lora_te_text_model_encoder_layers_0_self_attn_k_proj.lora_down.weight', False),
11 | ('lora_te_text_model_encoder_layers_0_self_attn_k_proj.lora_up.weight', False),
12 | ('lora_te_text_model_encoder_layers_0_self_attn_out_proj.alpha', True),
13 | ('lora_te_text_model_encoder_layers_0_self_attn_out_proj.lora_down.weight', False),
14 | ('lora_te_text_model_encoder_layers_0_self_attn_out_proj.lora_up.weight', False),
15 | ('lora_te_text_model_encoder_layers_0_self_attn_q_proj.alpha', True),
16 | ('lora_te_text_model_encoder_layers_0_self_attn_q_proj.lora_down.weight', False),
17 | ('lora_te_text_model_encoder_layers_0_self_attn_q_proj.lora_up.weight', False),
18 | ('lora_te_text_model_encoder_layers_0_self_attn_v_proj.alpha', True),
19 | ('lora_te_text_model_encoder_layers_0_self_attn_v_proj.lora_down.weight', False),
20 | ('lora_te_text_model_encoder_layers_0_self_attn_v_proj.lora_up.weight', False),
21 | ('lora_te_text_model_encoder_layers_1_mlp_fc1.alpha', True),
22 | ('lora_te_text_model_encoder_layers_1_mlp_fc1.lora_down.weight', False),
23 | ('lora_te_text_model_encoder_layers_1_mlp_fc1.lora_up.weight', False),
24 | ('lora_te_text_model_encoder_layers_1_mlp_fc2.alpha', True),
25 | ('lora_te_text_model_encoder_layers_1_mlp_fc2.lora_down.weight', False),
26 | ('lora_te_text_model_encoder_layers_1_mlp_fc2.lora_up.weight', False),
27 | ('lora_te_text_model_encoder_layers_1_self_attn_k_proj.alpha', True),
28 | ('lora_te_text_model_encoder_layers_1_self_attn_k_proj.lora_down.weight', False),
29 | ('lora_te_text_model_encoder_layers_1_self_attn_k_proj.lora_up.weight', False),
30 | ('lora_te_text_model_encoder_layers_1_self_attn_out_proj.alpha', True),
31 | ('lora_te_text_model_encoder_layers_1_self_attn_out_proj.lora_down.weight', False),
32 | ('lora_te_text_model_encoder_layers_1_self_attn_out_proj.lora_up.weight', False),
33 | ('lora_te_text_model_encoder_layers_1_self_attn_q_proj.alpha', True),
34 | ('lora_te_text_model_encoder_layers_1_self_attn_q_proj.lora_down.weight', False),
35 | ('lora_te_text_model_encoder_layers_1_self_attn_q_proj.lora_up.weight', False),
36 | ('lora_te_text_model_encoder_layers_1_self_attn_v_proj.alpha', True),
37 | ('lora_te_text_model_encoder_layers_1_self_attn_v_proj.lora_down.weight', False),
38 | ('lora_te_text_model_encoder_layers_1_self_attn_v_proj.lora_up.weight', False),
39 | ('lora_te_text_model_encoder_layers_2_mlp_fc1.alpha', True),
40 | ('lora_te_text_model_encoder_layers_2_mlp_fc1.lora_down.weight', False),
41 | ('lora_te_text_model_encoder_layers_2_mlp_fc1.lora_up.weight', False),
42 |
('lora_te_text_model_encoder_layers_2_mlp_fc2.alpha', True), 43 | ('lora_te_text_model_encoder_layers_2_mlp_fc2.lora_down.weight', False), 44 | ('lora_te_text_model_encoder_layers_2_mlp_fc2.lora_up.weight', False), 45 | ('lora_te_text_model_encoder_layers_2_self_attn_k_proj.alpha', True), 46 | ('lora_te_text_model_encoder_layers_2_self_attn_k_proj.lora_down.weight', False), 47 | ('lora_te_text_model_encoder_layers_2_self_attn_k_proj.lora_up.weight', False), 48 | ('lora_te_text_model_encoder_layers_2_self_attn_out_proj.alpha', True), 49 | ('lora_te_text_model_encoder_layers_2_self_attn_out_proj.lora_down.weight', False), 50 | ('lora_te_text_model_encoder_layers_2_self_attn_out_proj.lora_up.weight', False), 51 | ('lora_te_text_model_encoder_layers_2_self_attn_q_proj.alpha', True), 52 | ('lora_te_text_model_encoder_layers_2_self_attn_q_proj.lora_down.weight', False), 53 | ('lora_te_text_model_encoder_layers_2_self_attn_q_proj.lora_up.weight', False), 54 | ('lora_te_text_model_encoder_layers_2_self_attn_v_proj.alpha', True), 55 | ('lora_te_text_model_encoder_layers_2_self_attn_v_proj.lora_down.weight', False), 56 | ('lora_te_text_model_encoder_layers_2_self_attn_v_proj.lora_up.weight', False), 57 | ('lora_te_text_model_encoder_layers_3_mlp_fc1.alpha', True), 58 | ('lora_te_text_model_encoder_layers_3_mlp_fc1.lora_down.weight', False), 59 | ('lora_te_text_model_encoder_layers_3_mlp_fc1.lora_up.weight', False), 60 | ('lora_te_text_model_encoder_layers_3_mlp_fc2.alpha', True), 61 | ('lora_te_text_model_encoder_layers_3_mlp_fc2.lora_down.weight', False), 62 | ('lora_te_text_model_encoder_layers_3_mlp_fc2.lora_up.weight', False), 63 | ('lora_te_text_model_encoder_layers_3_self_attn_k_proj.alpha', True), 64 | ('lora_te_text_model_encoder_layers_3_self_attn_k_proj.lora_down.weight', False), 65 | ('lora_te_text_model_encoder_layers_3_self_attn_k_proj.lora_up.weight', False), 66 | ('lora_te_text_model_encoder_layers_3_self_attn_out_proj.alpha', True), 67 | ('lora_te_text_model_encoder_layers_3_self_attn_out_proj.lora_down.weight', False), 68 | ('lora_te_text_model_encoder_layers_3_self_attn_out_proj.lora_up.weight', False), 69 | ('lora_te_text_model_encoder_layers_3_self_attn_q_proj.alpha', True), 70 | ('lora_te_text_model_encoder_layers_3_self_attn_q_proj.lora_down.weight', False), 71 | ('lora_te_text_model_encoder_layers_3_self_attn_q_proj.lora_up.weight', False), 72 | ('lora_te_text_model_encoder_layers_3_self_attn_v_proj.alpha', True), 73 | ('lora_te_text_model_encoder_layers_3_self_attn_v_proj.lora_down.weight', False), 74 | ('lora_te_text_model_encoder_layers_3_self_attn_v_proj.lora_up.weight', False), 75 | ('lora_te_text_model_encoder_layers_4_mlp_fc1.alpha', True), 76 | ('lora_te_text_model_encoder_layers_4_mlp_fc1.lora_down.weight', False), 77 | ('lora_te_text_model_encoder_layers_4_mlp_fc1.lora_up.weight', False), 78 | ('lora_te_text_model_encoder_layers_4_mlp_fc2.alpha', True), 79 | ('lora_te_text_model_encoder_layers_4_mlp_fc2.lora_down.weight', False), 80 | ('lora_te_text_model_encoder_layers_4_mlp_fc2.lora_up.weight', False), 81 | ('lora_te_text_model_encoder_layers_4_self_attn_k_proj.alpha', True), 82 | ('lora_te_text_model_encoder_layers_4_self_attn_k_proj.lora_down.weight', False), 83 | ('lora_te_text_model_encoder_layers_4_self_attn_k_proj.lora_up.weight', False), 84 | ('lora_te_text_model_encoder_layers_4_self_attn_out_proj.alpha', True), 85 | ('lora_te_text_model_encoder_layers_4_self_attn_out_proj.lora_down.weight', False), 86 | 
('lora_te_text_model_encoder_layers_4_self_attn_out_proj.lora_up.weight', False), 87 | ('lora_te_text_model_encoder_layers_4_self_attn_q_proj.alpha', True), 88 | ('lora_te_text_model_encoder_layers_4_self_attn_q_proj.lora_down.weight', False), 89 | ('lora_te_text_model_encoder_layers_4_self_attn_q_proj.lora_up.weight', False), 90 | ('lora_te_text_model_encoder_layers_4_self_attn_v_proj.alpha', True), 91 | ('lora_te_text_model_encoder_layers_4_self_attn_v_proj.lora_down.weight', False), 92 | ('lora_te_text_model_encoder_layers_4_self_attn_v_proj.lora_up.weight', False), 93 | ('lora_te_text_model_encoder_layers_5_mlp_fc1.alpha', True), 94 | ('lora_te_text_model_encoder_layers_5_mlp_fc1.lora_down.weight', False), 95 | ('lora_te_text_model_encoder_layers_5_mlp_fc1.lora_up.weight', False), 96 | ('lora_te_text_model_encoder_layers_5_mlp_fc2.alpha', True), 97 | ('lora_te_text_model_encoder_layers_5_mlp_fc2.lora_down.weight', False), 98 | ('lora_te_text_model_encoder_layers_5_mlp_fc2.lora_up.weight', False), 99 | ('lora_te_text_model_encoder_layers_5_self_attn_k_proj.alpha', True), 100 | ('lora_te_text_model_encoder_layers_5_self_attn_k_proj.lora_down.weight', False), 101 | ('lora_te_text_model_encoder_layers_5_self_attn_k_proj.lora_up.weight', False), 102 | ('lora_te_text_model_encoder_layers_5_self_attn_out_proj.alpha', True), 103 | ('lora_te_text_model_encoder_layers_5_self_attn_out_proj.lora_down.weight', False), 104 | ('lora_te_text_model_encoder_layers_5_self_attn_out_proj.lora_up.weight', False), 105 | ('lora_te_text_model_encoder_layers_5_self_attn_q_proj.alpha', True), 106 | ('lora_te_text_model_encoder_layers_5_self_attn_q_proj.lora_down.weight', False), 107 | ('lora_te_text_model_encoder_layers_5_self_attn_q_proj.lora_up.weight', False), 108 | ('lora_te_text_model_encoder_layers_5_self_attn_v_proj.alpha', True), 109 | ('lora_te_text_model_encoder_layers_5_self_attn_v_proj.lora_down.weight', False), 110 | ('lora_te_text_model_encoder_layers_5_self_attn_v_proj.lora_up.weight', False), 111 | ('lora_te_text_model_encoder_layers_6_mlp_fc1.alpha', True), 112 | ('lora_te_text_model_encoder_layers_6_mlp_fc1.lora_down.weight', False), 113 | ('lora_te_text_model_encoder_layers_6_mlp_fc1.lora_up.weight', False), 114 | ('lora_te_text_model_encoder_layers_6_mlp_fc2.alpha', True), 115 | ('lora_te_text_model_encoder_layers_6_mlp_fc2.lora_down.weight', False), 116 | ('lora_te_text_model_encoder_layers_6_mlp_fc2.lora_up.weight', False), 117 | ('lora_te_text_model_encoder_layers_6_self_attn_k_proj.alpha', True), 118 | ('lora_te_text_model_encoder_layers_6_self_attn_k_proj.lora_down.weight', False), 119 | ('lora_te_text_model_encoder_layers_6_self_attn_k_proj.lora_up.weight', False), 120 | ('lora_te_text_model_encoder_layers_6_self_attn_out_proj.alpha', True), 121 | ('lora_te_text_model_encoder_layers_6_self_attn_out_proj.lora_down.weight', False), 122 | ('lora_te_text_model_encoder_layers_6_self_attn_out_proj.lora_up.weight', False), 123 | ('lora_te_text_model_encoder_layers_6_self_attn_q_proj.alpha', True), 124 | ('lora_te_text_model_encoder_layers_6_self_attn_q_proj.lora_down.weight', False), 125 | ('lora_te_text_model_encoder_layers_6_self_attn_q_proj.lora_up.weight', False), 126 | ('lora_te_text_model_encoder_layers_6_self_attn_v_proj.alpha', True), 127 | ('lora_te_text_model_encoder_layers_6_self_attn_v_proj.lora_down.weight', False), 128 | ('lora_te_text_model_encoder_layers_6_self_attn_v_proj.lora_up.weight', False), 129 | ('lora_te_text_model_encoder_layers_7_mlp_fc1.alpha', True), 130 | 
('lora_te_text_model_encoder_layers_7_mlp_fc1.lora_down.weight', False), 131 | ('lora_te_text_model_encoder_layers_7_mlp_fc1.lora_up.weight', False), 132 | ('lora_te_text_model_encoder_layers_7_mlp_fc2.alpha', True), 133 | ('lora_te_text_model_encoder_layers_7_mlp_fc2.lora_down.weight', False), 134 | ('lora_te_text_model_encoder_layers_7_mlp_fc2.lora_up.weight', False), 135 | ('lora_te_text_model_encoder_layers_7_self_attn_k_proj.alpha', True), 136 | ('lora_te_text_model_encoder_layers_7_self_attn_k_proj.lora_down.weight', False), 137 | ('lora_te_text_model_encoder_layers_7_self_attn_k_proj.lora_up.weight', False), 138 | ('lora_te_text_model_encoder_layers_7_self_attn_out_proj.alpha', True), 139 | ('lora_te_text_model_encoder_layers_7_self_attn_out_proj.lora_down.weight', False), 140 | ('lora_te_text_model_encoder_layers_7_self_attn_out_proj.lora_up.weight', False), 141 | ('lora_te_text_model_encoder_layers_7_self_attn_q_proj.alpha', True), 142 | ('lora_te_text_model_encoder_layers_7_self_attn_q_proj.lora_down.weight', False), 143 | ('lora_te_text_model_encoder_layers_7_self_attn_q_proj.lora_up.weight', False), 144 | ('lora_te_text_model_encoder_layers_7_self_attn_v_proj.alpha', True), 145 | ('lora_te_text_model_encoder_layers_7_self_attn_v_proj.lora_down.weight', False), 146 | ('lora_te_text_model_encoder_layers_7_self_attn_v_proj.lora_up.weight', False), 147 | ('lora_te_text_model_encoder_layers_8_mlp_fc1.alpha', True), 148 | ('lora_te_text_model_encoder_layers_8_mlp_fc1.lora_down.weight', False), 149 | ('lora_te_text_model_encoder_layers_8_mlp_fc1.lora_up.weight', False), 150 | ('lora_te_text_model_encoder_layers_8_mlp_fc2.alpha', True), 151 | ('lora_te_text_model_encoder_layers_8_mlp_fc2.lora_down.weight', False), 152 | ('lora_te_text_model_encoder_layers_8_mlp_fc2.lora_up.weight', False), 153 | ('lora_te_text_model_encoder_layers_8_self_attn_k_proj.alpha', True), 154 | ('lora_te_text_model_encoder_layers_8_self_attn_k_proj.lora_down.weight', False), 155 | ('lora_te_text_model_encoder_layers_8_self_attn_k_proj.lora_up.weight', False), 156 | ('lora_te_text_model_encoder_layers_8_self_attn_out_proj.alpha', True), 157 | ('lora_te_text_model_encoder_layers_8_self_attn_out_proj.lora_down.weight', False), 158 | ('lora_te_text_model_encoder_layers_8_self_attn_out_proj.lora_up.weight', False), 159 | ('lora_te_text_model_encoder_layers_8_self_attn_q_proj.alpha', True), 160 | ('lora_te_text_model_encoder_layers_8_self_attn_q_proj.lora_down.weight', False), 161 | ('lora_te_text_model_encoder_layers_8_self_attn_q_proj.lora_up.weight', False), 162 | ('lora_te_text_model_encoder_layers_8_self_attn_v_proj.alpha', True), 163 | ('lora_te_text_model_encoder_layers_8_self_attn_v_proj.lora_down.weight', False), 164 | ('lora_te_text_model_encoder_layers_8_self_attn_v_proj.lora_up.weight', False), 165 | ('lora_te_text_model_encoder_layers_9_mlp_fc1.alpha', True), 166 | ('lora_te_text_model_encoder_layers_9_mlp_fc1.lora_down.weight', False), 167 | ('lora_te_text_model_encoder_layers_9_mlp_fc1.lora_up.weight', False), 168 | ('lora_te_text_model_encoder_layers_9_mlp_fc2.alpha', True), 169 | ('lora_te_text_model_encoder_layers_9_mlp_fc2.lora_down.weight', False), 170 | ('lora_te_text_model_encoder_layers_9_mlp_fc2.lora_up.weight', False), 171 | ('lora_te_text_model_encoder_layers_9_self_attn_k_proj.alpha', True), 172 | ('lora_te_text_model_encoder_layers_9_self_attn_k_proj.lora_down.weight', False), 173 | ('lora_te_text_model_encoder_layers_9_self_attn_k_proj.lora_up.weight', False), 174 | 
('lora_te_text_model_encoder_layers_9_self_attn_out_proj.alpha', True), 175 | ('lora_te_text_model_encoder_layers_9_self_attn_out_proj.lora_down.weight', False), 176 | ('lora_te_text_model_encoder_layers_9_self_attn_out_proj.lora_up.weight', False), 177 | ('lora_te_text_model_encoder_layers_9_self_attn_q_proj.alpha', True), 178 | ('lora_te_text_model_encoder_layers_9_self_attn_q_proj.lora_down.weight', False), 179 | ('lora_te_text_model_encoder_layers_9_self_attn_q_proj.lora_up.weight', False), 180 | ('lora_te_text_model_encoder_layers_9_self_attn_v_proj.alpha', True), 181 | ('lora_te_text_model_encoder_layers_9_self_attn_v_proj.lora_down.weight', False), 182 | ('lora_te_text_model_encoder_layers_9_self_attn_v_proj.lora_up.weight', False), 183 | ('lora_te_text_model_encoder_layers_10_mlp_fc1.alpha', True), 184 | ('lora_te_text_model_encoder_layers_10_mlp_fc1.lora_down.weight', False), 185 | ('lora_te_text_model_encoder_layers_10_mlp_fc1.lora_up.weight', False), 186 | ('lora_te_text_model_encoder_layers_10_mlp_fc2.alpha', True), 187 | ('lora_te_text_model_encoder_layers_10_mlp_fc2.lora_down.weight', False), 188 | ('lora_te_text_model_encoder_layers_10_mlp_fc2.lora_up.weight', False), 189 | ('lora_te_text_model_encoder_layers_10_self_attn_k_proj.alpha', True), 190 | ('lora_te_text_model_encoder_layers_10_self_attn_k_proj.lora_down.weight', False), 191 | ('lora_te_text_model_encoder_layers_10_self_attn_k_proj.lora_up.weight', False), 192 | ('lora_te_text_model_encoder_layers_10_self_attn_out_proj.alpha', True), 193 | ('lora_te_text_model_encoder_layers_10_self_attn_out_proj.lora_down.weight', False), 194 | ('lora_te_text_model_encoder_layers_10_self_attn_out_proj.lora_up.weight', False), 195 | ('lora_te_text_model_encoder_layers_10_self_attn_q_proj.alpha', True), 196 | ('lora_te_text_model_encoder_layers_10_self_attn_q_proj.lora_down.weight', False), 197 | ('lora_te_text_model_encoder_layers_10_self_attn_q_proj.lora_up.weight', False), 198 | ('lora_te_text_model_encoder_layers_10_self_attn_v_proj.alpha', True), 199 | ('lora_te_text_model_encoder_layers_10_self_attn_v_proj.lora_down.weight', False), 200 | ('lora_te_text_model_encoder_layers_10_self_attn_v_proj.lora_up.weight', False), 201 | ('lora_te_text_model_encoder_layers_11_mlp_fc1.alpha', True), 202 | ('lora_te_text_model_encoder_layers_11_mlp_fc1.lora_down.weight', False), 203 | ('lora_te_text_model_encoder_layers_11_mlp_fc1.lora_up.weight', False), 204 | ('lora_te_text_model_encoder_layers_11_mlp_fc2.alpha', True), 205 | ('lora_te_text_model_encoder_layers_11_mlp_fc2.lora_down.weight', False), 206 | ('lora_te_text_model_encoder_layers_11_mlp_fc2.lora_up.weight', False), 207 | ('lora_te_text_model_encoder_layers_11_self_attn_k_proj.alpha', True), 208 | ('lora_te_text_model_encoder_layers_11_self_attn_k_proj.lora_down.weight', False), 209 | ('lora_te_text_model_encoder_layers_11_self_attn_k_proj.lora_up.weight', False), 210 | ('lora_te_text_model_encoder_layers_11_self_attn_out_proj.alpha', True), 211 | ('lora_te_text_model_encoder_layers_11_self_attn_out_proj.lora_down.weight', False), 212 | ('lora_te_text_model_encoder_layers_11_self_attn_out_proj.lora_up.weight', False), 213 | ('lora_te_text_model_encoder_layers_11_self_attn_q_proj.alpha', True), 214 | ('lora_te_text_model_encoder_layers_11_self_attn_q_proj.lora_down.weight', False), 215 | ('lora_te_text_model_encoder_layers_11_self_attn_q_proj.lora_up.weight', False), 216 | ('lora_te_text_model_encoder_layers_11_self_attn_v_proj.alpha', True), 217 | 
('lora_te_text_model_encoder_layers_11_self_attn_v_proj.lora_down.weight', False), 218 | ('lora_te_text_model_encoder_layers_11_self_attn_v_proj.lora_up.weight', False), 219 | ('lora_unet_down_blocks_0_attentions_0_proj_in.alpha', True), 220 | ('lora_unet_down_blocks_0_attentions_0_proj_in.lora_down.weight', False), 221 | ('lora_unet_down_blocks_0_attentions_0_proj_in.lora_up.weight', False), 222 | ('lora_unet_down_blocks_0_attentions_0_proj_out.alpha', True), 223 | ('lora_unet_down_blocks_0_attentions_0_proj_out.lora_down.weight', False), 224 | ('lora_unet_down_blocks_0_attentions_0_proj_out.lora_up.weight', False), 225 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_k.alpha', True), 226 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 227 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 228 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha', True), 229 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 230 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 231 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_q.alpha', True), 232 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 233 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 234 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_v.alpha', True), 235 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 236 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 237 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_k.alpha', True), 238 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 239 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 240 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha', True), 241 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 242 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 243 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_q.alpha', True), 244 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 245 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 246 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_v.alpha', True), 247 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 248 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 249 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha', True), 250 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 251 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 252 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_2.alpha', True), 253 | ('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight', False), 254 | 
('lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight', False), 255 | ('lora_unet_down_blocks_0_attentions_1_proj_in.alpha', True), 256 | ('lora_unet_down_blocks_0_attentions_1_proj_in.lora_down.weight', False), 257 | ('lora_unet_down_blocks_0_attentions_1_proj_in.lora_up.weight', False), 258 | ('lora_unet_down_blocks_0_attentions_1_proj_out.alpha', True), 259 | ('lora_unet_down_blocks_0_attentions_1_proj_out.lora_down.weight', False), 260 | ('lora_unet_down_blocks_0_attentions_1_proj_out.lora_up.weight', False), 261 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_k.alpha', True), 262 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 263 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 264 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha', True), 265 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 266 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 267 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_q.alpha', True), 268 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 269 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 270 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_v.alpha', True), 271 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 272 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 273 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_k.alpha', True), 274 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 275 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 276 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha', True), 277 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 278 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 279 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_q.alpha', True), 280 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 281 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 282 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_v.alpha', True), 283 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 284 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 285 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha', True), 286 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 287 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 288 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_2.alpha', True), 289 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_2.lora_down.weight', False), 290 | ('lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight', 
False), 291 | ('lora_unet_down_blocks_1_attentions_0_proj_in.alpha', True), 292 | ('lora_unet_down_blocks_1_attentions_0_proj_in.lora_down.weight', False), 293 | ('lora_unet_down_blocks_1_attentions_0_proj_in.lora_up.weight', False), 294 | ('lora_unet_down_blocks_1_attentions_0_proj_out.alpha', True), 295 | ('lora_unet_down_blocks_1_attentions_0_proj_out.lora_down.weight', False), 296 | ('lora_unet_down_blocks_1_attentions_0_proj_out.lora_up.weight', False), 297 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.alpha', True), 298 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 299 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 300 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha', True), 301 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 302 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 303 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.alpha', True), 304 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 305 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 306 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.alpha', True), 307 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 308 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 309 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.alpha', True), 310 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 311 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 312 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha', True), 313 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 314 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 315 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.alpha', True), 316 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 317 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 318 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.alpha', True), 319 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 320 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 321 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha', True), 322 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 323 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 324 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.alpha', True), 325 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight', False), 326 | ('lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight', False), 327 | ('lora_unet_down_blocks_1_attentions_1_proj_in.alpha', True), 328 | 
('lora_unet_down_blocks_1_attentions_1_proj_in.lora_down.weight', False), 329 | ('lora_unet_down_blocks_1_attentions_1_proj_in.lora_up.weight', False), 330 | ('lora_unet_down_blocks_1_attentions_1_proj_out.alpha', True), 331 | ('lora_unet_down_blocks_1_attentions_1_proj_out.lora_down.weight', False), 332 | ('lora_unet_down_blocks_1_attentions_1_proj_out.lora_up.weight', False), 333 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.alpha', True), 334 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 335 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 336 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha', True), 337 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 338 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 339 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.alpha', True), 340 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 341 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 342 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.alpha', True), 343 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 344 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 345 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.alpha', True), 346 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 347 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 348 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha', True), 349 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 350 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 351 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.alpha', True), 352 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 353 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 354 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.alpha', True), 355 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 356 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 357 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha', True), 358 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 359 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 360 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.alpha', True), 361 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.lora_down.weight', False), 362 | ('lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight', False), 363 | ('lora_unet_down_blocks_2_attentions_0_proj_in.alpha', True), 364 | ('lora_unet_down_blocks_2_attentions_0_proj_in.lora_down.weight', False), 365 | 
('lora_unet_down_blocks_2_attentions_0_proj_in.lora_up.weight', False), 366 | ('lora_unet_down_blocks_2_attentions_0_proj_out.alpha', True), 367 | ('lora_unet_down_blocks_2_attentions_0_proj_out.lora_down.weight', False), 368 | ('lora_unet_down_blocks_2_attentions_0_proj_out.lora_up.weight', False), 369 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.alpha', True), 370 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 371 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 372 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha', True), 373 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 374 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 375 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.alpha', True), 376 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 377 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 378 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.alpha', True), 379 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 380 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 381 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.alpha', True), 382 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 383 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 384 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha', True), 385 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 386 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 387 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.alpha', True), 388 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 389 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 390 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.alpha', True), 391 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 392 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 393 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha', True), 394 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 395 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 396 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.alpha', True), 397 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight', False), 398 | ('lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight', False), 399 | ('lora_unet_down_blocks_2_attentions_1_proj_in.alpha', True), 400 | ('lora_unet_down_blocks_2_attentions_1_proj_in.lora_down.weight', False), 401 | ('lora_unet_down_blocks_2_attentions_1_proj_in.lora_up.weight', False), 402 | 
('lora_unet_down_blocks_2_attentions_1_proj_out.alpha', True), 403 | ('lora_unet_down_blocks_2_attentions_1_proj_out.lora_down.weight', False), 404 | ('lora_unet_down_blocks_2_attentions_1_proj_out.lora_up.weight', False), 405 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.alpha', True), 406 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 407 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 408 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha', True), 409 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 410 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 411 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.alpha', True), 412 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 413 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 414 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.alpha', True), 415 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 416 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 417 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.alpha', True), 418 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 419 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 420 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha', True), 421 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 422 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 423 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.alpha', True), 424 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 425 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 426 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.alpha', True), 427 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 428 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 429 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha', True), 430 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 431 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 432 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.alpha', True), 433 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_down.weight', False), 434 | ('lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight', False), 435 | ('lora_unet_mid_block_attentions_0_proj_in.alpha', True), 436 | ('lora_unet_mid_block_attentions_0_proj_in.lora_down.weight', False), 437 | ('lora_unet_mid_block_attentions_0_proj_in.lora_up.weight', False), 438 | ('lora_unet_mid_block_attentions_0_proj_out.alpha', True), 439 | 
('lora_unet_mid_block_attentions_0_proj_out.lora_down.weight', False), 440 | ('lora_unet_mid_block_attentions_0_proj_out.lora_up.weight', False), 441 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.alpha', True), 442 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 443 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 444 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha', True), 445 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 446 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 447 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.alpha', True), 448 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 449 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 450 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.alpha', True), 451 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 452 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 453 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.alpha', True), 454 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 455 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 456 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha', True), 457 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 458 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 459 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.alpha', True), 460 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 461 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 462 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.alpha', True), 463 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 464 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 465 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha', True), 466 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 467 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 468 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.alpha', True), 469 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight', False), 470 | ('lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight', False), 471 | ('lora_unet_up_blocks_1_attentions_0_proj_in.alpha', True), 472 | ('lora_unet_up_blocks_1_attentions_0_proj_in.lora_down.weight', False), 473 | ('lora_unet_up_blocks_1_attentions_0_proj_in.lora_up.weight', False), 474 | ('lora_unet_up_blocks_1_attentions_0_proj_out.alpha', True), 475 | ('lora_unet_up_blocks_1_attentions_0_proj_out.lora_down.weight', False), 476 | ('lora_unet_up_blocks_1_attentions_0_proj_out.lora_up.weight', False), 477 | 
('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.alpha', True), 478 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 479 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 480 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha', True), 481 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 482 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 483 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.alpha', True), 484 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 485 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 486 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.alpha', True), 487 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 488 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 489 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.alpha', True), 490 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 491 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 492 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha', True), 493 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 494 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 495 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.alpha', True), 496 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 497 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 498 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.alpha', True), 499 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 500 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 501 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha', True), 502 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 503 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 504 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.alpha', True), 505 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight', False), 506 | ('lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight', False), 507 | ('lora_unet_up_blocks_1_attentions_1_proj_in.alpha', True), 508 | ('lora_unet_up_blocks_1_attentions_1_proj_in.lora_down.weight', False), 509 | ('lora_unet_up_blocks_1_attentions_1_proj_in.lora_up.weight', False), 510 | ('lora_unet_up_blocks_1_attentions_1_proj_out.alpha', True), 511 | ('lora_unet_up_blocks_1_attentions_1_proj_out.lora_down.weight', False), 512 | ('lora_unet_up_blocks_1_attentions_1_proj_out.lora_up.weight', False), 513 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.alpha', True), 514 | 
('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 515 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 516 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha', True), 517 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 518 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 519 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.alpha', True), 520 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 521 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 522 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.alpha', True), 523 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 524 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 525 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.alpha', True), 526 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 527 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 528 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha', True), 529 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 530 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 531 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.alpha', True), 532 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 533 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 534 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.alpha', True), 535 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 536 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 537 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha', True), 538 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 539 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 540 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.alpha', True), 541 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.lora_down.weight', False), 542 | ('lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight', False), 543 | ('lora_unet_up_blocks_1_attentions_2_proj_in.alpha', True), 544 | ('lora_unet_up_blocks_1_attentions_2_proj_in.lora_down.weight', False), 545 | ('lora_unet_up_blocks_1_attentions_2_proj_in.lora_up.weight', False), 546 | ('lora_unet_up_blocks_1_attentions_2_proj_out.alpha', True), 547 | ('lora_unet_up_blocks_1_attentions_2_proj_out.lora_down.weight', False), 548 | ('lora_unet_up_blocks_1_attentions_2_proj_out.lora_up.weight', False), 549 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_k.alpha', True), 550 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 551 | 
('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 552 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_out_0.alpha', True), 553 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 554 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 555 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_q.alpha', True), 556 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 557 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 558 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_v.alpha', True), 559 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 560 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 561 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_k.alpha', True), 562 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 563 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 564 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_out_0.alpha', True), 565 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 566 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 567 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_q.alpha', True), 568 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 569 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 570 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_v.alpha', True), 571 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 572 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 573 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_0_proj.alpha', True), 574 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 575 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 576 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_2.alpha', True), 577 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_2.lora_down.weight', False), 578 | ('lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_2.lora_up.weight', False), 579 | ('lora_unet_up_blocks_2_attentions_0_proj_in.alpha', True), 580 | ('lora_unet_up_blocks_2_attentions_0_proj_in.lora_down.weight', False), 581 | ('lora_unet_up_blocks_2_attentions_0_proj_in.lora_up.weight', False), 582 | ('lora_unet_up_blocks_2_attentions_0_proj_out.alpha', True), 583 | ('lora_unet_up_blocks_2_attentions_0_proj_out.lora_down.weight', False), 584 | ('lora_unet_up_blocks_2_attentions_0_proj_out.lora_up.weight', False), 585 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.alpha', True), 586 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 587 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 588 | 
('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha', True), 589 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 590 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 591 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.alpha', True), 592 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 593 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 594 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.alpha', True), 595 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 596 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 597 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.alpha', True), 598 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 599 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 600 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha', True), 601 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 602 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 603 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.alpha', True), 604 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 605 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 606 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.alpha', True), 607 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 608 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 609 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha', True), 610 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 611 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 612 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.alpha', True), 613 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight', False), 614 | ('lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight', False), 615 | ('lora_unet_up_blocks_2_attentions_1_proj_in.alpha', True), 616 | ('lora_unet_up_blocks_2_attentions_1_proj_in.lora_down.weight', False), 617 | ('lora_unet_up_blocks_2_attentions_1_proj_in.lora_up.weight', False), 618 | ('lora_unet_up_blocks_2_attentions_1_proj_out.alpha', True), 619 | ('lora_unet_up_blocks_2_attentions_1_proj_out.lora_down.weight', False), 620 | ('lora_unet_up_blocks_2_attentions_1_proj_out.lora_up.weight', False), 621 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.alpha', True), 622 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 623 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 624 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha', True), 625 | 
('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 626 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 627 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.alpha', True), 628 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 629 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 630 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.alpha', True), 631 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 632 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 633 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.alpha', True), 634 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 635 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 636 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha', True), 637 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 638 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 639 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.alpha', True), 640 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 641 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 642 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.alpha', True), 643 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 644 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 645 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha', True), 646 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 647 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 648 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.alpha', True), 649 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_down.weight', False), 650 | ('lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight', False), 651 | ('lora_unet_up_blocks_2_attentions_2_proj_in.alpha', True), 652 | ('lora_unet_up_blocks_2_attentions_2_proj_in.lora_down.weight', False), 653 | ('lora_unet_up_blocks_2_attentions_2_proj_in.lora_up.weight', False), 654 | ('lora_unet_up_blocks_2_attentions_2_proj_out.alpha', True), 655 | ('lora_unet_up_blocks_2_attentions_2_proj_out.lora_down.weight', False), 656 | ('lora_unet_up_blocks_2_attentions_2_proj_out.lora_up.weight', False), 657 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_k.alpha', True), 658 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 659 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 660 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_out_0.alpha', True), 661 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 662 | 
('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 663 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_q.alpha', True), 664 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 665 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 666 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_v.alpha', True), 667 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 668 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 669 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_k.alpha', True), 670 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 671 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 672 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_out_0.alpha', True), 673 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 674 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 675 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_q.alpha', True), 676 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 677 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 678 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_v.alpha', True), 679 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 680 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 681 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_0_proj.alpha', True), 682 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 683 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 684 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_2.alpha', True), 685 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_2.lora_down.weight', False), 686 | ('lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_2.lora_up.weight', False), 687 | ('lora_unet_up_blocks_3_attentions_0_proj_in.alpha', True), 688 | ('lora_unet_up_blocks_3_attentions_0_proj_in.lora_down.weight', False), 689 | ('lora_unet_up_blocks_3_attentions_0_proj_in.lora_up.weight', False), 690 | ('lora_unet_up_blocks_3_attentions_0_proj_out.alpha', True), 691 | ('lora_unet_up_blocks_3_attentions_0_proj_out.lora_down.weight', False), 692 | ('lora_unet_up_blocks_3_attentions_0_proj_out.lora_up.weight', False), 693 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_k.alpha', True), 694 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 695 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 696 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha', True), 697 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 698 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 699 | 
('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_q.alpha', True), 700 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 701 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 702 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_v.alpha', True), 703 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 704 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 705 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_k.alpha', True), 706 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 707 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 708 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha', True), 709 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 710 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 711 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_q.alpha', True), 712 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 713 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 714 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_v.alpha', True), 715 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 716 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 717 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha', True), 718 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 719 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 720 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_2.alpha', True), 721 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight', False), 722 | ('lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight', False), 723 | ('lora_unet_up_blocks_3_attentions_1_proj_in.alpha', True), 724 | ('lora_unet_up_blocks_3_attentions_1_proj_in.lora_down.weight', False), 725 | ('lora_unet_up_blocks_3_attentions_1_proj_in.lora_up.weight', False), 726 | ('lora_unet_up_blocks_3_attentions_1_proj_out.alpha', True), 727 | ('lora_unet_up_blocks_3_attentions_1_proj_out.lora_down.weight', False), 728 | ('lora_unet_up_blocks_3_attentions_1_proj_out.lora_up.weight', False), 729 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_k.alpha', True), 730 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 731 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 732 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha', True), 733 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 734 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 735 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_q.alpha', True), 736 | 
('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 737 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 738 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_v.alpha', True), 739 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 740 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 741 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_k.alpha', True), 742 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 743 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 744 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha', True), 745 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 746 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 747 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_q.alpha', True), 748 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 749 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 750 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_v.alpha', True), 751 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 752 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 753 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha', True), 754 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 755 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 756 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_2.alpha', True), 757 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_2.lora_down.weight', False), 758 | ('lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight', False), 759 | ('lora_unet_up_blocks_3_attentions_2_proj_in.alpha', True), 760 | ('lora_unet_up_blocks_3_attentions_2_proj_in.lora_down.weight', False), 761 | ('lora_unet_up_blocks_3_attentions_2_proj_in.lora_up.weight', False), 762 | ('lora_unet_up_blocks_3_attentions_2_proj_out.alpha', True), 763 | ('lora_unet_up_blocks_3_attentions_2_proj_out.lora_down.weight', False), 764 | ('lora_unet_up_blocks_3_attentions_2_proj_out.lora_up.weight', False), 765 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_k.alpha', True), 766 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_k.lora_down.weight', False), 767 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_k.lora_up.weight', False), 768 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_out_0.alpha', True), 769 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_down.weight', False), 770 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_up.weight', False), 771 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_q.alpha', True), 772 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_q.lora_down.weight', False), 773 | 
('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_q.lora_up.weight', False), 774 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_v.alpha', True), 775 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_v.lora_down.weight', False), 776 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_v.lora_up.weight', False), 777 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_k.alpha', True), 778 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_k.lora_down.weight', False), 779 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_k.lora_up.weight', False), 780 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_out_0.alpha', True), 781 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_down.weight', False), 782 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_up.weight', False), 783 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_q.alpha', True), 784 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_q.lora_down.weight', False), 785 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_q.lora_up.weight', False), 786 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_v.alpha', True), 787 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_v.lora_down.weight', False), 788 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_v.lora_up.weight', False), 789 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_0_proj.alpha', True), 790 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_down.weight', False), 791 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_up.weight', False), 792 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_2.alpha', True), 793 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_2.lora_down.weight', False), 794 | ('lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_2.lora_up.weight', False) 795 | ] 796 | --------------------------------------------------------------------------------
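The table ending above (the tail of /lora_keys_sd15.py) maps every known SD1.5 LoRA tensor name to a flag that appears to distinguish scalar `.alpha` entries (True) from `.lora_down.weight`/`.lora_up.weight` matrices (False). Below is a minimal sketch, not part of the repository, of how such a table might be consumed to validate a parsed safetensors header: the import name `lora_keys` and the helper `check_sd15_lora_header` are assumptions for illustration (the list's actual variable name is not visible in this excerpt, and the repository's real consumer elsewhere may differ).

    # Minimal usage sketch (not part of the repository). Assumes the table
    # above is exported as `lora_keys`; the actual variable name may differ.
    from lora_keys_sd15 import lora_keys

    def check_sd15_lora_header(header: dict) -> list[str]:
        """Report problems in a parsed safetensors JSON header, assuming the
        boolean flag marks scalar .alpha entries (True) vs. weight matrices."""
        known = dict(lora_keys)          # tensor name -> is_alpha flag
        problems = []
        for name, info in header.items():
            if name == "__metadata__":   # metadata block is not a tensor entry
                continue
            if name not in known:
                problems.append(f"unknown SD1.5 LoRA key: {name}")
            elif known[name] and info.get("shape") not in ([], [1]):
                # .alpha entries are typically stored as scalars (shape [] or [1])
                problems.append(f"{name}: expected scalar shape, got {info.get('shape')}")
        return problems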