21 sys.setdlopenflags(old_flags) 22 23 try: 24 _try_import_with_global_library_symbols() 25 except: 26 pass 27 28 del _try_import_with_global_library_symbols 29
def __exit__(self, type, value, traceback):
    """Context-manager exit: close the underlying file handle."""
    self.close()

def find_blocks_from_code(self, code):
    """Return all file blocks whose block code equals *code*.

    :arg code: 4-byte block code such as ``b'OB'`` or ``b'ME'``.
    :return: list of matching blocks (empty list when the code is unknown).
    """
    # isinstance instead of `type(code) == bytes` (idiomatic type check).
    assert isinstance(code, bytes)
    # Single dict lookup instead of `in` test followed by indexing.
    return self.code_index.get(code, [])
159 160 def find_block_from_offset(self, offset): 161 # same as looking looping over all blocks, 162 # then checking `block.addr_old == offset`. 163 assert type(offset) is int 164 return self.block_from_offset.get(offset) 165 166 def close(self):
def dna_type_name(self):
    """Return this block's DNA struct name decoded as an ASCII string."""
    return self.dna_type.dna_type_id.decode('ascii')

def refine_type_from_index(self, sdna_index_next):
    """Re-interpret this block as the struct at *sdna_index_next*.

    Only refining to a smaller (sub)type is permitted; this is enforced by
    ``ensure_subtype_smaller`` before the index is switched.
    """
    # isinstance instead of `type(x) is int` (idiomatic type check).
    assert isinstance(sdna_index_next, int)
    sdna_index_curr = self.sdna_index
    self.file.ensure_subtype_smaller(sdna_index_curr, sdna_index_next)
    self.sdna_index = sdna_index_next
366 self.file.ensure_subtype_smaller(sdna_index_curr, sdna_index_next) 367 self.sdna_index = sdna_index_next 368 369 def refine_type(self, dna_type_id): 370 assert type(dna_type_id) is bytes 371 self.refine_type_from_index(self.file.sdna_index_from_id[dna_type_id]) 372 373 def get_file_offset(
378 ): 379 """ 380 Return (offset, length) 381 """ 382 assert type(path) is bytes 383 384 ofs = self.file_offset 385 if base_index != 0:
382 assert type(path) is bytes 383 384 ofs = self.file_offset 385 if base_index != 0: 386 assert base_index < self.count 387 ofs += (self.size // self.count) * base_index 388 self.file.handle.seek(ofs, os.SEEK_SET) 389
407 ): 408 409 ofs = self.file_offset 410 if base_index != 0: 411 assert base_index < self.count 412 ofs += (self.size // self.count) * base_index 413 self.file.handle.seek(ofs, os.SEEK_SET) 414
441 array_size = self.size // dna_size 442 443 ofs = self.file_offset 444 if base_index != 0: 445 assert base_index < array_size 446 ofs += dna_size * base_index 447 self.file.handle.seek(ofs, os.SEEK_SET) 448
544 # default 545 if type(result) is not int: 546 return result 547 548 assert self.file.structs[sdna_index_refine].field_from_path( 549 self.file.header, self.file.handle, path).dna_name.is_pointer 550 if result != 0: 551 # possible (but unlikely) 552 # that this fails and returns None
625 self.pointer_size = 8 626 elif pointer_size_id == b'_': 627 self.pointer_size = 4 628 else: 629 assert 0 630 endian_id = values[2] 631 if endian_id == b'v': 632 self.is_little_endian = True
636 self.is_little_endian = False 637 self.endian_index = 1 638 self.endian_str = b'>' 639 else: 640 assert 0 641 642 version_id = values[3] 643 self.version = int(version_id)
765 name = path[0] 766 if len(path) >= 2 and type(path[1]) is not bytes: 767 name_tail = path[2:] 768 index = path[1] 769 assert type(index) is int 770 else: 771 name_tail = path[1:] 772 index = 0
774 name = path 775 name_tail = None 776 index = 0 777 778 assert type(name) is bytes 779 780 field = self.field_from_name.get(name) 781
785 if field.dna_name.is_pointer: 786 index_offset = header.pointer_size * index 787 else: 788 index_offset = field.dna_type.size * index 789 assert index_offset < field.dna_size 790 handle.seek(index_offset, os.SEEK_CUR) 791 if not name_tail: # None or () 792 return field
823 raise NotImplementedError("%r exists, but can't resolve field %r" % 824 (path, dna_name.name_only), dna_name, dna_type) 825 826 def field_set(self, header, handle, path, value): 827 assert type(path) == bytes 828 829 field = self.field_from_path(header, handle, path) 830 if field is None:
907 raise NotImplementedError("Reading %r type is not implemented" % dna_type_id) 908 909 @staticmethod 910 def write_string(handle, astring, fieldlen): 911 assert isinstance(astring, str) 912 if len(astring) >= fieldlen: 913 stringw = astring[0:fieldlen] 914 else:
916 handle.write(stringw.encode('utf-8')) 917 918 @staticmethod 919 def write_bytes(handle, astring, fieldlen): 920 assert isinstance(astring, (bytes, bytearray)) 921 if len(astring) >= fieldlen: 922 stringw = astring[0:fieldlen] 923 else:
499 except: 500 break 501 self.sock.settimeout(sock_timeout) 502 self.sock.shutdown(socket.SHUT_RDWR) 503 except: 504 pass 505 506 self.shutdown() 507
179 if isinstance(result, str): 180 result = result.encode('utf-8') 181 182 value = (key + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").encode('utf-8') 183 hashed = base64encode(hashlib.sha1(value).digest()).strip().lower() 184 success = hmac.compare_digest(hashed, result) 185 186 if success:
180 data = str(data, "utf-8") 181 if isinstance(data, bytes) and len(data) > 2 and data[:2] == b'\037\213': # gzip magick 182 try: 183 data = "[gzip] " + str(gzip.decompress(data), "utf-8") 184 except: 185 pass 186 elif isinstance(data, bytes): 187 try: 188 data = "[zlib] " + str(zlib.decompress(data, -zlib.MAX_WBITS), "utf-8")
185 pass 186 elif isinstance(data, bytes): 187 try: 188 data = "[zlib] " + str(zlib.decompress(data, -zlib.MAX_WBITS), "utf-8") 189 except: 190 pass 191 192 if isinstance(data, bytes): 193 data = repr(data)
1 # Reference: https://github.com/DominikDoom/a1111-sd-webui-tagcomplete 2 from __future__ import annotations 3 import time 4 import pickle 5 from pathlib import Path 6 from threading import Thread 7 from functools import lru_cache 8 DEBUG = True
88 key = word[1] 89 if self.search(key): 90 return 91 # [freq, key, type, content, wtype] 92 assert isinstance(word, tuple), str(word) 93 node: dict = self.root 94 for char in key: 95 if char not in node:
190 def from_cache(self): 191 if not self.CACHE_PATH.exists(): 192 return 193 # ts = time.time() 194 data: dict = pickle.load(open(self.CACHE_PATH.as_posix(), "rb+")) 195 if data.get("version") != CACHE_VERSION: 196 self.CACHE_PATH.unlink() 197 return
1 # Reference: https://github.com/DominikDoom/a1111-sd-webui-tagcomplete 2 from __future__ import annotations 3 import bpy 4 import time 5 import pickle 6 import csv 7 import traceback 8 from pathlib import Path
30 self.word_map: dict[str, tuple] = {} 31 32 def read_tags(self): 33 if self.CACHE_PATH.exists(): 34 cache = pickle.loads(self.CACHE_PATH.read_bytes()) 35 if isinstance(cache, list): 36 self.read_raw_tags() 37 elif cache.get("VERSION", (0, 0)) < CACHE_VERSION:
1 from __future__ import annotations 2 import pickle 3 import json 4 import time 5 from pathlib import Path 6 from functools import lru_cache 7 import difflib 8
188 ... 189 if with_lzma: 190 import lzma 191 with lzma.open(data_path.as_posix(), "rb") as f: 192 trie = pickle.load(f) 193 else: 194 with open(data_path.as_posix(), "rb") as f: 195 trie = pickle.load(f)
191 with lzma.open(data_path.as_posix(), "rb") as f: 192 trie = pickle.load(f) 193 else: 194 with open(data_path.as_posix(), "rb") as f: 195 trie = pickle.load(f) 196 return trie 197 198
69 return node 70 71 72 def get_fixed_seed(): 73 return int(random.randrange(4294967294)) 74 75 76 def is_bool_list(some_list: list):
1108 data = {"overwrite": "true", "subfolder": "SDN"} 1109 img_type = f"image/{img_path.suffix.replace('.', '')}" 1110 files = {'image': (img_path.name, img_path.read_bytes(), img_type)} 1111 timeout = Timeout(connect=5, read=5) 1112 url = url.replace("0.0.0.0", "127.0.0.1") 1113 response = requests.post(url, data=data, files=files, timeout=timeout) 1114 # 检查响应 1115 if response.status_code == 200:
1127 url_values = urllib.parse.urlencode(data) 1128 from .manager import TaskManager 1129 url = f"{TaskManager.server.get_url()}/view?{url_values}" 1130 # logger.debug(f'requesting {url} for image data') 1131 with urllib.request.urlopen(url) as response: 1132 img_data = response.read() 1133 if not save_path: 1134 suffix = suffix if suffix.startswith(".") else f".{suffix}"
34 elif path.is_dir(): 35 # 移除 .git 36 if path.name == ".git": 37 if platform.system() == "darwin": 38 from subprocess import call 39 call(['rm', '-rf', path.as_posix()]) 40 elif platform.system() == "Windows": 41 os.system(f'rd/s/q "{path.as_posix()}"')
35 # 移除 .git 36 if path.name == ".git": 37 if platform.system() == "darwin": 38 from subprocess import call 39 call(['rm', '-rf', path.as_posix()]) 40 elif platform.system() == "Windows": 41 os.system(f'rd/s/q "{path.as_posix()}"') 42 return
35 # 移除 .git 36 if path.name == ".git": 37 if platform.system() == "darwin": 38 from subprocess import call 39 call(['rm', '-rf', path.as_posix()]) 40 elif platform.system() == "Windows": 41 os.system(f'rd/s/q "{path.as_posix()}"') 42 return
37 if platform.system() == "darwin": 38 from subprocess import call 39 call(['rm', '-rf', path.as_posix()]) 40 elif platform.system() == "Windows": 41 os.system(f'rd/s/q "{path.as_posix()}"') 42 return 43 for child in path.iterdir(): 44 rmtree(child)
15 from urllib import request 16 from urllib.parse import urlparse 17 from urllib.error import URLError 18 from threading import Thread 19 from subprocess import Popen, PIPE, STDOUT 20 from pathlib import Path 21 from queue import Queue 22 from ..utils import rmtree as rt, logger, _T, PkgInstaller, update_screen
34 return TaskManager.server.get_port() 35 36 37 def get_url(): 38 return TaskManager.server.get_url().replace("0.0.0.0", "localhost") 39 40 41 WITH_PROXY = False
689 args = ["/bin/bash", "-c", args] # shell=True will use /bin/sh which can't source 690 # mac 691 if system() == "Darwin": 692 os.environ["PYTORCH_ENABLE_MPS_FALLBACK"] = "1" 693 p = Popen(args, stdout=PIPE, stderr=STDOUT, cwd=Path(model_path).resolve().as_posix()) 694 self.child = p 695 self.pid = p.pid 696 self.exited_status[self.pid] = False
753 if "python" not in process.name(): 754 return 755 process.kill() 756 # os.system(f'taskkill /F /IM {process.name()}') 757 os.system(f'taskkill /pid {pid} -t -f') 758 except psutil.NoSuchProcess: 759 return 760 elif sys.platform == "darwin":
761 try: 762 process = psutil.Process(pid) 763 if "python" in process.name().lower(): 764 # process.kill() 765 os.system(f"kill -9 {pid}") 766 except psutil.NoSuchProcess: 767 return 768 else:
798 command.append("-i") 799 command.append(fast_url) 800 command.append("--trusted-host") 801 command.append(site.netloc) 802 proc = Popen(command, cwd=model_path) 803 proc.wait() 804 805 logger.warning(_T("ControlNet Init Finished."))
@staticmethod
def clear_cache():
    """POST ``/cup/clear_cache`` to the running server; network failures
    are ignored (best effort)."""
    endpoint = f"{TaskManager.server.get_url()}/cup/clear_cache"
    req = request.Request(endpoint, method="POST")
    try:
        request.urlopen(req)
    except URLError:
        pass
1027 from http.client import RemoteDisconnected 1028 import traceback 1029 req = request.Request(f"{TaskManager.server.get_url()}/interrupt", method="POST") 1030 try: 1031 request.urlopen(req) 1032 except URLError: 1033 ... 1034 except RemoteDisconnected:
1069 if not TaskManager.is_launched(): 1070 return {"queue_pending": [], "queue_running": []} 1071 try: 1072 req = request.Request(f"{TaskManager.server.get_url()}/queue") 1073 res = request.urlopen(req) 1074 res = json.loads(res.read().decode()) 1075 except BaseException: 1076 res = {"queue_pending": [], "queue_running": []}
1104 History.put_history(task.get("workflow")) 1105 # logger.debug(f'post to {TaskManager.server.get_url()}/{api}:') 1106 # logger.debug(data.decode()) 1107 try: 1108 request.urlopen(req) 1109 except request.HTTPError as e: 1110 print(_T("Invalid Node Connection")) 1111 TaskManager.put_error_msg(_T("Invalid Node Connection"))
131 132 def calc_hash_type(stype): 133 from .blueprints import is_bool_list, is_all_str_list 134 if is_bool_list(stype): 135 hash_type = md5("{True, False}".encode()).hexdigest() 136 elif not is_all_str_list(stype): 137 hash_type = md5(",".join([str(i) for i in stype]).encode()).hexdigest() 138 else:
133 from .blueprints import is_bool_list, is_all_str_list 134 if is_bool_list(stype): 135 hash_type = md5("{True, False}".encode()).hexdigest() 136 elif not is_all_str_list(stype): 137 hash_type = md5(",".join([str(i) for i in stype]).encode()).hexdigest() 138 else: 139 try: 140 hash_type = md5(",".join(stype).encode()).hexdigest()
136 elif not is_all_str_list(stype): 137 hash_type = md5(",".join([str(i) for i in stype]).encode()).hexdigest() 138 else: 139 try: 140 hash_type = md5(",".join(stype).encode()).hexdigest() 141 except TypeError as e: 142 winfo = str(stype) 143 if len(winfo) > 100:
1788 def draw(self, context, layout, node: NodeBase, text): 1789 if not node.is_registered_node_type(): 1790 return 1791 node.draw_socket(self, context, layout, node, text) 1792 rand_color = (rand()**0.5, rand()**0.5, rand()**0.5, 1) 1793 color = bpy.props.FloatVectorProperty(size=4, default=rand_color) 1794 fields = { 1795 "draw": draw,
1788 def draw(self, context, layout, node: NodeBase, text): 1789 if not node.is_registered_node_type(): 1790 return 1791 node.draw_socket(self, context, layout, node, text) 1792 rand_color = (rand()**0.5, rand()**0.5, rand()**0.5, 1) 1793 color = bpy.props.FloatVectorProperty(size=4, default=rand_color) 1794 fields = { 1795 "draw": draw,
1788 def draw(self, context, layout, node: NodeBase, text): 1789 if not node.is_registered_node_type(): 1790 return 1791 node.draw_socket(self, context, layout, node, text) 1792 rand_color = (rand()**0.5, rand()**0.5, rand()**0.5, 1) 1793 color = bpy.props.FloatVectorProperty(size=4, default=rand_color) 1794 fields = { 1795 "draw": draw,
1933 if skip: 1934 logger.warning("Skip Reg Node: %s", nname) 1935 continue 1936 NodeDesc = type(nname, (NodeBase,), fields) 1937 NodeDesc.dcolor = (rand() / 2, rand() / 2, rand() / 2) 1938 node_clss.append(NodeDesc) 1939 return node_clss 1940
1933 if skip: 1934 logger.warning("Skip Reg Node: %s", nname) 1935 continue 1936 NodeDesc = type(nname, (NodeBase,), fields) 1937 NodeDesc.dcolor = (rand() / 2, rand() / 2, rand() / 2) 1938 node_clss.append(NodeDesc) 1939 return node_clss 1940
1933 if skip: 1934 logger.warning("Skip Reg Node: %s", nname) 1935 continue 1936 NodeDesc = type(nname, (NodeBase,), fields) 1937 NodeDesc.dcolor = (rand() / 2, rand() / 2, rand() / 2) 1938 node_clss.append(NodeDesc) 1939 return node_clss 1940
1 import subprocess 2 3 c = """ 4 import sys 5 import time 6 for i in range(5): 7 sys.stdout.write('Processing') 8 sys.stdout.flush()
3 import bpy 4 import typing 5 import time 6 import sys 7 import pickle 8 import traceback 9 import inspect 10 import types
144 def _get_id_pool(self) -> set: 145 if "ID_POOL" not in self.tree: 146 self.tree["ID_POOL"] = pickle.dumps(set()) 147 try: 148 return pickle.loads(self.tree["ID_POOL"]) 149 except pickle.UnpicklingError: 150 self.tree["ID_POOL"] = pickle.dumps(set()) 151 return pickle.loads(self.tree["ID_POOL"])
147 try: 148 return pickle.loads(self.tree["ID_POOL"]) 149 except pickle.UnpicklingError: 150 self.tree["ID_POOL"] = pickle.dumps(set()) 151 return pickle.loads(self.tree["ID_POOL"]) 152 153 def _set_id_pool(self, value): 154 if not isinstance(value, set):
990 registered_menu = registered_menus.pop(class_name, None) 991 if registered_menu and getattr(registered_menu, "is_registered"): 992 try: 993 bpy.utils.unregister_class(registered_menu) 994 except Exception: 995 pass 996 menu_type = type(class_name, (bpy.types.Menu,), __data__) 997 menu_types.append(menu_type) 998 registered_menus[class_name] = menu_type
1026 menus = [] 1027 for item in nodes["items"]: 1028 items.append(CFNodeItem(item)) 1029 menus.extend(load_node(nodes.get("menus", {}), root=cat, proot=f"{proot}/{ocat}")) 1030 hash_root = md5(proot.encode()).hexdigest()[:5] 1031 if not root: 1032 cat_id = cat 1033 else:
270 for piece in config: 271 # --listen 127.0.0.1 --port 8188 272 if ip := re.match(r"--listen\s+([0-9.]+)", piece): 273 ip = ip.group(1) 274 ip = {"0.0.0.0": "127.0.0.1"}.get(ip, ip) 275 self.ip = ip 276 if port := re.match(r".*?--port\s+([0-9]+)", piece): 277 self.port = int(port.group(1))
447 def get_cuda_list(): 448 """ 449 借助nvidia-smi获取CUDA版本列表 450 """ 451 import subprocess 452 try: 453 res = subprocess.check_output("nvidia-smi -L", shell=True).decode("utf-8") 454 # GPU 0: NVIDIA GeForce GTX 1060 5GB (UUID: xxxx)
449 借助nvidia-smi获取CUDA版本列表 450 """ 451 import subprocess 452 try: 453 res = subprocess.check_output("nvidia-smi -L", shell=True).decode("utf-8") 454 # GPU 0: NVIDIA GeForce GTX 1060 5GB (UUID: xxxx) 455 items = [("default", "Auto", "", 0,)] 456 for line in res.split("\n"):
449 借助nvidia-smi获取CUDA版本列表 450 """ 451 import subprocess 452 try: 453 res = subprocess.check_output("nvidia-smi -L", shell=True).decode("utf-8") 454 # GPU 0: NVIDIA GeForce GTX 1060 5GB (UUID: xxxx) 455 items = [("default", "Auto", "", 0,)] 456 for line in res.split("\n"):
def update_open_dir1(self, context):
    """Toggle handler: open the checkpoints folder, then reset the flag.

    NOTE(review): uses ``os.startfile`` — Windows-only API; confirm intended.
    """
    if not self.open_dir1:
        return
    self.open_dir1 = False
    os.startfile(Path(self.model_path) / "models/checkpoints")


open_dir1: bpy.props.BoolProperty(default=False, name="Open CKPT Folder", update=update_open_dir1)
496 497 def update_open_dir2(self, context): 498 if self.open_dir2: 499 self.open_dir2 = False 500 os.startfile(Path(self.model_path) / "models/loras") 501 open_dir2: bpy.props.BoolProperty(default=False, name="Open LoRA Folder", update=update_open_dir2) 502 503 def update_open_dir3(self, context):
def update_open_dir3(self, context):
    """Toggle handler: open the ComfyUI root folder, then reset the flag."""
    if not self.open_dir3:
        return
    self.open_dir3 = False
    os.startfile(self.model_path)


open_dir3: bpy.props.BoolProperty(default=False, name="Open ComfyUI Folder", update=update_open_dir3)
509 510 def update_open_dir4(self, context): 511 if self.open_dir4: 512 self.open_dir4 = False 513 os.startfile(Path(self.model_path) / "SDNodeTemp") 514 515 open_dir4: bpy.props.BoolProperty(default=False, 516 name="Open Cache Folder",
def update_open_presets_dir(self, context):
    """Toggle handler: open the node-group presets folder, then reset the flag."""
    if not self.open_presets_dir:
        return
    self.open_presets_dir = False
    os.startfile(str(PRESETS_DIR))


open_presets_dir: bpy.props.BoolProperty(default=False, name="Open NodeGroup Presets Folder", update=update_open_presets_dir)
def update_open_groups_dir(self, context):
    """Toggle handler: open the node-tree presets folder, then reset the flag."""
    if not self.open_groups_dir:
        return
    self.open_groups_dir = False
    os.startfile(str(GROUPS_DIR))


open_groups_dir: bpy.props.BoolProperty(default=False, name="Open NodeTree Presets Folder", update=update_open_groups_dir)
def new():
    """Create a preview collection and register it in ``PrevMgr.__PREV__``
    under a previously unused random integer id."""
    import bpy.utils.previews
    import random
    prev = bpy.utils.previews.new()
    i = random.randint(0, 999999999)
    while i in PrevMgr.__PREV__:
        i = random.randint(0, 999999999)
    PrevMgr.__PREV__[i] = prev
    return prev
566 @staticmethod 567 def get_nas_mapping(): 568 if platform.system() != "Windows": 569 return {} 570 import subprocess 571 result = subprocess.run("net use", capture_output=True, text=True, encoding="gbk", check=True) 572 if result.returncode != 0 or result.stdout is None: 573 return {}
567 def get_nas_mapping(): 568 if platform.system() != "Windows": 569 return {} 570 import subprocess 571 result = subprocess.run("net use", capture_output=True, text=True, encoding="gbk", check=True) 572 if result.returncode != 0 or result.stdout is None: 573 return {} 574 nas_mapping = {}
567 def get_nas_mapping(): 568 if platform.system() != "Windows": 569 return {} 570 import subprocess 571 result = subprocess.run("net use", capture_output=True, text=True, encoding="gbk", check=True) 572 if result.returncode != 0 or result.stdout is None: 573 return {} 574 nas_mapping = {}
1593 # "Hires upscaler": "ESRGAN_4x", 1594 # "Downcast alphas_cumprod": "True", 1595 "Version": "1.8.0-RC" 1596 } 1597 assert self._parse(in_t0) == out_t0, "Test 0 failed" 1598 in_t1 = """ 1599 masterpiece,ultra high quality,highest quality,super fine,1girl,solo,(black background:1.3),(silhouette:1.1),sparkle,looking at viewer,upper body,simple background,glowing,(dim lighting:1.2),crystal clear,colorful clothes, 1600 Negative prompt: Easy Negative,bad handv4,ng_deepnegative_v1_75t,(worst quality:2),(low quality:2),(normal quality:2),lowres,((monochrome)),((grayscale)),bad anatomy,DeepNegative,skin spots,acnes,skin blemishes,(fat:1.2),facing away,looking away,tilted head,lowres,bad anatomy,bad hands,missing fingers,extra digit,fewer digits,bad feet,poorly drawn hands,poorly drawn face,mutation,deformed,extra fingers,extra limbs,extra arms,extra legs,malformed limbs,fused fingers,too many fingers,long neck,cross-eyed,mutated hands,polar lowres,bad body,bad proportions,gross proportions,missing arms,missing legs,extra digit,extra arms,extra leg,extra foot,teethcroppe,signature,watermark,username,blurry,cropped,jpeg artifacts,text,Lower body exposure,
1615 "TI hashes": "\"ng_deepnegative_v1_75t: 54e7e4826d53\"", 1616 # "Pad conds": "True", 1617 "Version": "v1.9.4" 1618 } 1619 assert self._parse(in_t1) == out_t1, "Test 1 failed" 1620 1621 in_t2 = """ 1622 (official art:1.2),(colorful:1.1),(masterpiece:1.2),best quality,masterpiece,highres,original,extremely detailed wallpaper,1girl,solo,very long hair,(loli:1.3),vibrant color palette,dazzling hues,kaleidoscopic patterns,enchanting young maiden,radiant beauty,chromatic harmony,iridescent hair,sparkling eyes,lush landscapes,vivid blossoms,mesmerizing sunsets,brilliant rainbows,prismatic reflections,whimsical attire,captivating accessories,stunning chromatic display,artful composition,picturesque backdrop,breathtaking scenery,visual symphony,spellbinding chromatic enchantment,
1646 "ControlNet 0": 'Module: tile_resample, Model: control_v11f1e_sd15_tile_fp16 [3b860298], Weight: 0.5, Resize Mode: Crop and Resize, Processor Res: 512, Threshold A: 1.0, Threshold B: 0.5, Guidance Start: 0.0, Guidance End: 1.0, Pixel Perfect: True, Control Mode: Balanced', 1647 # "Pad conds": "True", 1648 "Version": "v1.9.4", 1649 } 1650 assert self._parse(in_t2) == out_t2, "Test 2 failed" 1651 in_t3 = """ 1652 masterpiece, best quality, girl,woman,female, short hair, light smile, closed_eyes, cat_ears, overskirt,white dress,frills, pale blue Clothes,tiara 1653 Negative prompt: easynegative, ng_deepnegative_v1_75t, By bad artist -neg, verybadimagenegative_v1.3
1666 "Model hash": "19dbfda152", 1667 "Model": "二次元_mixProV45Colorbox_v45", 1668 "Clip skip": "2", 1669 } 1670 assert self._parse(in_t3) == out_t3, "Test 3 failed" 1671 in_t4 = """ 1672 masterpiece, best quality, 1girl, solo, voxel art, 1673 gazebo, white girl,
1703 "Size": "640x960", 1704 "Model hash": "149fe7d36c", 1705 "Model": "二次元_meinaalter_v1", 1706 } 1707 assert self._parse(in_t4) == out_t4, "Test 4 failed" 1708 1709 def base_workflow(self): 1710 wk = {