import os
import json
import concurrent.futures
import network
import networks
from modules import shared, ui_extra_networks


class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
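    # extra networks UI page that lists available Lora networks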
    def __init__(self):
        super().__init__('Lora')
        self.list_time = 0

    def refresh(self):
        networks.list_available_networks()

    def create_item(self, name):
        l = networks.available_networks.get(name)
        try:
            # path, _ext = os.path.splitext(l.filename)
            name = os.path.splitext(os.path.relpath(l.filename, shared.cmd_opts.lora_dir))[0]
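            # skip networks whose SD version is incompatible with the active backend / loaded model type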
            if shared.backend == shared.Backend.ORIGINAL:
                if l.sd_version == network.SdVersion.SDXL:
                    return None
            elif shared.backend == shared.Backend.DIFFUSERS:
                if shared.sd_model_type == 'none': # return all when model is not loaded
                    pass
                elif shared.sd_model_type == 'sdxl':
                    if l.sd_version == network.SdVersion.SD1 or l.sd_version == network.SdVersion.SD2:
                        return None
                elif shared.sd_model_type == 'sd':
                    if l.sd_version == network.SdVersion.SDXL:
                        return None

            item = {
                "type": 'Lora',
                "name": name,
                "filename": l.filename,
                "hash": l.shorthash,
                "prompt": json.dumps(f" <lora:{l.get_alias()}:{shared.opts.extra_networks_default_multiplier}>"),
                "metadata": json.dumps(l.metadata, indent=4) if l.metadata else None,
                "mtime": os.path.getmtime(l.filename),
                "size": os.path.getsize(l.filename),
            }
            info = self.find_info(l.filename)

            tags = {}
            possible_tags = l.metadata.get('ss_tag_frequency', {}) if l.metadata is not None else {} # tags from model metadata
            if isinstance(possible_tags, str):
                possible_tags = {}
            for k, v in possible_tags.items():
                words = k.split('_', 1) if '_' in k else [v, k]
                words = [str(w).replace('.json', '') for w in words]
                if words[0] == '{}':
                    words[0] = 0
                tag = ' '.join(words[1:]).lower()
                tags[tag] = words[0]


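            # collect model versions from the info json whose file hashes match this network's hash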
            def find_version():
                found_versions = []
                current_hash = l.hash[:8].upper()
                all_versions = info.get('modelVersions', [])
                for v in all_versions:
                    for f in v.get('files', []):
                        if any(h.startswith(current_hash) for h in f.get('hashes', {}).values()):
                            found_versions.append(v)
                if len(found_versions) == 0:
                    found_versions = all_versions
                return found_versions

            for v in find_version():  # trigger words from info json
                possible_tags = v.get('trainedWords', [])
                if isinstance(possible_tags, list):
                    for tag_str in possible_tags:
                        for tag in tag_str.split(','):
                            tag = tag.strip().lower()
                            if tag not in tags:
                                tags[tag] = 0

            possible_tags = info.get('tags', []) # tags from info json
            if not isinstance(possible_tags, list):
                possible_tags = list(possible_tags.values())
            for tag in possible_tags:
                tag = tag.strip().lower()
                if tag not in tags:
                    tags[tag] = 0

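            # sanitize tag names by removing special characters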
            bad_chars = [';', ':', '<', ">", "*", '?', '\'', '\"', '(', ')', '[', ']', '{', '}', '\\', '/']
            clean_tags = {}
            for k, v in tags.items():
                tag = ''.join(i for i in k if i not in bad_chars).strip()
                clean_tags[tag] = v

            clean_tags.pop('img', None)
            clean_tags.pop('dataset', None)

            item["info"] = info
            item["description"] = self.find_description(l.filename, info) # use existing info instead of double-read
            item["tags"] = clean_tags

            return item
        except Exception as e:
            shared.log.debug(f"Extra networks error: type=lora file={name} {e}")
            from modules import errors
            errors.display(e, 'Lora')
            return None

    def list_items(self):
        items = []
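        # build items in parallel; entries that fail or are filtered out return None and are skipped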
        with concurrent.futures.ThreadPoolExecutor(max_workers=shared.max_workers) as executor:
            future_items = {executor.submit(self.create_item, net): net for net in networks.available_networks}
            for future in concurrent.futures.as_completed(future_items):
                item = future.result()
                if item is not None:
                    items.append(item)
        self.update_all_previews(items)
        return items

    def allowed_directories_for_previews(self):
        return [shared.cmd_opts.lora_dir, shared.cmd_opts.lyco_dir]