stable-diffusion-webui

from __future__ import annotations

import configparser
import os
import threading
import re

from modules import shared, errors, cache, scripts
from modules.gitpython_hack import Repo
from modules.paths_internal import extensions_dir, extensions_builtin_dir, script_path  # noqa: F401


os.makedirs(extensions_dir, exist_ok=True)


def active():
    if shared.cmd_opts.disable_all_extensions or shared.opts.disable_all_extensions == "all":
        return []
    elif shared.cmd_opts.disable_extra_extensions or shared.opts.disable_all_extensions == "extra":
        return [x for x in extensions if x.enabled and x.is_builtin]
    else:
        return [x for x in extensions if x.enabled]


class ExtensionMetadata:
    filename = "metadata.ini"
    config: configparser.ConfigParser
    canonical_name: str
    requires: list

    def __init__(self, path, canonical_name):
        self.config = configparser.ConfigParser()

        filepath = os.path.join(path, self.filename)
        # `self.config.read()` will quietly swallow OSErrors (which FileNotFoundError is),
        # so no need to check whether the file exists beforehand.
        try:
            self.config.read(filepath)
        except Exception:
            errors.report(f"Error reading {self.filename} for extension {canonical_name}.", exc_info=True)

        # a Name set in metadata.ini overrides the name derived from the extension's directory
        canonical_name = self.config.get("Extension", "Name", fallback=canonical_name)
        self.canonical_name = canonical_name.lower().strip()

        self.requires = self.get_script_requirements("Requires", "Extension")

    def get_script_requirements(self, field, section, extra_section=None):
        """reads a list of requirements from the config; field is the name of the field in the ini file,
        like Requires or Before, and section is the name of the [section] in the ini file; additionally,
        reads more requirements from [extra_section] if specified."""

        x = self.config.get(section, field, fallback='')

        if extra_section:
            x = x + ', ' + self.config.get(extra_section, field, fallback='')

        return self.parse_list(x.lower())

    def parse_list(self, text):
        """converts a line from config ("ext1 ext2, ext3  ") into a python list (["ext1", "ext2", "ext3"])"""

        if not text:
            return []

        # both "," and " " are accepted as separator
        return [x for x in re.split(r"[,\s]+", text.strip()) if x]
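# A minimal metadata.ini as consumed by ExtensionMetadata above (hypothetical example; this module
# itself only reads the "Name" and "Requires" keys of the [Extension] section, while other sections
# can be queried through get_script_requirements()):
#
#   [Extension]
#   Name = my-extension
#   Requires = some-other-extension, another-extension
#
# parse_list() treats commas and whitespace interchangeably, so "ext1 ext2, ext3" becomes
# ["ext1", "ext2", "ext3"], and values are lower-cased before they are compared.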

class Extension:
    lock = threading.Lock()
    cached_fields = ['remote', 'commit_date', 'branch', 'commit_hash', 'version']
    metadata: ExtensionMetadata

    def __init__(self, name, path, enabled=True, is_builtin=False, metadata=None):
        self.name = name
        self.path = path
        self.enabled = enabled
        self.status = ''
        self.can_update = False
        self.is_builtin = is_builtin
        self.commit_hash = ''
        self.commit_date = None
        self.version = ''
        self.branch = None
        self.remote = None
        self.have_info_from_repo = False
        self.metadata = metadata if metadata else ExtensionMetadata(self.path, name.lower())
        self.canonical_name = self.metadata.canonical_name

    def to_dict(self):
        return {x: getattr(self, x) for x in self.cached_fields}

    def from_dict(self, d):
        for field in self.cached_fields:
            setattr(self, field, d[field])
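    # to_dict()/from_dict() round-trip exactly the fields named in cached_fields; read_info_from_repo()
    # below passes them through cache.cached_data_for_file() so git information can come from the shared
    # cache instead of being re-read from the repository (an inference from the call site; the cache
    # semantics themselves live in modules.cache).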

    def read_info_from_repo(self):
        if self.is_builtin or self.have_info_from_repo:
            return

        def read_from_repo():
            with self.lock:
                if self.have_info_from_repo:
                    return

                self.do_read_info_from_repo()

                return self.to_dict()

        try:
            d = cache.cached_data_for_file('extensions-git', self.name, os.path.join(self.path, ".git"), read_from_repo)
            self.from_dict(d)
        except FileNotFoundError:
            pass
        self.status = 'unknown' if self.status == '' else self.status

    def do_read_info_from_repo(self):
        repo = None
        try:
            if os.path.exists(os.path.join(self.path, ".git")):
                repo = Repo(self.path)
        except Exception:
            errors.report(f"Error reading github repository info from {self.path}", exc_info=True)

        if repo is None or repo.bare:
            self.remote = None
        else:
            try:
                self.remote = next(repo.remote().urls, None)
                commit = repo.head.commit
                self.commit_date = commit.committed_date
                if repo.active_branch:
                    self.branch = repo.active_branch.name
                self.commit_hash = commit.hexsha
                self.version = self.commit_hash[:8]

            except Exception:
                errors.report(f"Failed reading extension data from Git repository ({self.name})", exc_info=True)
                self.remote = None

        self.have_info_from_repo = True

    def list_files(self, subdir, extension):
        dirpath = os.path.join(self.path, subdir)
        if not os.path.isdir(dirpath):
            return []

        res = []
        for filename in sorted(os.listdir(dirpath)):
            res.append(scripts.ScriptFile(self.path, filename, os.path.join(dirpath, filename)))

        res = [x for x in res if os.path.splitext(x.path)[1].lower() == extension and os.path.isfile(x.path)]

        return res
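    # Hypothetical usage: ext.list_files("scripts", ".py") returns a ScriptFile entry for every *.py
    # file directly inside the extension's "scripts" folder; note that `extension` is compared against
    # os.path.splitext()'s result, so it must include the leading dot and be lower-case.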

    def check_updates(self):
        repo = Repo(self.path)
        for fetch in repo.remote().fetch(dry_run=True):
            if fetch.flags != fetch.HEAD_UPTODATE:
                self.can_update = True
                self.status = "new commits"
                return

        try:
            origin = repo.rev_parse('origin')
            if repo.head.commit != origin:
                self.can_update = True
                self.status = "behind HEAD"
                return
        except Exception:
            self.can_update = False
            self.status = "unknown (remote error)"
            return

        self.can_update = False
        self.status = "latest"

    def fetch_and_reset_hard(self, commit='origin'):
        repo = Repo(self.path)
        # Fix: `error: Your local changes to the following files would be overwritten by merge`,
        # which happens because WSL2 Docker sets file permissions to 755 instead of 644.
        repo.git.fetch(all=True)
        repo.git.reset(commit, hard=True)
        self.have_info_from_repo = False


def list_extensions():
    extensions.clear()

    if shared.cmd_opts.disable_all_extensions:
        print("*** \"--disable-all-extensions\" arg was used, will not load any extensions ***")
    elif shared.opts.disable_all_extensions == "all":
        print("*** \"Disable all extensions\" option was set, will not load any extensions ***")
    elif shared.cmd_opts.disable_extra_extensions:
        print("*** \"--disable-extra-extensions\" arg was used, will only load built-in extensions ***")
    elif shared.opts.disable_all_extensions == "extra":
        print("*** \"Disable all extensions\" option was set, will only load built-in extensions ***")

    loaded_extensions = {}

    # scan through extensions directory and load metadata
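    # directories are scanned in order (built-in extensions first), so whichever extension registers a
    # canonical name first is kept and later duplicates are reported and skipped below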
    for dirname in [extensions_builtin_dir, extensions_dir]:
        if not os.path.isdir(dirname):
            continue

        for extension_dirname in sorted(os.listdir(dirname)):
            path = os.path.join(dirname, extension_dirname)
            if not os.path.isdir(path):
                continue

            canonical_name = extension_dirname
            metadata = ExtensionMetadata(path, canonical_name)

            # check for duplicated canonical names
            already_loaded_extension = loaded_extensions.get(metadata.canonical_name)
            if already_loaded_extension is not None:
                errors.report(f'Duplicate canonical name "{canonical_name}" found in extensions "{extension_dirname}" and "{already_loaded_extension.name}". Former will be discarded.', exc_info=False)
                continue

            is_builtin = dirname == extensions_builtin_dir
            extension = Extension(name=extension_dirname, path=path, enabled=extension_dirname not in shared.opts.disabled_extensions, is_builtin=is_builtin, metadata=metadata)
            extensions.append(extension)
            loaded_extensions[canonical_name] = extension

    # check for requirements
    for extension in extensions:
        if not extension.enabled:
            continue

        for req in extension.metadata.requires:
            required_extension = loaded_extensions.get(req)
            if required_extension is None:
                errors.report(f'Extension "{extension.name}" requires "{req}" which is not installed.', exc_info=False)
                continue

            if not required_extension.enabled:
                errors.report(f'Extension "{extension.name}" requires "{required_extension.name}" which is disabled.', exc_info=False)
                continue


extensions: list[Extension] = []
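# A rough sketch of how this module is typically driven (hypothetical call sites; the real entry
# points live elsewhere in the webui code base):
#
#   import modules.extensions as extensions
#
#   extensions.list_extensions()        # scan the extension directories and populate `extensions`
#   for ext in extensions.active():     # enabled extensions, honouring the disable-all/extra options
#       ext.read_info_from_repo()       # fill in remote, branch, commit hash, etc. from git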
