cython

Форк
0
/
Utils.py 
689 строк · 20.8 Кб
1
"""
2
Cython -- Things that don't belong anywhere else in particular
3
"""
4

5

6
import cython
7

8
# Pre-declare module-level names with C types for Cython compilation of this
# module itself; this is a no-op when running as plain Python.
cython.declare(
    os=object, sys=object, re=object, io=object, glob=object, shutil=object, tempfile=object,
    update_wrapper=object, partial=object, wraps=object, cython_version=object,
    _cache_function=object, _function_caches=list, _parse_file_version=object, _match_file_encoding=object,
)
13

14
import os
15
import sys
16
import re
17
import io
18
import glob
19
import shutil
20
import tempfile
21

22

23

24
if sys.version_info < (3, 9):
    # Work around a limited API bug in these Python versions
    # where it isn't possible to make __module__ of CyFunction
    # writeable. This means that wraps fails when applied to
    # cyfunctions.
    # The objective here is just to make limited API builds
    # testable.

    from functools import update_wrapper, partial

    def _update_wrapper(wrapper, wrapped):
        # Like functools.update_wrapper(), but tolerates wrapper objects
        # with read-only attributes instead of failing.
        try:
            return update_wrapper(wrapper, wrapped)
        except AttributeError:
            return wrapper  # worse, but it still works

    def wraps(wrapped):
        # Drop-in replacement for functools.wraps() based on the
        # fault-tolerant _update_wrapper() above.
        return partial(_update_wrapper, wrapped=wrapped)
else:
    from functools import wraps
44

45

46
from . import __version__ as cython_version
47

48
PACKAGE_FILES = ("__init__.py", "__init__.pyc", "__init__.pyx", "__init__.pxd")
49

50
_build_cache_name = "__{}_cache".format
51
_CACHE_NAME_PATTERN = re.compile(r"^__(.+)_cache$")
52

53
modification_time = os.path.getmtime
54

55
GENERATED_BY_MARKER = "/* Generated by Cython %s */" % cython_version
56
GENERATED_BY_MARKER_BYTES = GENERATED_BY_MARKER.encode('us-ascii')
57

58

59
class _TryFinallyGeneratorContextManager:
60
    """
61
    Fast, bare minimum @contextmanager, only for try-finally, not for exception handling.
62
    """
63
    def __init__(self, gen):
64
        self._gen = gen
65

66
    def __enter__(self):
67
        return next(self._gen)
68

69
    def __exit__(self, exc_type, exc_val, exc_tb):
70
        try:
71
            next(self._gen)
72
        except (StopIteration, GeneratorExit):
73
            pass
74

75

76
def try_finally_contextmanager(gen_func):
    """Decorator: turn a try-finally style generator function into a factory
    of lightweight _TryFinallyGeneratorContextManager objects.
    """
    @wraps(gen_func)
    def wrapper(*args, **kwargs):
        return _TryFinallyGeneratorContextManager(gen_func(*args, **kwargs))
    return wrapper
81

82

83
# Unbounded memoisation helper: functools.cache where available (Python 3.9+),
# otherwise an unbounded lru_cache, which is equivalent.
try:
    from functools import cache as _cache_function
except ImportError:
    from functools import lru_cache
    _cache_function = lru_cache(maxsize=None)
88

89

90
_function_caches = []
91

92

93
def clear_function_caches():
94
    for cache in _function_caches:
95
        cache.cache_clear()
96

97

98
def cached_function(f):
    """Memoise *f* with an unbounded cache and register that cache so it can
    be reset later via clear_function_caches().
    """
    cached = _cache_function(f)
    _function_caches.append(cached)
    # The coverage plugin needs access to the original, uncached function.
    cached.uncached = f
    return cached
103

104

105

106
def _find_cache_attributes(obj):
    """Yield (cache_attribute_name, method_name) pairs for every attribute of
    *obj* whose name looks like a cache created by cached_method().

    The named method may or may not still exist on the object.
    """
    for attribute_name in dir(obj):
        found = _CACHE_NAME_PATTERN.match(attribute_name)
        if found:
            yield attribute_name, found.group(1)
115

116

117
def clear_method_caches(obj):
    """Remove every cached_method() cache found on *obj*, provided the
    corresponding method still exists.

    A cache-like attribute without a matching method is assumed not to have
    been created by cached_method() and is left untouched.
    """
    for cache_name, method_name in _find_cache_attributes(obj):
        if not hasattr(obj, method_name):
            continue  # not one of ours - unrelated attribute matched the pattern
        delattr(obj, cache_name)
126

127

128
def cached_method(f):
    """Decorator: memoise a method's results in a per-instance dict.

    The cache lives on the instance under a name derived from the method
    name so that clear_method_caches() can find and delete it again.
    Only positional, hashable arguments are supported.
    """
    cache_name = _build_cache_name(f.__name__)

    def wrapper(self, *args):
        cache = getattr(self, cache_name, None)
        if cache is None:
            cache = {}
            setattr(self, cache_name, cache)
        try:
            return cache[args]
        except KeyError:
            result = cache[args] = f(self, *args)
            return result

    return wrapper
142

143

144
def replace_suffix(path, newsuf):
    """Return *path* with its file extension replaced by *newsuf*
    (which should include the leading dot, e.g. ".c").
    """
    root = os.path.splitext(path)[0]
    return root + newsuf
147

148

149
def open_new_file(path):
    # Open 'path' for writing as a *fresh* file: any pre-existing file is
    # unlinked first instead of being truncated in place.
    if os.path.exists(path):
        # Make sure to create a new file here so we can
        # safely hard link the output files.
        os.unlink(path)

    # We only write pure ASCII code strings, but need to write file paths in position comments.
    # Those are encoded in UTF-8 so that tools can parse them out again.
    return open(path, "w", encoding="UTF-8")
158

159

160
def castrate_file(path, st):
    """Replace the contents of an output file after a failed compilation with
    an #error directive so that accidentally using it fails loudly later.

    Also sets access and modification times back to those specified by ``st``
    (a stat struct), with mtime reduced by one second so the file is seen as
    out of date by rebuild checks.  Only files that look Cython-generated
    (including previously failed or empty ones) are touched.
    """
    if not is_cython_generated_file(path, allow_failed=True, if_not_found=False):
        return

    try:
        f = open_new_file(path)
    except OSError:
        pass  # best effort - if we cannot rewrite the file, leave it alone
    else:
        f.write(
            "#error Do not use this file, it is the result of a failed Cython compilation.\n")
        f.close()
        if st:
            # Back-date the file (mtime - 1) so it counts as outdated.
            os.utime(path, (st.st_atime, st.st_mtime-1))
178

179

180
def is_cython_generated_file(path, allow_failed=False, if_not_found=True):
    """Check whether *path* looks like a file generated by Cython.

    Returns *if_not_found* when the file does not exist (or vanished while
    being read).  Empty files count as overwritable, since they may be
    remnants of earlier failures.  With *allow_failed*, a file holding only
    the failure marker of a broken compilation also counts.
    """
    failure_marker = b"#error Do not use this file, it is the result of a failed Cython compilation."

    file_content = None
    if os.path.exists(path):
        try:
            with open(path, "rb") as f:
                file_content = f.read(len(failure_marker))
        except OSError:
            pass  # Probably just doesn't exist any more

    if file_content is None:
        # file does not exist (yet)
        return if_not_found

    if file_content.startswith(b"/* Generated by Cython "):
        return True  # regular Cython-generated C file
    if allow_failed and file_content == failure_marker:
        return True  # leftover of a previously failed compilation
    # Allow overwriting empty files as well - possibly earlier failures.
    return not file_content
202

203

204
def file_generated_by_this_cython(path):
    """Check whether *path* starts with the marker comment written by exactly
    this Cython version.  A missing or unreadable file is simply falsy.
    """
    header = b''
    if os.path.exists(path):
        try:
            with open(path, "rb") as f:
                header = f.read(len(GENERATED_BY_MARKER_BYTES))
        except OSError:
            pass  # Probably just doesn't exist any more
    return header and header.startswith(GENERATED_BY_MARKER_BYTES)
213

214

215
def file_newer_than(path, time):
    """Return True if the file at *path* was modified after *time* (an mtime value)."""
    return modification_time(path) > time
218

219

220
def safe_makedirs(path):
    """Create *path* (including parents) if needed.

    Silently succeeds when the directory already exists, e.g. because it was
    created concurrently; any other failure is re-raised.
    """
    if os.path.isdir(path):
        return
    try:
        os.makedirs(path)
    except OSError:
        # Re-raise unless the directory exists by now (lost a race).
        if not os.path.isdir(path):
            raise
226

227

228
def copy_file_to_dir_if_newer(sourcefile, destdir):
    """
    Copy file sourcefile to directory destdir (creating it if needed),
    preserving metadata. If the destination file exists and is not
    older than the source file, the copying is skipped.
    """
    destfile = os.path.join(destdir, os.path.basename(sourcefile))
    try:
        desttime = modification_time(destfile)
    except OSError:
        # New file does not exist, destdir may or may not exist
        safe_makedirs(destdir)
    else:
        # New file already exists
        if not file_newer_than(sourcefile, desttime):
            return
    # copy2 preserves timestamps/permissions, keeping the mtime check stable.
    shutil.copy2(sourcefile, destfile)
245

246

247
@cached_function
def find_root_package_dir(file_path):
    """Walk upwards from *file_path* and return the top-most package
    directory, i.e. the first parent that is not itself a package.
    """
    parent = os.path.dirname(file_path)
    if file_path == parent:
        # Reached the filesystem root.
        return parent
    if is_package_dir(parent):
        return find_root_package_dir(parent)
    return parent
256

257

258
@cached_function
def check_package_dir(dir_path, package_names):
    """Resolve the directory of the dotted package path *package_names*
    below *dir_path*.

    Returns (final_directory, is_namespace): the package counts as a
    namespace package unless at least one level contains an __init__ file.
    """
    namespace = True
    for package_name in package_names:
        dir_path = os.path.join(dir_path, package_name)
        if contains_init(dir_path):
            namespace = False
    return dir_path, namespace
267

268

269
@cached_function
def contains_init(dir_path):
    """Return 1 if *dir_path* contains any kind of package __init__ file,
    None (implicitly) otherwise.
    """
    for package_file in PACKAGE_FILES:
        if path_exists(os.path.join(dir_path, package_file)):
            return 1
275

276

277
def is_package_dir(dir_path):
    """Return 1 if *dir_path* is a package directory (has an __init__ file),
    None otherwise."""
    return 1 if contains_init(dir_path) else None
280

281

282
@cached_function
def path_exists(path):
    """Check whether *path* exists, either on the filesystem or inside the
    zip archive of a PEP 302 loader (when Cython itself runs from a zip).
    """
    # try on the filesystem first
    if os.path.exists(path):
        return True
    # figure out if a PEP 302 loader is around
    try:
        loader = __loader__
        # XXX the code below assumes a 'zipimport.zipimporter' instance
        # XXX should be easy to generalize, but too lazy right now to write it
        archive_path = getattr(loader, 'archive', None)
        if archive_path:
            normpath = os.path.normpath(path)
            if normpath.startswith(archive_path):
                # Path inside the archive, relative to its root.
                arcname = normpath[len(archive_path)+1:]
                try:
                    # get_data() raises OSError if the entry is missing.
                    loader.get_data(arcname)
                    return True
                except OSError:
                    return False
    except NameError:
        pass  # no __loader__ in this environment
    return False
305

306

307
# Extracts the version digits from versioned file names like "lib.cython-30.pxd".
_parse_file_version = re.compile(r".*[.]cython-([0-9]+)[.][^./\\]+$").findall
308

309

310
@cached_function
def find_versioned_file(directory, filename, suffix,
                        _current_version=int(re.sub(r"^([0-9]+)[.]([0-9]+).*", r"\1\2", cython_version))):
    """
    Search a directory for versioned pxd files, e.g. "lib.cython-30.pxd" for a Cython 3.0+ version.

    @param directory: the directory to search
    @param filename: the filename without suffix
    @param suffix: the filename extension including the dot, e.g. ".pxd"
    @return: the file path if found, or None

    Note: ``_current_version`` is computed once at definition time from the
    running Cython version (e.g. "3.0.1" -> 30) and is not meant to be passed
    by callers.
    """
    assert not suffix or suffix[:1] == '.'
    path_prefix = os.path.join(directory, filename)

    matching_files = glob.glob(glob.escape(path_prefix) + ".cython-*" + suffix)
    path = path_prefix + suffix
    if not os.path.exists(path):
        path = None
    best_match = (-1, path)  # last resort, if we do not have versioned .pxd files

    # Prefer the highest file version that is not newer than we are.
    for path in matching_files:
        versions = _parse_file_version(path)
        if versions:
            int_version = int(versions[0])
            # Let's assume no duplicates.
            if best_match[0] < int_version <= _current_version:
                best_match = (int_version, path)
    return best_match[1]
338

339

340
# file name encodings
341

342
def decode_filename(filename):
    """Decode a bytes file name to str using the file system encoding.

    Returns the input unchanged if it is already a str or if decoding fails.
    """
    if not isinstance(filename, bytes):
        return filename
    encoding = sys.getfilesystemencoding()
    if encoding is None:
        encoding = sys.getdefaultencoding()
    try:
        return filename.decode(encoding)
    except UnicodeDecodeError:
        # Keep the undecodable bytes object rather than failing.
        return filename
352

353

354
# support for source file encoding detection
355

356
# PEP 263: matches "coding[:=] <name>" declarations in raw source bytes.
_match_file_encoding = re.compile(br"(\w*coding)[:=]\s*([-\w.]+)").search
357

358

359
def detect_opened_file_encoding(f, default='UTF-8'):
    """Detect the source encoding declared in the open binary file *f*.

    Implements PEPs 263 and 3120: an encoding comment may appear on one of
    the first two lines.  Returns *default* when no declaration is found.
    The file position is left wherever reading stopped - callers must seek.
    """
    # Most of the time the first two lines fall in the first couple of hundred chars,
    # and this bulk read/split is much faster.
    lines = ()
    start = b''
    while len(lines) < 3:
        data = f.read(500)
        start += data
        lines = start.split(b"\n")
        if not data:
            break  # EOF before three lines were seen

    # "c_string_encoding" is a Cython directive, not an encoding declaration.
    m = _match_file_encoding(lines[0])
    if m and m.group(1) != b'c_string_encoding':
        return m.group(2).decode('iso8859-1')
    elif len(lines) > 1:
        m = _match_file_encoding(lines[1])
        if m:
            return m.group(2).decode('iso8859-1')
    return default
380

381

382
def skip_bom(f):
    """
    Advance the text stream *f* past a leading Unicode BOM, if present.
    This could be added to the scanner, but it's *substantially* easier
    to keep it at this level.
    """
    first_char = f.read(1)
    if first_char != '\uFEFF':
        # No BOM - rewind so the caller sees the full content.
        f.seek(0)
390

391

392
def open_source_file(source_filename, encoding=None, error_handling=None):
    """Open a Cython source file as a text stream and skip a leading BOM.

    If *encoding* is None, it is detected from the PEP 263 declaration in
    the file itself.  Falls back to reading through a zipimport-style loader
    when the file is not directly on the filesystem.

    Raises FileNotFoundError if the file cannot be found either way.
    """
    stream = None
    try:
        if encoding is None:
            # Most of the time the encoding is not specified, so try hard to open the file only once.
            f = open(source_filename, 'rb')
            encoding = detect_opened_file_encoding(f)
            f.seek(0)
            stream = io.TextIOWrapper(f, encoding=encoding, errors=error_handling)
        else:
            stream = open(source_filename, encoding=encoding, errors=error_handling)

    except OSError:
        if os.path.exists(source_filename):
            raise  # File is there, but something went wrong reading from it.
        # Allow source files to be in zip files etc.
        try:
            loader = __loader__
            if source_filename.startswith(loader.archive):
                stream = open_source_from_loader(
                    loader, source_filename,
                    encoding, error_handling)
        except (NameError, AttributeError):
            pass  # no loader available, fall through to the error below

    if stream is None:
        raise FileNotFoundError(source_filename)
    skip_bom(stream)
    return stream
421

422

423
def open_source_from_loader(loader,
                            source_filename,
                            encoding=None, error_handling=None):
    # Read file data through a PEP 302 (zipimport-style) loader and wrap it
    # in a text stream.  'source_filename' must lie inside loader.archive.
    nrmpath = os.path.normpath(source_filename)
    # Archive-relative name (skip the archive path plus the separator).
    arcname = nrmpath[len(loader.archive)+1:]
    data = loader.get_data(arcname)
    return io.TextIOWrapper(io.BytesIO(data),
                            encoding=encoding,
                            errors=error_handling)
432

433

434
def str_to_number(value):
    """Convert an integer literal string into a Python int.

    The string must already have been accepted by the parser, optionally
    with a leading '-'.  Handles hex/octal/binary prefixes, Py2-style plain
    octal ('0136') and Py2 'L'-suffixed hex literals.
    """
    negative = value.startswith('-')
    if negative:
        value = value[1:]

    if len(value) < 2:
        number = int(value, 0)
    elif value[0] == '0':
        prefix = value[1]  # 'x'/'o'/'b', or a digit for Py2 octal
        if prefix in 'xX':
            # hex notation ('0x1AF'), possibly with a Py2 'L' suffix
            number = int(strip_py2_long_suffix(value)[2:], 16)
        elif prefix in 'oO':
            # Py3 octal notation ('0o136')
            number = int(value[2:], 8)
        elif prefix in 'bB':
            # Py3 binary notation ('0b101')
            number = int(value[2:], 2)
        else:
            # Py2 octal notation ('0136')
            number = int(value, 8)
    else:
        number = int(value, 0)
    return -number if negative else number
461

462

463
def strip_py2_long_suffix(value_str):
    """
    Remove a trailing Python 2 'L'/'l' long-integer suffix, if present.
    Python 3 cannot parse such stringified numbers otherwise.
    """
    return value_str[:-1] if value_str[-1] in 'lL' else value_str
471

472

473
def long_literal(value):
    """Return True if *value* (an int, or an integer literal string) does not
    fit into a signed 32 bit integer."""
    if isinstance(value, str):
        value = str_to_number(value)
    return not (-2**31 <= value < 2**31)
477

478

479
@try_finally_contextmanager
def captured_fd(stream=2, encoding=None):
    """Context manager that captures output written to the OS-level file
    descriptor *stream* (default 2, i.e. stderr) into a temporary file.

    Yields a zero-argument getter that returns everything captured so far as
    bytes, or decoded with *encoding* if one was given.  The output is read
    one final time before the temp file closes, so the getter keeps working
    after the context has exited.
    """
    orig_stream = os.dup(stream)  # keep copy of original stream
    try:
        with tempfile.TemporaryFile(mode="a+b") as temp_file:
            def read_output(_output=[b'']):
                # The mutable default keeps the last value readable
                # even after the temp file has been closed.
                if not temp_file.closed:
                    temp_file.seek(0)
                    _output[0] = temp_file.read()
                return _output[0]

            os.dup2(temp_file.fileno(), stream)  # replace stream by copy of pipe
            def get_output():
                result = read_output()
                return result.decode(encoding) if encoding else result

            yield get_output
            # note: @contextlib.contextmanager requires try-finally here
            os.dup2(orig_stream, stream)  # restore original stream
            read_output()  # keep the output in case it's used after closing the context manager
    finally:
        os.close(orig_stream)
501

502

503
def get_encoding_candidates():
    """Return encodings worth trying when decoding captured output: the
    default encoding first, then the distinct encodings of the standard
    streams (which can differ when redirected).
    """
    candidates = [sys.getdefaultencoding()]
    streams = (sys.stdout, sys.stdin, sys.__stdout__, sys.__stdin__)
    for stream in streams:
        encoding = getattr(stream, 'encoding', None)
        # encoding might be None (e.g. somebody redirects stdout):
        if encoding is not None and encoding not in candidates:
            candidates.append(encoding)
    return candidates
511

512

513
def prepare_captured(captured):
    """Decode captured output bytes to text, trying likely encodings first.

    Returns None for empty/whitespace-only output.  Falls back to latin-1,
    which maps every byte, so at least the ASCII parts stay readable.
    """
    captured_bytes = captured.strip()
    if not captured_bytes:
        return None
    for encoding in get_encoding_candidates():
        try:
            return captured_bytes.decode(encoding)
        except UnicodeDecodeError:
            continue
    # last resort: print at least the readable ascii parts correctly.
    return captured_bytes.decode('latin-1')
524

525

526
def print_captured(captured, output, header_line=None):
    """Write decoded captured output to *output*, optionally preceded by
    *header_line*.  Writes nothing at all when there was no real output."""
    text = prepare_captured(captured)
    if not text:
        return
    if header_line:
        output.write(header_line)
    output.write(text)
532

533

534
def print_bytes(s, header_text=None, end=b'\n', file=sys.stdout, flush=True):
    """Write raw bytes *s* (followed by *end*) to the binary buffer of
    *file*, optionally preceded by the text *header_text* on the text layer.
    """
    if header_text:
        file.write(header_text)  # note: text! => file.write() instead of out.write()
    # Flush the text layer first so text and binary output stay ordered.
    file.flush()
    out = file.buffer
    out.write(s)
    if end:
        out.write(end)
    if flush:
        out.flush()
544

545

546
class OrderedSet:
    """A set that also remembers the insertion order of its elements."""

    def __init__(self, elements=()):
        self._list = []   # elements in insertion order
        self._set = set()  # same elements, for O(1) membership tests
        self.update(elements)

    def __iter__(self):
        return iter(self._list)

    def update(self, elements):
        add = self.add
        for element in elements:
            add(element)

    def add(self, e):
        if e in self._set:
            return
        self._list.append(e)
        self._set.add(e)

    def __bool__(self):
        return bool(self._set)

    __nonzero__ = __bool__
568

569

570
# Class decorator that adds a metaclass and recreates the class with it.
# Copied from 'six'.
def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""
    def wrapper(cls):
        body = dict(cls.__dict__)
        slots = body.get('__slots__')
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            # Slot descriptors are recreated by the metaclass call below.
            for slot_name in slots:
                body.pop(slot_name)
        # These are added automatically and must not be carried over.
        body.pop('__dict__', None)
        body.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, body)
    return wrapper
586

587

588
def raise_error_if_module_name_forbidden(full_module_name):
    """Fail early for the reserved module name 'cython' and its submodules -
    compiling a module by that name cannot work."""
    if full_module_name != 'cython' and not full_module_name.startswith('cython.'):
        return
    raise ValueError('cython is a special module, cannot be used as a module name')
592

593

594
def build_hex_version(version_string):
    """
    Parse and translate public version identifier like '4.3a1' into the readable hex representation '0x040300A1' (like PY_VERSION_HEX).

    SEE: https://peps.python.org/pep-0440/#public-version-identifiers
    """
    # '4.12a1' becomes [4, 12, 0, 0xA01]; .dev/.pre/.post segments are ignored.
    digits = []
    release_status = 0xF0  # 0xF0 marks a final release
    for segment in re.split(r'(\D+)', version_string):
        if segment in ('a', 'b', 'rc'):
            release_status = {'a': 0xA0, 'b': 0xB0, 'rc': 0xC0}[segment]
            digits = (digits + [0, 0])[:3]  # 1.2a1 -> 1.2.0a1
        elif segment in ('.dev', '.pre', '.post'):
            break  # break since those are the last segments
        elif segment != '.':
            digits.append(int(segment))

    # Pad to exactly four parts and fold the release status into the last one.
    digits = (digits + [0] * 3)[:4]
    digits[3] += release_status

    # Accumulate a single value, two hex digits per version part.
    hexversion = 0
    for part in digits:
        hexversion = hexversion * 256 + part

    return '0x%08X' % hexversion
622

623

624
def write_depfile(target, source, dependencies):
    """Write a Make-style dependency file '<target>.dep' listing the
    *dependencies* of *target*.

    Dependencies located below the source file's directory are written
    relative to the current working directory; all others are absolute.
    """
    src_base_dir = os.path.dirname(source)
    cwd = os.getcwd()
    if not src_base_dir.endswith(os.sep):
        src_base_dir += os.sep
    # paths below the base_dir are relative, otherwise absolute
    paths = []
    for fname in dependencies:
        if fname.startswith(src_base_dir):
            try:
                newpath = os.path.relpath(fname, cwd)
            except ValueError:
                # if they are on different Windows drives, absolute is fine
                newpath = os.path.abspath(fname)
        else:
            newpath = os.path.abspath(fname)
        paths.append(newpath)

    # "target: dep1 \\\n  dep2 ..." - standard Makefile dependency syntax.
    depline = os.path.relpath(target, cwd) + ": \\\n  "
    depline += " \\\n  ".join(paths) + "\n"

    with open(target+'.dep', 'w') as outfile:
        outfile.write(depline)
647

648

649
def print_version():
    """Print the Cython version to stdout, and additionally to stderr when
    stderr goes somewhere different (for tools that still read it there)."""
    print("Cython version %s" % cython_version)
    # For legacy reasons, we also write the version to stderr.
    # New tools should expect it in stdout, but existing ones still pipe from stderr, or from both.
    if sys.stderr.isatty() or sys.stdout == sys.stderr:
        return
    if os.fstat(1) == os.fstat(2):
        # This is somewhat unsafe since sys.stdout/err might not really be linked to streams 1/2.
        # However, in most *relevant* cases, where Cython is run as an external tool, they are linked.
        return
    sys.stderr.write("Cython version %s\n" % cython_version)
660

661

662
def normalise_float_repr(float_str):
    """
    Generate a 'normalised', simple digits string representation of a float value
    to allow string comparisons.  Examples: '.123', '123.456', '123.'
    """
    digits = float_str.lower().lstrip('0')

    # Separate an exponent ('1.5e3') from the mantissa digits.
    # (lower() above guarantees the exponent marker is 'e', never 'E'.)
    exp = 0
    if 'e' in digits:
        digits, exp_str = digits.split('e', 1)
        exp = int(exp_str)

    # Drop the decimal point, remembering how many integer digits there were.
    point = digits.find('.')
    if point == -1:
        num_int_digits = len(digits)
    else:
        num_int_digits = point
        digits = digits[:point] + digits[point + 1:]
    exp += num_int_digits

    # Reassemble with the point at position 'exp', zero-padding either side,
    # then strip redundant trailing zeros.
    result = (
        digits[:exp]
        + '0' * (exp - len(digits))
        + '.'
        + '0' * -exp
        + digits[exp:]
    ).rstrip('0')

    return result if result != '.' else '.0'
690

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.