#! /usr/bin/env python3

'''
Developer CLI: building (meson), tests, benchmark, etc.

This file contains task definitions for doit (https://pydoit.org).
And also a CLI interface using click (https://click.palletsprojects.com).

The CLI is ideal for project contributors, while the
doit interface is better suited for authoring the development tasks.

REQUIREMENTS:
--------------
- see environment.yml: doit, pydevtool, click, rich-click

# USAGE:

## 1 - click API

Commands can be added using the default Click API, e.g.:

```
@cli.command()
@click.argument('extra_argv', nargs=-1)
@click.pass_obj
def python(ctx_obj, extra_argv):
    """Start a Python shell with PYTHONPATH set"""
```

## 2 - class based Click command definition

`CliGroup` provides an alternative class based API to create Click commands.

Just use the `cls_cmd` decorator and define a `run()` method:

```
@cli.cls_cmd('test')
class Test():
    """Run tests"""

    @classmethod
    def run(cls):
        print('Running tests...')
```

- Commands may make use of a Click.Group context by defining a `ctx` class attribute
- Command options are also defined as class attributes

```
@cli.cls_cmd('test')
class Test():
    """Run tests"""
    ctx = CONTEXT

    verbose = Option(
        ['--verbose', '-v'], default=False, is_flag=True, help="verbosity")

    @classmethod
    def run(cls, **kwargs): # kwargs contains options from class and CONTEXT
        print('Running tests...')
```

## 3 - class based interface can be run as a doit task by subclassing from Task

- Extra doit task metadata can be defined as a class attribute TASK_META.
- The `run()` method will be used as the python-action by the task.

```
@cli.cls_cmd('test')
class Test(Task):   # Task base class, doit will create a task
    """Run tests"""
    ctx = CONTEXT

    TASK_META = {
        'task_dep': ['build'],
    }

    @classmethod
    def run(cls, **kwargs):
        pass
```

## 4 - doit tasks with cmd-action "shell" or dynamic metadata

Define method `task_meta()` instead of `run()`:

```
@cli.cls_cmd('refguide-check')
class RefguideCheck(Task):
    @classmethod
    def task_meta(cls, **kwargs):
        return {
```

'''
96

97
import os
98
import subprocess
99
import sys
100
import warnings
101
import shutil
102
import json
103
import datetime
104
import time
105
import importlib
106
import importlib.util
107
import errno
108
import contextlib
109
from sysconfig import get_path
110
import math
111
import traceback
112
from concurrent.futures.process import _MAX_WINDOWS_WORKERS
113

114
from pathlib import Path
115
from collections import namedtuple
116
from types import ModuleType as new_module
117
from dataclasses import dataclass
118

119
import click
120
from click import Option, Argument
121
from doit.cmd_base import ModuleTaskLoader
122
from doit.reporter import ZeroReporter
123
from doit.exceptions import TaskError
124
from doit.api import run_tasks
125
from doit import task_params
126
from pydevtool.cli import UnifiedContext, CliGroup, Task
127
from rich.console import Console
128
from rich.panel import Panel
129
from rich.theme import Theme
130
from rich_click import rich_click
131

132
DOIT_CONFIG = {
133
    'verbosity': 2,
134
    'minversion': '0.36.0',
135
}
136

137

138
console_theme = Theme({
139
    "cmd": "italic gray50",
140
})
141

142
if sys.platform == 'win32':
143
    class EMOJI:
144
        cmd = ">"
145
else:
146
    class EMOJI:
147
        cmd = ":computer:"
148

149

150
rich_click.STYLE_ERRORS_SUGGESTION = "yellow italic"
151
rich_click.SHOW_ARGUMENTS = True
152
rich_click.GROUP_ARGUMENTS_OPTIONS = False
153
rich_click.SHOW_METAVARS_COLUMN = True
154
rich_click.USE_MARKDOWN = True
155
rich_click.OPTION_GROUPS = {
156
    "dev.py": [
157
        {
158
            "name": "Options",
159
            "options": [
160
                "--help", "--build-dir", "--no-build", "--install-prefix"],
161
        },
162
    ],
163

164
    "dev.py test": [
165
        {
166
            "name": "Options",
167
            "options": ["--help", "--verbose", "--parallel", "--coverage",
168
                        "--durations"],
169
        },
170
        {
171
            "name": "Options: test selection",
172
            "options": ["--submodule", "--tests", "--mode"],
173
        },
174
    ],
175
}
176
rich_click.COMMAND_GROUPS = {
177
    "dev.py": [
178
        {
179
            "name": "build & testing",
180
            "commands": ["build", "test"],
181
        },
182
        {
183
            "name": "static checkers",
184
            "commands": ["lint", "mypy"],
185
        },
186
        {
187
            "name": "environments",
188
            "commands": ["shell", "python", "ipython", "show_PYTHONPATH"],
189
        },
190
        {
191
            "name": "documentation",
192
            "commands": ["doc", "refguide-check", "smoke-docs", "smoke-tutorial"],
193
        },
194
        {
195
            "name": "release",
196
            "commands": ["notes", "authors"],
197
        },
198
        {
199
            "name": "benchmarking",
200
            "commands": ["bench"],
201
        },
202
    ]
203
}
204

205

206
class ErrorOnlyReporter(ZeroReporter):
207
    desc = """Report errors only"""
208

209
    def runtime_error(self, msg):
210
        console = Console()
211
        console.print("[red bold] msg")
212

213
    def add_failure(self, task, fail_info):
214
        console = Console()
215
        if isinstance(fail_info, TaskError):
216
            console.print(f'[red]Task Error - {task.name}'
217
                          f' => {fail_info.message}')
218
        if fail_info.traceback:
219
            console.print(Panel(
220
                "".join(fail_info.traceback),
221
                title=f"{task.name}",
222
                subtitle=fail_info.message,
223
                border_style="red",
224
            ))
225

226

227
CONTEXT = UnifiedContext({
228
    'build_dir': Option(
229
        ['--build-dir'], metavar='BUILD_DIR',
230
        default='build', show_default=True,
231
        help=':wrench: Relative path to the build directory.'),
232
    'no_build': Option(
233
        ["--no-build", "-n"], default=False, is_flag=True,
234
        help=(":wrench: Do not build the project"
235
              " (note event python only modification require build).")),
236
    'install_prefix': Option(
237
        ['--install-prefix'], default=None, metavar='INSTALL_DIR',
238
        help=(":wrench: Relative path to the install directory."
239
              " Default is <build-dir>-install.")),
240
})
241

242

243
def run_doit_task(tasks):
244
    """
245
      :param tasks: (dict) task_name -> {options}
246
    """
247
    loader = ModuleTaskLoader(globals())
248
    doit_config = {
249
        'verbosity': 2,
250
        'reporter': ErrorOnlyReporter,
251
    }
252
    return run_tasks(loader, tasks, extra_config={'GLOBAL': doit_config})
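# Example (illustrative): the `lint` command below drives doit through this
# helper, roughly as:
#   run_doit_task({'lint': {'fix': False}, 'unicode-check': {}})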
253

254

255
class CLI(CliGroup):
256
    context = CONTEXT
257
    run_doit_task = run_doit_task
258

259

260
@click.group(cls=CLI)
261
@click.pass_context
262
def cli(ctx, **kwargs):
263
    """Developer Tool for SciPy
264

265
    \bCommands that require a built/installed instance are marked with :wrench:.
266

267

268
    \b**python dev.py --build-dir my-build test -s stats**
269

270
    """
271
    CLI.update_context(ctx, kwargs)
272

273

274
PROJECT_MODULE = "scipy"
275
PROJECT_ROOT_FILES = ['scipy', 'LICENSE.txt', 'meson.build']
276

277

278
@dataclass
279
class Dirs:
280
    """
281
        root:
282
            Directory where src, build config and tools are located
283
            (and this file)
284
        build:
285
            Directory where build output files (i.e. *.o) are saved
286
        install:
287
            Directory where .so from build and .py from src are put together.
288
        site:
289
            Directory where the built SciPy version was installed.
290
            This is a custom prefix, followed by a relative path matching
291
            the one the system would use for the site-packages of the active
292
            Python interpreter.
293
    """
294
    # all paths are absolute
295
    root: Path
296
    build: Path
297
    installed: Path
298
    site: Path  # <install>/lib/python<version>/site-packages
299

300
    def __init__(self, args=None):
301
        """:params args: object like Context(build_dir, install_prefix)"""
302
        self.root = Path(__file__).parent.absolute()
303
        if not args:
304
            return
305

306
        self.build = Path(args.build_dir).resolve()
307
        if args.install_prefix:
308
            self.installed = Path(args.install_prefix).resolve()
309
        else:
310
            self.installed = self.build.parent / (self.build.stem + "-install")
311

312
        # relative path for site-package with py version
313
        # i.e. 'lib/python3.10/site-packages'
314
        self.site = self.get_site_packages()
315

316
    def add_sys_path(self):
317
        """Add site dir to sys.path / PYTHONPATH"""
318
        site_dir = str(self.site)
319
        sys.path.insert(0, site_dir)
320
        os.environ['PYTHONPATH'] = \
321
            os.pathsep.join((site_dir, os.environ.get('PYTHONPATH', '')))
322

323
    def get_site_packages(self):
324
        """
325
        Depending on whether we have debian python or not,
326
        return dist_packages path or site_packages path.
327
        """
328
        if sys.version_info >= (3, 12):
329
            plat_path = Path(get_path('platlib'))
330
        else:
331
            # distutils is required to infer meson install path
332
            # for python < 3.12 in debian patched python
333
            with warnings.catch_warnings():
334
                warnings.filterwarnings("ignore", category=DeprecationWarning)
335
                from distutils import dist
336
                from distutils.command.install import INSTALL_SCHEMES
337
            if 'deb_system' in INSTALL_SCHEMES:
338
                # debian patched python in use
339
                install_cmd = dist.Distribution().get_command_obj('install')
340
                install_cmd.select_scheme('deb_system')
341
                install_cmd.finalize_options()
342
                plat_path = Path(install_cmd.install_platlib)
343
            else:
344
                plat_path = Path(get_path('platlib'))
345
        return self.installed / plat_path.relative_to(sys.exec_prefix)
346

347

348
@contextlib.contextmanager
349
def working_dir(new_dir):
350
    current_dir = os.getcwd()
351
    try:
352
        os.chdir(new_dir)
353
        yield
354
    finally:
355
        os.chdir(current_dir)
356

357

358
def import_module_from_path(mod_name, mod_path):
359
    """Import module with name `mod_name` from file path `mod_path`"""
360
    spec = importlib.util.spec_from_file_location(mod_name, mod_path)
361
    mod = importlib.util.module_from_spec(spec)
362
    spec.loader.exec_module(mod)
363
    return mod
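# Example (illustrative, hypothetical arguments):
#   mod = import_module_from_path('mymod', '/path/to/mymod.py')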
364

365

366
def get_test_runner(project_module):
367
    """
368
    Get the test runner from the locally installed/built project.
369
    """
370
    __import__(project_module)
371
    # scipy._lib._testutils:PytestTester
372
    test = sys.modules[project_module].test
373
    version = sys.modules[project_module].__version__
374
    mod_path = sys.modules[project_module].__file__
375
    mod_path = os.path.abspath(os.path.join(os.path.dirname(mod_path)))
376
    return test, version, mod_path
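# Example (illustrative): `test` is scipy._lib._testutils.PytestTester; the
# Test/SmokeDocs commands below call it roughly as
#   runner('fast', verbose=1, extra_argv=[], doctests=False, coverage=False,
#          tests=None, parallel=1)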
377

378

379
############
380

381
@cli.cls_cmd('build')
382
class Build(Task):
383
    """:wrench: Build & install package on path.
384

385
    \b
386
    ```shell-session
387
    Examples:
388

389
    $ python dev.py build --asan ;
390
        ASAN_OPTIONS=detect_leaks=0:symbolize=1:strict_init_order=true
391
        LD_PRELOAD=$(gcc --print-file-name=libasan.so)
392
        python dev.py test -v -t
393
        ./scipy/ndimage/tests/test_morphology.py -- -s
394
    ```
395
    """
396
    ctx = CONTEXT
397

398
    werror = Option(
399
        ['--werror'], default=False, is_flag=True,
400
        help="Treat warnings as errors")
401
    gcov = Option(
402
        ['--gcov'], default=False, is_flag=True,
403
        help="enable C code coverage via gcov (requires GCC)."
404
             "gcov output goes to build/**/*.gc*")
405
    asan = Option(
406
        ['--asan'], default=False, is_flag=True,
407
        help=("Build and run with AddressSanitizer support. "
408
              "Note: the build system doesn't check whether "
409
              "the project is already compiled with ASan. "
410
              "If not, you need to do a clean build (delete "
411
              "build and build-install directories)."))
412
    debug = Option(
413
        ['--debug', '-d'], default=False, is_flag=True, help="Debug build")
414
    release = Option(
415
        ['--release', '-r'], default=False, is_flag=True, help="Release build")
416
    parallel = Option(
417
        ['--parallel', '-j'], default=None, metavar='N_JOBS',
418
        help=("Number of parallel jobs for building. "
419
              "This defaults to the number of available physical CPU cores"))
420
    setup_args = Option(
421
        ['--setup-args', '-C'], default=[], multiple=True,
422
        help=("Pass along one or more arguments to `meson setup` "
423
              "Repeat the `-C` in case of multiple arguments."))
424
    show_build_log = Option(
425
        ['--show-build-log'], default=False, is_flag=True,
426
        help="Show build output rather than using a log file")
427
    with_scipy_openblas = Option(
428
        ['--with-scipy-openblas'], default=False, is_flag=True,
429
        help=("If set, use the `scipy-openblas32` wheel installed into the "
430
              "current environment as the BLAS/LAPACK to build against."))
431
    with_accelerate = Option(
432
        ['--with-accelerate'], default=False, is_flag=True,
433
        help=("If set, use `Accelerate` as the BLAS/LAPACK to build against."
434
              " Takes precedence over -with-scipy-openblas (macOS only)")
435
    )
436
    tags = Option(
437
        ['--tags'], default="runtime,python-runtime,tests,devel",
438
        show_default=True, help="Install tags to be used by meson."
439
    )
440

441
    @classmethod
442
    def setup_build(cls, dirs, args):
443
        """
444
        Set up the meson build.
445
        """
446
        for fn in PROJECT_ROOT_FILES:
447
            if not (dirs.root / fn).exists():
448
                print("To build the project, run dev.py in "
449
                      "git checkout or unpacked source")
450
                sys.exit(1)
451

452
        env = dict(os.environ)
453
        cmd = ["meson", "setup", dirs.build, "--prefix", dirs.installed]
454
        build_dir = dirs.build
455
        run_dir = Path()
456
        if build_dir.exists() and not (build_dir / 'meson-info').exists():
457
            if list(build_dir.iterdir()):
458
                raise RuntimeError("Can't build into non-empty directory "
459
                                   f"'{build_dir.absolute()}'")
460

461
        if sys.platform == "cygwin":
462
            # Cygwin only has netlib lapack, but can link against
463
            # OpenBLAS rather than netlib blas at runtime.  There is
464
            # no libopenblas-devel to enable linking against
465
            # openblas-specific functions or OpenBLAS Lapack
466
            cmd.extend(["-Dlapack=lapack", "-Dblas=blas"])
467

468
        build_options_file = (
469
            build_dir / "meson-info" / "intro-buildoptions.json")
470
        if build_options_file.exists():
471
            with open(build_options_file) as f:
472
                build_options = json.load(f)
473
            installdir = None
474
            for option in build_options:
475
                if option["name"] == "prefix":
476
                    installdir = option["value"]
477
                    break
478
            if installdir != str(dirs.installed):
479
                run_dir = build_dir
480
                cmd = ["meson", "setup", "--reconfigure",
481
                       "--prefix", str(dirs.installed)]
482
            else:
483
                return
484
        if args.werror:
485
            cmd += ["--werror"]
486
        if args.debug or args.release:
487
            if args.debug and args.release:
488
                raise ValueError("Set at most one of `--debug` and `--release`!")
489
            if args.debug:
490
                buildtype = 'debug'
491
                cflags_unwanted = ('-O1', '-O2', '-O3')
492
            elif args.release:
493
                buildtype = 'release'
494
                cflags_unwanted = ('-O0', '-O1', '-O2')
495
            cmd += [f"-Dbuildtype={buildtype}"]
496
            if 'CFLAGS' in os.environ.keys():
497
                # Check that CFLAGS doesn't contain something that supersedes -O0
498
                # for a plain debug build (conda envs tend to set -O2)
499
                cflags = os.environ['CFLAGS'].split()
500
                for flag in cflags_unwanted:
501
                    if flag in cflags:
502
                        raise ValueError(f"A {buildtype} build isn't possible, "
503
                                         f"because CFLAGS contains `{flag}`."
504
                                          "Please also check CXXFLAGS and FFLAGS.")
505
        if args.gcov:
506
            cmd += ['-Db_coverage=true']
507
        if args.asan:
508
            cmd += ['-Db_sanitize=address,undefined']
509
        if args.setup_args:
510
            cmd += [str(arg) for arg in args.setup_args]
511
        if args.with_accelerate:
512
            # on a mac you probably want to use accelerate over scipy_openblas
513
            cmd += ["-Dblas=accelerate"]
514
        elif args.with_scipy_openblas:
515
            cls.configure_scipy_openblas()
516
            env['PKG_CONFIG_PATH'] = os.pathsep.join([
517
                    os.getcwd(),
518
                    env.get('PKG_CONFIG_PATH', '')
519
                    ])
520

521
        # Setting up meson build
522
        cmd_str = ' '.join([str(p) for p in cmd])
523
        cls.console.print(f"{EMOJI.cmd} [cmd] {cmd_str}")
524
        ret = subprocess.call(cmd, env=env, cwd=run_dir)
525
        if ret == 0:
526
            print("Meson build setup OK")
527
        else:
528
            print("Meson build setup failed!")
529
            sys.exit(1)
530
        return env
531

532
    @classmethod
533
    def build_project(cls, dirs, args, env):
534
        """
535
        Build a dev version of the project.
536
        """
537
        cmd = ["ninja", "-C", str(dirs.build)]
538
        if args.parallel is None:
539
            # Use number of physical cores rather than ninja's default of 2N+2,
540
            # to avoid out of memory issues (see gh-17941 and gh-18443)
541
            n_cores = cpu_count(only_physical_cores=True)
542
            cmd += [f"-j{n_cores}"]
543
        else:
544
            cmd += ["-j", str(args.parallel)]
545

546
        # Building with ninja-backend
547
        cmd_str = ' '.join([str(p) for p in cmd])
548
        cls.console.print(f"{EMOJI.cmd} [cmd] {cmd_str}")
549
        ret = subprocess.call(cmd, env=env, cwd=dirs.root)
550

551
        if ret == 0:
552
            print("Build OK")
553
        else:
554
            print("Build failed!")
555
            sys.exit(1)
556

557
    @classmethod
558
    def install_project(cls, dirs, args):
559
        """
560
        Installs the project after building.
561
        """
562
        if dirs.installed.exists():
563
            non_empty = len(os.listdir(dirs.installed))
564
            if non_empty and not dirs.site.exists():
565
                raise RuntimeError("Can't install in non-empty directory: "
566
                                   f"'{dirs.installed}'")
567
        cmd = ["meson", "install", "-C", args.build_dir,
568
               "--only-changed", "--tags", args.tags]
569
        log_filename = dirs.root / 'meson-install.log'
570
        start_time = datetime.datetime.now()
571
        cmd_str = ' '.join([str(p) for p in cmd])
572
        cls.console.print(f"{EMOJI.cmd} [cmd] {cmd_str}")
573
        if args.show_build_log:
574
            ret = subprocess.call(cmd, cwd=dirs.root)
575
        else:
576
            print("Installing, see meson-install.log...")
577
            with open(log_filename, 'w') as log:
578
                p = subprocess.Popen(cmd, stdout=log, stderr=log,
579
                                     cwd=dirs.root)
580

581
            try:
582
                # Wait for it to finish, and print something to indicate the
583
                # process is alive, but only if the log file has grown (to
584
                # allow continuous integration environments to kill a hanging
585
                # process accurately if it produces no output)
586
                last_blip = time.time()
587
                last_log_size = os.stat(log_filename).st_size
588
                while p.poll() is None:
589
                    time.sleep(0.5)
590
                    if time.time() - last_blip > 60:
591
                        log_size = os.stat(log_filename).st_size
592
                        if log_size > last_log_size:
593
                            elapsed = datetime.datetime.now() - start_time
594
                            print(f"    ... installation in progress ({elapsed} "
595
                                  "elapsed)")
596
                            last_blip = time.time()
597
                            last_log_size = log_size
598

599
                ret = p.wait()
600
            except:  # noqa: E722
601
                p.terminate()
602
                raise
603
        elapsed = datetime.datetime.now() - start_time
604

605
        if ret != 0:
606
            if not args.show_build_log:
607
                with open(log_filename) as f:
608
                    print(f.read())
609
            print(f"Installation failed! ({elapsed} elapsed)")
610
            sys.exit(1)
611

612
        # ignore everything in the install directory.
613
        with open(dirs.installed / ".gitignore", "w") as f:
614
            f.write("*")
615

616
        if sys.platform == "cygwin":
617
            rebase_cmd = ["/usr/bin/rebase", "--database", "--oblivious"]
618
            rebase_cmd.extend(Path(dirs.installed).glob("**/*.dll"))
619
            subprocess.check_call(rebase_cmd)
620

621
        print("Installation OK")
622
        return
623

624
    @classmethod
625
    def configure_scipy_openblas(cls, blas_variant='32'):
626
        """Create scipy-openblas.pc and scipy/_distributor_init_local.py
627

628
        Requires a pre-installed scipy-openblas32 wheel from PyPI.
629
        """
630
        basedir = os.getcwd()
631
        pkg_config_fname = os.path.join(basedir, "scipy-openblas.pc")
632

633
        if os.path.exists(pkg_config_fname):
634
            return None
635

636
        module_name = f"scipy_openblas{blas_variant}"
637
        try:
638
            openblas = importlib.import_module(module_name)
639
        except ModuleNotFoundError:
640
            raise RuntimeError(f"Importing '{module_name}' failed. "
641
                               "Make sure it is installed and reachable "
642
                               "by the current Python executable. You can "
643
                               f"install it via 'pip install {module_name}'.")
644

645
        local = os.path.join(basedir, "scipy", "_distributor_init_local.py")
646
        with open(local, "w", encoding="utf8") as fid:
647
            fid.write(f"import {module_name}\n")
648

649
        with open(pkg_config_fname, "w", encoding="utf8") as fid:
650
            fid.write(openblas.get_pkg_config())
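        # Note (descriptive): setup_build() above prepends the current working
        # directory to PKG_CONFIG_PATH, so `meson setup` can pick up the
        # scipy-openblas.pc file written here.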
651

652
    @classmethod
653
    def run(cls, add_path=False, **kwargs):
654
        kwargs.update(cls.ctx.get(kwargs))
655
        Args = namedtuple('Args', [k for k in kwargs.keys()])
656
        args = Args(**kwargs)
657

658
        cls.console = Console(theme=console_theme)
659
        dirs = Dirs(args)
660
        if args.no_build:
661
            print("Skipping build")
662
        else:
663
            env = cls.setup_build(dirs, args)
664
            cls.build_project(dirs, args, env)
665
            cls.install_project(dirs, args)
666

667
        # add site to sys.path
668
        if add_path:
669
            dirs.add_sys_path()
670

671

672
@cli.cls_cmd('test')
673
class Test(Task):
674
    """:wrench: Run tests.
675

676
    \b
677
    ```python
678
    Examples:
679

680
    $ python dev.py test -s {SAMPLE_SUBMODULE}
681
    $ python dev.py test -t scipy.optimize.tests.test_minimize_constrained
682
    $ python dev.py test -s cluster -m full --durations 20
683
    $ python dev.py test -s stats -- --tb=line  # `--` passes next args to pytest
684
    $ python dev.py test -b numpy -b pytorch -s cluster
685
    ```
686
    """
687
    ctx = CONTEXT
688

689
    verbose = Option(
690
        ['--verbose', '-v'], default=False, is_flag=True,
691
        help="more verbosity")
692
    # removed doctests as currently not supported by _lib/_testutils.py
693
    # doctests = Option(['--doctests'], default=False)
694
    coverage = Option(
695
        ['--coverage', '-c'], default=False, is_flag=True,
696
        help=("report coverage of project code. "
697
              "HTML output goes under build/coverage"))
698
    durations = Option(
699
        ['--durations', '-d'], default=None, metavar="NUM_TESTS",
700
        help="Show timing for the given number of slowest tests"
701
    )
702
    submodule = Option(
703
        ['--submodule', '-s'], default=None, metavar='MODULE_NAME',
704
        help="Submodule whose tests to run (cluster, constants, ...)")
705
    tests = Option(
706
        ['--tests', '-t'], default=None, multiple=True, metavar='TESTS',
707
        help='Specify tests to run')
708
    mode = Option(
709
        ['--mode', '-m'], default='fast', metavar='MODE', show_default=True,
710
        help=("'fast', 'full', or something that could be passed to "
711
              "`pytest -m` as a marker expression"))
712
    parallel = Option(
713
        ['--parallel', '-j'], default=1, metavar='N_JOBS',
714
        help="Number of parallel jobs for testing"
715
    )
716
    array_api_backend = Option(
717
        ['--array-api-backend', '-b'], default=None, metavar='ARRAY_BACKEND',
718
        multiple=True,
719
        help=(
720
            "Array API backend "
721
            "('all', 'numpy', 'pytorch', 'cupy', 'array_api_strict', 'jax.numpy')."
722
        )
723
    )
724
    # Argument can't have `help=`; used to consume all of `-- arg1 arg2 arg3`
725
    pytest_args = Argument(
726
        ['pytest_args'], nargs=-1, metavar='PYTEST-ARGS', required=False
727
    )
728

729
    TASK_META = {
730
        'task_dep': ['build'],
731
    }
732

733
    @classmethod
734
    def scipy_tests(cls, args, pytest_args):
735
        dirs = Dirs(args)
736
        dirs.add_sys_path()
737
        print(f"SciPy from development installed path at: {dirs.site}")
738

739
        # FIXME: support pos-args with doit
740
        extra_argv = list(pytest_args[:]) if pytest_args else []
741
        if extra_argv and extra_argv[0] == '--':
742
            extra_argv = extra_argv[1:]
743

744
        if args.coverage:
745
            dst_dir = dirs.root / args.build_dir / 'coverage'
746
            fn = dst_dir / 'coverage_html.js'
747
            if dst_dir.is_dir() and fn.is_file():
748
                shutil.rmtree(dst_dir)
749
            extra_argv += ['--cov-report=html:' + str(dst_dir)]
750
            shutil.copyfile(dirs.root / '.coveragerc',
751
                            dirs.site / '.coveragerc')
752

753
        if args.durations:
754
            extra_argv += ['--durations', args.durations]
755

756
        # convert options to test selection
757
        if args.submodule:
758
            tests = [PROJECT_MODULE + "." + args.submodule]
759
        elif args.tests:
760
            tests = args.tests
761
        else:
762
            tests = None
763

764
        if len(args.array_api_backend) != 0:
765
            os.environ['SCIPY_ARRAY_API'] = json.dumps(list(args.array_api_backend))
766

767
        runner, version, mod_path = get_test_runner(PROJECT_MODULE)
768
        # FIXME: changing CWD is not a good practice
769
        with working_dir(dirs.site):
770
            print(f"Running tests for {PROJECT_MODULE} version:{version}, "
771
                  f"installed at:{mod_path}")
772
            # runner verbosity - convert bool to int
773
            verbose = int(args.verbose) + 1
774
            result = runner(  # scipy._lib._testutils:PytestTester
775
                args.mode,
776
                verbose=verbose,
777
                extra_argv=extra_argv,
778
                doctests=False,
779
                coverage=args.coverage,
780
                tests=tests,
781
                parallel=args.parallel)
782
        return result
783

784
    @classmethod
785
    def run(cls, pytest_args, **kwargs):
786
        """run unit-tests"""
787
        kwargs.update(cls.ctx.get())
788
        Args = namedtuple('Args', [k for k in kwargs.keys()])
789
        args = Args(**kwargs)
790
        return cls.scipy_tests(args, pytest_args)
791

792

793
@cli.cls_cmd('smoke-docs')
794
class SmokeDocs(Task):
795
    # XXX This essentially is a copy-paste of the Test class. Consider de-duplicating.
796
    ctx = CONTEXT
797

798
    verbose = Option(
799
        ['--verbose', '-v'], default=False, is_flag=True,
800
        help="more verbosity")
801
    durations = Option(
802
        ['--durations', '-d'], default=None, metavar="NUM_TESTS",
803
        help="Show timing for the given number of slowest tests"
804
    )
805
    submodule = Option(
806
        ['--submodule', '-s'], default=None, metavar='MODULE_NAME',
807
        help="Submodule whose tests to run (cluster, constants, ...)")
808
    tests = Option(
809
        ['--tests', '-t'], default=None, multiple=True, metavar='TESTS',
810
        help='Specify tests to run')
811
    parallel = Option(
812
        ['--parallel', '-j'], default=1, metavar='N_JOBS',
813
        help="Number of parallel jobs for testing"
814
    )
815
    # Argument can't have `help=`; used to consume all of `-- arg1 arg2 arg3`
816
    pytest_args = Argument(
817
        ['pytest_args'], nargs=-1, metavar='PYTEST-ARGS', required=False
818
    )
819

820
    TASK_META = {
821
        'task_dep': ['build'],
822
    }
823

824
    @classmethod
825
    def scipy_tests(cls, args, pytest_args):
826
        dirs = Dirs(args)
827
        dirs.add_sys_path()
828
        print(f"SciPy from development installed path at: {dirs.site}")
829

830
        # prevent obscure error later; cf https://github.com/numpy/numpy/pull/26691/
831
        if not importlib.util.find_spec("scipy_doctest"):
832
            raise ModuleNotFoundError("Please install scipy-doctest")
833

834
        # FIXME: support pos-args with doit
835
        extra_argv = list(pytest_args[:]) if pytest_args else []
836
        if extra_argv and extra_argv[0] == '--':
837
            extra_argv = extra_argv[1:]
838

839
        if args.durations:
840
            extra_argv += ['--durations', args.durations]
841

842
        # convert options to test selection
843
        if args.submodule:
844
            tests = [PROJECT_MODULE + "." + args.submodule]
845
        elif args.tests:
846
            tests = args.tests
847
        else:
848
            tests = None
849

850
        # Request doctesting; use strategy=api unless -t path/to/specific/file
851
        # also switch off assertion rewriting: not useful for doctests
852
        extra_argv += ["--doctest-modules", "--assert=plain"]
853
        if not args.tests:
854
            extra_argv += ['--doctest-collect=api']
855

856
        runner, version, mod_path = get_test_runner(PROJECT_MODULE)
857
        # FIXME: changing CWD is not a good practice
858
        with working_dir(dirs.site):
859
            print(f"Running tests for {PROJECT_MODULE} version:{version}, "
860
                  f"installed at:{mod_path}")
861
            # runner verbosity - convert bool to int
862
            verbose = int(args.verbose) + 1
863
            result = runner(  # scipy._lib._testutils:PytestTester
864
                "fast",
865
                verbose=verbose,
866
                extra_argv=extra_argv,
867
                doctests=True,
868
                coverage=False,
869
                tests=tests,
870
                parallel=args.parallel)
871
        return result
872

873
    @classmethod
874
    def run(cls, pytest_args, **kwargs):
875
        """run unit-tests"""
876
        kwargs.update(cls.ctx.get())
877
        Args = namedtuple('Args', [k for k in kwargs.keys()])
878
        args = Args(**kwargs)
879
        return cls.scipy_tests(args, pytest_args)
880

881

882
@cli.cls_cmd('smoke-tutorials')
883
class SmokeTutorials(Task):
884
    """:wrench: Run smoke-tests on tutorial files."""
885
    ctx = CONTEXT
886

887
    tests = Option(
888
        ['--tests', '-t'], default=None, multiple=True, metavar='TESTS',
889
        help='Specify *rst files to smoke test')
890
    verbose = Option(
891
        ['--verbose', '-v'], default=False, is_flag=True, help="verbosity")
892

893
    pytest_args = Argument(
894
        ['pytest_args'], nargs=-1, metavar='PYTEST-ARGS', required=False
895
    )
896

897
    @classmethod
898
    def task_meta(cls, **kwargs):
899
        kwargs.update(cls.ctx.get())
900
        Args = namedtuple('Args', [k for k in kwargs.keys()])
901
        args = Args(**kwargs)
902
        dirs = Dirs(args)
903

904
        cmd = ['pytest']
905
        if args.tests:
906
            cmd += list(args.tests)
907
        else:
908
            cmd += ['doc/source/tutorial', '--doctest-glob=*rst']
909
        if args.verbose:
910
            cmd += ['-v']
911

912
        pytest_args = kwargs.pop('pytest_args', None)
913
        extra_argv = list(pytest_args[:]) if pytest_args else []
914
        if extra_argv and extra_argv[0] == '--':
915
            extra_argv = extra_argv[1:]
916
        cmd += extra_argv
917

918
        cmd_str = ' '.join(cmd)
919
        return {
920
            'actions': [f'env PYTHONPATH={dirs.site} {cmd_str}'],
921
            'task_dep': ['build'],
922
            'io': {'capture': False},
923
        }
924

925

926
@cli.cls_cmd('bench')
927
class Bench(Task):
928
    """:wrench: Run benchmarks.
929

930
    \b
931
    ```python
932
     Examples:
933

934
    $ python dev.py bench -t integrate.SolveBVP
935
    $ python dev.py bench -t linalg.Norm
936
    $ python dev.py bench --compare main
937
    ```
938
    """
939
    ctx = CONTEXT
940
    TASK_META = {
941
        'task_dep': ['build'],
942
    }
943
    submodule = Option(
944
        ['--submodule', '-s'], default=None, metavar='SUBMODULE',
945
        help="Submodule whose tests to run (cluster, constants, ...)")
946
    tests = Option(
947
        ['--tests', '-t'], default=None, multiple=True,
948
        metavar='TESTS', help='Specify tests to run')
949
    compare = Option(
950
        ['--compare', '-c'], default=None, metavar='COMPARE', multiple=True,
951
        help=(
952
            "Compare benchmark results of current HEAD to BEFORE. "
953
            "Use an additional --bench COMMIT to override HEAD with COMMIT. "
954
            "Note that you need to commit your changes first!"))
955

956
    @staticmethod
957
    def run_asv(dirs, cmd):
958
        EXTRA_PATH = ['/usr/lib/ccache', '/usr/lib/f90cache',
959
                      '/usr/local/lib/ccache', '/usr/local/lib/f90cache']
960
        bench_dir = dirs.root / 'benchmarks'
961
        sys.path.insert(0, str(bench_dir))
962
        # Always use ccache, if installed
963
        env = dict(os.environ)
964
        env['PATH'] = os.pathsep.join(EXTRA_PATH +
965
                                      env.get('PATH', '').split(os.pathsep))
966
        # Control BLAS/LAPACK threads
967
        env['OPENBLAS_NUM_THREADS'] = '1'
968
        env['MKL_NUM_THREADS'] = '1'
969

970
        # Limit memory usage
971
        from benchmarks.common import set_mem_rlimit
972
        try:
973
            set_mem_rlimit()
974
        except (ImportError, RuntimeError):
975
            pass
976
        try:
977
            return subprocess.call(cmd, env=env, cwd=bench_dir)
978
        except OSError as err:
979
            if err.errno == errno.ENOENT:
980
                cmd_str = " ".join(cmd)
981
                print(f"Error when running '{cmd_str}': {err}\n")
982
                print("You need to install Airspeed Velocity "
983
                      "(https://airspeed-velocity.github.io/asv/)")
984
                print("to run Scipy benchmarks")
985
                return 1
986
            raise
987

988
    @classmethod
989
    def scipy_bench(cls, args):
990
        dirs = Dirs(args)
991
        dirs.add_sys_path()
992
        print(f"SciPy from development installed path at: {dirs.site}")
993
        with working_dir(dirs.site):
994
            runner, version, mod_path = get_test_runner(PROJECT_MODULE)
995
            extra_argv = []
996
            if args.tests:
997
                extra_argv.append(args.tests)
998
            if args.submodule:
999
                extra_argv.append([args.submodule])
1000

1001
            bench_args = []
1002
            for a in extra_argv:
1003
                bench_args.extend(['--bench', ' '.join(str(x) for x in a)])
1004
            if not args.compare:
1005
                print(f"Running benchmarks for Scipy version {version} at {mod_path}")
1006
                cmd = ['asv', 'run', '--dry-run', '--show-stderr',
1007
                       '--python=same', '--quick'] + bench_args
1008
                retval = cls.run_asv(dirs, cmd)
1009
                sys.exit(retval)
1010
            else:
1011
                if len(args.compare) == 1:
1012
                    commit_a = args.compare[0]
1013
                    commit_b = 'HEAD'
1014
                elif len(args.compare) == 2:
1015
                    commit_a, commit_b = args.compare
1016
                else:
1017
                    print("Too many commits to compare benchmarks for")
1018
                # Check for uncommitted files
1019
                if commit_b == 'HEAD':
1020
                    r1 = subprocess.call(['git', 'diff-index', '--quiet',
1021
                                          '--cached', 'HEAD'])
1022
                    r2 = subprocess.call(['git', 'diff-files', '--quiet'])
1023
                    if r1 != 0 or r2 != 0:
1024
                        print("*" * 80)
1025
                        print("WARNING: you have uncommitted changes --- "
1026
                              "these will NOT be benchmarked!")
1027
                        print("*" * 80)
1028

1029
                # Fix commit ids (HEAD is local to current repo)
1030
                p = subprocess.Popen(['git', 'rev-parse', commit_b],
1031
                                     stdout=subprocess.PIPE)
1032
                out, err = p.communicate()
1033
                commit_b = out.strip()
1034

1035
                p = subprocess.Popen(['git', 'rev-parse', commit_a],
1036
                                     stdout=subprocess.PIPE)
1037
                out, err = p.communicate()
1038
                commit_a = out.strip()
1039
                cmd_compare = [
1040
                    'asv', 'continuous', '--show-stderr', '--factor', '1.05',
1041
                    '--quick', commit_a, commit_b
1042
                ] + bench_args
1043
                cls.run_asv(dirs, cmd_compare)
1044
                sys.exit(1)
1045

1046
    @classmethod
1047
    def run(cls, **kwargs):
1048
        """run benchmark"""
1049
        kwargs.update(cls.ctx.get())
1050
        Args = namedtuple('Args', [k for k in kwargs.keys()])
1051
        args = Args(**kwargs)
1052
        cls.scipy_bench(args)
1053

1054

1055
###################
1056
# linters
1057

1058
def emit_cmdstr(cmd):
1059
    """Print the command that's being run to stdout
1060

1061
    Note: cannot use this in the tasks below (yet), because as-is these command
    strings are always echoed to the console, even if the command itself isn't
    run (for example, when only the `build` dependency runs).
1064
    """
1065
    console = Console(theme=console_theme)
1066
    # The [cmd] square brackets control the font styling, typically in italics
1067
    # to differentiate it from other stdout content
1068
    console.print(f"{EMOJI.cmd} [cmd] {cmd}")
1069

1070

1071
@task_params([{"name": "fix", "default": False}])
1072
def task_lint(fix):
1073
    # Lint just the diff since branching off of main using a
1074
    # stricter configuration.
1075
    # emit_cmdstr(os.path.join('tools', 'lint.py') + ' --diff-against main')
1076
    cmd = str(Dirs().root / 'tools' / 'lint.py') + ' --diff-against=main'
1077
    if fix:
1078
        cmd += ' --fix'
1079
    return {
1080
        'basename': 'lint',
1081
        'actions': [cmd],
1082
        'doc': 'Lint only files modified since last commit (stricter rules)',
1083
    }
1084

1085
@task_params([])
1086
def task_check_python_h_first():
1087
    # Lint just the diff since branching off of main using a
1088
    # stricter configuration.
1089
    # emit_cmdstr(os.path.join('tools', 'lint.py') + ' --diff-against main')
1090
    cmd = "{!s} --diff-against=main".format(
1091
        Dirs().root / 'tools' / 'check_python_h_first.py'
1092
    )
1093
    return {
1094
        'basename': 'check_python_h_first',
1095
        'actions': [cmd],
1096
        'doc': (
1097
            'Check Python.h order only files modified since last commit '
1098
            '(stricter rules)'
1099
        ),
1100
    }
1101

1102

1103
def task_unicode_check():
1104
    # emit_cmdstr(os.path.join('tools', 'unicode-check.py'))
1105
    return {
1106
        'basename': 'unicode-check',
1107
        'actions': [str(Dirs().root / 'tools' / 'unicode-check.py')],
1108
        'doc': 'Check for disallowed Unicode characters in the SciPy Python '
1109
               'and Cython source code.',
1110
    }
1111

1112

1113
def task_check_test_name():
1114
    # emit_cmdstr(os.path.join('tools', 'check_test_name.py'))
1115
    return {
1116
        "basename": "check-testname",
1117
        "actions": [str(Dirs().root / "tools" / "check_test_name.py")],
1118
        "doc": "Check tests are correctly named so that pytest runs them."
1119
    }
1120

1121

1122
@cli.cls_cmd('lint')
1123
class Lint:
1124
    """:dash: Run linter on modified files and check for
1125
    disallowed Unicode characters and possibly-invalid test names."""
1126
    fix = Option(
1127
        ['--fix'], default=False, is_flag=True, help='Attempt to auto-fix errors'
1128
    )
1129

1130
    @classmethod
1131
    def run(cls, fix):
1132
        run_doit_task({
1133
            'lint': {'fix': fix},
1134
            'unicode-check': {},
1135
            'check-testname': {},
1136
            'check_python_h_first': {},
1137
        })
1138

1139

1140
@cli.cls_cmd('mypy')
1141
class Mypy(Task):
1142
    """:wrench: Run mypy on the codebase."""
1143
    ctx = CONTEXT
1144

1145
    TASK_META = {
1146
        'task_dep': ['build'],
1147
    }
1148

1149
    @classmethod
1150
    def run(cls, **kwargs):
1151
        kwargs.update(cls.ctx.get())
1152
        Args = namedtuple('Args', [k for k in kwargs.keys()])
1153
        args = Args(**kwargs)
1154
        dirs = Dirs(args)
1155

1156
        try:
1157
            import mypy.api
1158
        except ImportError as e:
1159
            raise RuntimeError(
1160
                "Mypy not found. Please install it by running "
1161
                "pip install -r mypy_requirements.txt from the repo root"
1162
            ) from e
1163

1164
        config = dirs.root / "mypy.ini"
1165
        check_path = PROJECT_MODULE
1166

1167
        with working_dir(dirs.site):
1168
            # By default mypy won't color the output since it isn't being
1169
            # invoked from a tty.
1170
            os.environ['MYPY_FORCE_COLOR'] = '1'
1171
            # Change to the site directory to make sure mypy doesn't pick
1172
            # up any type stubs in the source tree.
1173
            emit_cmdstr(f"mypy.api.run --config-file {config} {check_path}")
1174
            report, errors, status = mypy.api.run([
1175
                "--config-file",
1176
                str(config),
1177
                check_path,
1178
            ])
1179
        print(report, end='')
1180
        print(errors, end='', file=sys.stderr)
1181
        return status == 0
1182

1183

1184
##########################################
1185
# DOC
1186

1187
@cli.cls_cmd('doc')
1188
class Doc(Task):
1189
    """:wrench: Build documentation.
1190

1191
    TARGETS: Sphinx build targets [default: 'html']
1192

1193
    Running `python dev.py doc -j8 html` is equivalent to:
1194
    1. Execute build command (skip by passing the global `-n` option).
1195
    2. Set the PYTHONPATH environment variable
1196
       (query with `python dev.py -n show_PYTHONPATH`).
1197
    3. Run make on `doc/Makefile`, i.e.: `make -C doc -j8 TARGETS`
1198

1199
    To remove all generated documentation do: `python dev.py -n doc clean`
1200
    """
1201
    ctx = CONTEXT
1202

1203
    args = Argument(['args'], nargs=-1, metavar='TARGETS', required=False)
1204
    list_targets = Option(
1205
        ['--list-targets', '-t'], default=False, is_flag=True,
1206
        help='List doc targets',
1207
    )
1208
    parallel = Option(
1209
        ['--parallel', '-j'], default=1, metavar='N_JOBS',
1210
        help="Number of parallel jobs"
1211
    )
1212
    no_cache = Option(
1213
        ['--no-cache'], default=False, is_flag=True,
1214
        help="Forces a full rebuild of the docs. Note that this may be " + \
1215
             "needed in order to make docstring changes in C/Cython files " + \
1216
             "show up."
1217
    )
1218

1219
    @classmethod
1220
    def task_meta(cls, list_targets, parallel, no_cache, args, **kwargs):
1221
        if list_targets:  # list MAKE targets, remove default target
1222
            task_dep = []
1223
            targets = ''
1224
        else:
1225
            task_dep = ['build']
1226
            targets = ' '.join(args) if args else 'html'
1227

1228
        kwargs.update(cls.ctx.get())
1229
        Args = namedtuple('Args', [k for k in kwargs.keys()])
1230
        build_args = Args(**kwargs)
1231
        dirs = Dirs(build_args)
1232

1233
        make_params = [f'PYTHON="{sys.executable}"']
1234
        if parallel or no_cache:
1235
            sphinxopts = ""
1236
            if parallel:
1237
                sphinxopts += f"-j{parallel} "
1238
            if no_cache:
1239
                sphinxopts += "-E"
1240
            make_params.append(f'SPHINXOPTS="{sphinxopts}"')
1241

1242
        return {
1243
            'actions': [
1244
                # move to doc/ so local scipy does not get imported
1245
                (f'cd doc; env PYTHONPATH="{dirs.site}" '
1246
                 f'make {" ".join(make_params)} {targets}'),
1247
            ],
1248
            'task_dep': task_dep,
1249
            'io': {'capture': False},
1250
        }
1251

1252

1253
@cli.cls_cmd('refguide-check')
1254
class RefguideCheck(Task):
1255
    """:wrench: Run refguide check."""
1256
    ctx = CONTEXT
1257

1258
    submodule = Option(
1259
        ['--submodule', '-s'], default=None, metavar='SUBMODULE',
1260
        help="Submodule whose tests to run (cluster, constants, ...)")
1261
    verbose = Option(
1262
        ['--verbose', '-v'], default=False, is_flag=True, help="verbosity")
1263

1264
    @classmethod
1265
    def task_meta(cls, **kwargs):
1266
        kwargs.update(cls.ctx.get())
1267
        Args = namedtuple('Args', [k for k in kwargs.keys()])
1268
        args = Args(**kwargs)
1269
        dirs = Dirs(args)
1270

1271
        cmd = [f'{sys.executable}',
1272
               str(dirs.root / 'tools' / 'refguide_check.py')]
1273
        if args.verbose:
1274
            cmd += ['-vvv']
1275
        if args.submodule:
1276
            cmd += [args.submodule]
1277
        cmd_str = ' '.join(cmd)
1278
        return {
1279
            'actions': [f'env PYTHONPATH={dirs.site} {cmd_str}'],
1280
            'task_dep': ['build'],
1281
            'io': {'capture': False},
1282
        }
1283

1284

1285
##########################################
1286
# ENVS
1287

1288
@cli.cls_cmd('python')
1289
class Python:
1290
    """:wrench: Start a Python shell with PYTHONPATH set.
1291

1292
    ARGS: Arguments passed to the Python interpreter.
1293
          If not set, an interactive shell is launched.
1294

1295
    Running `python dev.py python my_script.py` is equivalent to:
1296
    1. Execute build command (skip by passing the global `-n` option).
1297
    2. Set the PYTHONPATH environment variable
1298
       (query with `python dev.py -n show_PYTHONPATH`).
1299
    3. Run interpreter: `python my_script.py`
1300
    """
1301
    ctx = CONTEXT
1302
    pythonpath = Option(
1303
        ['--pythonpath', '-p'], metavar='PYTHONPATH', default=None,
1304
        help='Paths to prepend to PYTHONPATH')
1305
    extra_argv = Argument(
1306
        ['extra_argv'], nargs=-1, metavar='ARGS', required=False)
1307

1308
    @classmethod
1309
    def _setup(cls, pythonpath, **kwargs):
1310
        vals = Build.opt_defaults()
1311
        vals.update(kwargs)
1312
        Build.run(add_path=True, **vals)
1313
        if pythonpath:
1314
            for p in reversed(pythonpath.split(os.pathsep)):
1315
                sys.path.insert(0, p)
1316

1317
    @classmethod
1318
    def run(cls, pythonpath, extra_argv=None, **kwargs):
1319
        cls._setup(pythonpath, **kwargs)
1320
        if extra_argv:
1321
            # Don't use subprocess, since we don't want to include the
1322
            # current path in PYTHONPATH.
1323
            sys.argv = extra_argv
1324
            with open(extra_argv[0]) as f:
1325
                script = f.read()
1326
            sys.modules['__main__'] = new_module('__main__')
1327
            ns = dict(__name__='__main__', __file__=extra_argv[0])
1328
            exec(script, ns)
1329
        else:
1330
            import code
1331
            code.interact()
1332

1333

1334
@cli.cls_cmd('ipython')
1335
class Ipython(Python):
1336
    """:wrench: Start IPython shell with PYTHONPATH set.
1337

1338
    Running `python dev.py ipython` is equivalent to:
1339
    1. Execute build command (skip by passing the global `-n` option).
1340
    2. Set the PYTHONPATH environment variable
1341
       (query with `python dev.py -n show_PYTHONPATH`).
1342
    3. Run the `ipython` interpreter.
1343
    """
1344
    ctx = CONTEXT
1345
    pythonpath = Python.pythonpath
1346

1347
    @classmethod
1348
    def run(cls, pythonpath, **kwargs):
1349
        cls._setup(pythonpath, **kwargs)
1350
        import IPython
1351
        IPython.embed(user_ns={})
1352

1353

1354
@cli.cls_cmd('shell')
1355
class Shell(Python):
1356
    """:wrench: Start Unix shell with PYTHONPATH set.
1357

1358
    Running `python dev.py shell` is equivalent to:
1359
    1. Execute build command (skip by passing the global `-n` option).
1360
    2. Open a new shell.
1361
    3. Set the PYTHONPATH environment variable in shell
1362
       (query with `python dev.py -n show_PYTHONPATH`).
1363
    """
1364
    ctx = CONTEXT
1365
    pythonpath = Python.pythonpath
1366
    extra_argv = Python.extra_argv
1367

1368
    @classmethod
1369
    def run(cls, pythonpath, extra_argv, **kwargs):
1370
        cls._setup(pythonpath, **kwargs)
1371
        shell = os.environ.get('SHELL', 'sh')
1372
        click.echo(f"Spawning a Unix shell '{shell}' ...")
1373
        os.execv(shell, [shell] + list(extra_argv))
1374
        sys.exit(1)
1375

1376

1377
@cli.cls_cmd('show_PYTHONPATH')
1378
class ShowDirs(Python):
1379
    """:information: Show value of the PYTHONPATH environment variable used in
1380
    this script.
1381

1382
    PYTHONPATH sets the default search path for module files for the
1383
    interpreter. Here, it includes the path to the local SciPy build
1384
    (typically `.../build-install/lib/python3.10/site-packages`).
1385

1386
    Use the global option `-n` to skip the building step, e.g.:
1387
    `python dev.py -n show_PYTHONPATH`
1388
    """
1389
    ctx = CONTEXT
1390
    pythonpath = Python.pythonpath
1391
    extra_argv = Python.extra_argv
1392

1393
    @classmethod
1394
    def run(cls, pythonpath, extra_argv, **kwargs):
1395
        cls._setup(pythonpath, **kwargs)
1396
        py_path = os.environ.get('PYTHONPATH', '')
1397
        click.echo(f"PYTHONPATH={py_path}")
1398

1399

1400
@cli.command()
1401
@click.argument('version_args', nargs=2)
1402
@click.pass_obj
1403
def notes(ctx_obj, version_args):
1404
    """:ledger: Release notes and log generation.
1405

1406
    \b
1407
    ```python
1408
     Example:
1409

1410
    $ python dev.py notes v1.7.0 v1.8.0
1411
    ```
1412
    """
1413
    if version_args:
1414
        sys.argv = version_args
1415
        log_start = sys.argv[0]
1416
        log_end = sys.argv[1]
1417
    cmd = f"python tools/write_release_and_log.py {log_start} {log_end}"
1418
    click.echo(cmd)
1419
    try:
1420
        subprocess.run([cmd], check=True, shell=True)
1421
    except subprocess.CalledProcessError:
1422
        print('Error caught: Incorrect log start or log end version')
1423

1424

1425
@cli.command()
1426
@click.argument('revision_args', nargs=2)
1427
@click.pass_obj
1428
def authors(ctx_obj, revision_args):
1429
    """:ledger: Generate list of authors who contributed within revision
1430
    interval.
1431

1432
    \b
1433
    ```python
1434
    Example:
1435

1436
    $ python dev.py authors v1.7.0 v1.8.0
1437
    ```
1438
    """
1439
    if revision_args:
1440
        sys.argv = revision_args
1441
        start_revision = sys.argv[0]
1442
        end_revision = sys.argv[1]
1443
    cmd = f"python tools/authors.py {start_revision}..{end_revision}"
1444
    click.echo(cmd)
1445
    try:
1446
        subprocess.run([cmd], check=True, shell=True)
1447
    except subprocess.CalledProcessError:
1448
        print('Error caught: Incorrect revision start or revision end')
1449

1450

1451
# The following CPU core count functions were taken from loky/backend/context.py
1452
# See https://github.com/joblib/loky
1453

1454
# Cache for the number of physical cores to avoid repeating subprocess calls.
1455
# It should not change during the lifetime of the program.
1456
physical_cores_cache = None
1457

1458

1459
def cpu_count(only_physical_cores=False):
1460
    """Return the number of CPUs the current process can use.
1461

1462
    The returned number of CPUs accounts for:
1463
     * the number of CPUs in the system, as given by
1464
       ``multiprocessing.cpu_count``;
1465
     * the CPU affinity settings of the current process
1466
       (available on some Unix systems);
1467
     * Cgroup CPU bandwidth limit (available on Linux only, typically
1468
       set by docker and similar container orchestration systems);
1469
     * the value of the LOKY_MAX_CPU_COUNT environment variable if defined.
1470
    and is given as the minimum of these constraints.
1471

1472
    If ``only_physical_cores`` is True, return the number of physical cores
1473
    instead of the number of logical cores (hyperthreading / SMT). Note that
1474
    this option is not enforced if the number of usable cores is controlled in
1475
    any other way such as: process affinity, Cgroup restricted CPU bandwidth
1476
    or the LOKY_MAX_CPU_COUNT environment variable. If the number of physical
1477
    cores is not found, return the number of logical cores.
1478

1479
    Note that on Windows, the returned number of CPUs cannot exceed 61 (or 60 for
1480
    Python < 3.10), see:
1481
    https://bugs.python.org/issue26903.
1482

1483
    It is also always greater than or equal to 1.
1484
    """
1485
    # Note: per its docstring, os.cpu_count() is allowed to return None
1486
    os_cpu_count = os.cpu_count() or 1
1487
    if sys.platform == "win32":
1488
        # On Windows, attempting to use more than 61 CPUs would result in an
1489
        # OS-level error. See https://bugs.python.org/issue26903. According to
1490
        # https://learn.microsoft.com/en-us/windows/win32/procthread/processor-groups
1491
        # it might be possible to go beyond with a lot of extra work but this
1492
        # does not look easy.
1493
        os_cpu_count = min(os_cpu_count, _MAX_WINDOWS_WORKERS)
1494

1495
    cpu_count_user = _cpu_count_user(os_cpu_count)
1496
    aggregate_cpu_count = max(min(os_cpu_count, cpu_count_user), 1)
1497

1498
    if not only_physical_cores:
1499
        return aggregate_cpu_count
1500

1501
    if cpu_count_user < os_cpu_count:
1502
        # Respect user setting
1503
        return max(cpu_count_user, 1)
1504

1505
    cpu_count_physical, exception = _count_physical_cores()
1506
    if cpu_count_physical != "not found":
1507
        return cpu_count_physical
1508

1509
    # Fallback to default behavior
1510
    if exception is not None:
1511
        # warns only the first time
1512
        warnings.warn(
1513
            "Could not find the number of physical cores for the "
1514
            f"following reason:\n{exception}\n"
1515
            "Returning the number of logical cores instead. You can "
1516
            "silence this warning by setting LOKY_MAX_CPU_COUNT to "
1517
            "the number of cores you want to use.",
1518
            stacklevel=2
1519
        )
1520
        traceback.print_tb(exception.__traceback__)
1521

1522
    return aggregate_cpu_count
1523

1524

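# Illustrative sketch (not used by the CLI): how ``cpu_count`` is typically
# consumed.  The helper name below is hypothetical; the printed values depend
# on the machine, the affinity mask, any cgroup quota and LOKY_MAX_CPU_COUNT.
def _example_cpu_count_usage():
    """Minimal sketch of querying the aggregated CPU counts."""
    # min(os count, affinity, cgroup quota, LOKY_MAX_CPU_COUNT), clipped to >= 1
    logical = cpu_count()
    # Physical cores are only reported when no other constraint applies.
    physical = cpu_count(only_physical_cores=True)
    print(f"usable logical cores:  {logical}")
    print(f"usable physical cores: {physical}")

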
def _cpu_count_cgroup(os_cpu_count):
    # Cgroup CPU bandwidth limit available in Linux since 2.6 kernel
    cpu_max_fname = "/sys/fs/cgroup/cpu.max"
    cfs_quota_fname = "/sys/fs/cgroup/cpu/cpu.cfs_quota_us"
    cfs_period_fname = "/sys/fs/cgroup/cpu/cpu.cfs_period_us"
    if os.path.exists(cpu_max_fname):
        # cgroup v2
        # https://www.kernel.org/doc/html/latest/admin-guide/cgroup-v2.html
        with open(cpu_max_fname) as fh:
            cpu_quota_us, cpu_period_us = fh.read().strip().split()
    elif os.path.exists(cfs_quota_fname) and os.path.exists(cfs_period_fname):
        # cgroup v1
        # https://www.kernel.org/doc/html/latest/scheduler/sched-bwc.html#management
        with open(cfs_quota_fname) as fh:
            cpu_quota_us = fh.read().strip()
        with open(cfs_period_fname) as fh:
            cpu_period_us = fh.read().strip()
    else:
        # No Cgroup CPU bandwidth limit (e.g. non-Linux platform)
        cpu_quota_us = "max"
        cpu_period_us = 100_000  # unused, for consistency with default values

    if cpu_quota_us == "max":
        # No active Cgroup quota on a Cgroup-capable platform
        return os_cpu_count
    else:
        cpu_quota_us = int(cpu_quota_us)
        cpu_period_us = int(cpu_period_us)
        if cpu_quota_us > 0 and cpu_period_us > 0:
            return math.ceil(cpu_quota_us / cpu_period_us)
        else:  # pragma: no cover
            # Setting a negative cpu_quota_us value is a valid way to disable
            # cgroup CPU bandwidth limits
            return os_cpu_count

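
# Illustrative sketch: the cgroup v2 arithmetic above in isolation.  A
# container whose cpu.max reads "150000 100000" (150 ms of CPU time per
# 100 ms period) is treated as ceil(150000 / 100000) == 2 usable CPUs.
# The helper below is hypothetical and only mirrors the parsing logic.
def _example_parse_cpu_max(cpu_max_line="150000 100000", os_cpu_count=8):
    """Minimal sketch of the cpu.max parsing used by ``_cpu_count_cgroup``."""
    cpu_quota_us, cpu_period_us = cpu_max_line.split()
    if cpu_quota_us == "max":
        # "max" means no quota: fall back to the OS CPU count.
        return os_cpu_count
    return math.ceil(int(cpu_quota_us) / int(cpu_period_us))
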

def _cpu_count_affinity(os_cpu_count):
    # Number of available CPUs given affinity settings
    if hasattr(os, "sched_getaffinity"):
        try:
            return len(os.sched_getaffinity(0))
        except NotImplementedError:
            pass

    # On PyPy and possibly other platforms, os.sched_getaffinity does not exist
    # or raises NotImplementedError, so let's try psutil if it is installed.
    try:
        import psutil

        p = psutil.Process()
        if hasattr(p, "cpu_affinity"):
            return len(p.cpu_affinity())

    except ImportError:  # pragma: no cover
        if (
            sys.platform == "linux"
            and os.environ.get("LOKY_MAX_CPU_COUNT") is None
        ):
            # PyPy does not implement os.sched_getaffinity on Linux which
            # can cause severe oversubscription problems. Better warn the
            # user in this particularly pathological case, which can wreak
            # havoc, typically on CI workers.
            warnings.warn(
                "Failed to inspect CPU affinity constraints on this system. "
                "Please install psutil or explicitly set LOKY_MAX_CPU_COUNT.",
                stacklevel=4
            )

    # This can happen on platforms that do not implement any kind of CPU
    # affinity, such as macOS-based platforms.
    return os_cpu_count


def _cpu_count_user(os_cpu_count):
    """Number of user defined available CPUs"""
    cpu_count_affinity = _cpu_count_affinity(os_cpu_count)

    cpu_count_cgroup = _cpu_count_cgroup(os_cpu_count)

    # User defined soft-limit passed as a loky specific environment variable.
    cpu_count_loky = int(os.environ.get("LOKY_MAX_CPU_COUNT", os_cpu_count))

    return min(cpu_count_affinity, cpu_count_cgroup, cpu_count_loky)

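
# Illustrative sketch: the user-level cap is just the minimum of the three
# constraints computed above.  For example, on a 16-CPU machine with an
# 8-CPU affinity mask, no cgroup quota and LOKY_MAX_CPU_COUNT=4, the result
# is min(8, 16, 4) == 4.  The values below are made up for the illustration.
def _example_cpu_count_user():
    """Minimal sketch of how ``_cpu_count_user`` combines its inputs."""
    cpu_count_affinity = 8    # e.g. len(os.sched_getaffinity(0))
    cpu_count_cgroup = 16     # e.g. no quota -> os_cpu_count
    cpu_count_loky = 4        # e.g. LOKY_MAX_CPU_COUNT=4
    return min(cpu_count_affinity, cpu_count_cgroup, cpu_count_loky)
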

def _count_physical_cores():
    """Return a tuple (number of physical cores, exception)

    If the number of physical cores is found, exception is set to None.
    If it has not been found, return ("not found", exception).

    The number of physical cores is cached to avoid repeating subprocess calls.
    """
    exception = None

    # First check if the value is cached
    global physical_cores_cache
    if physical_cores_cache is not None:
        return physical_cores_cache, exception

    # Not cached yet, find it
    try:
        if sys.platform == "linux":
            cpu_info = subprocess.run(
                "lscpu --parse=core".split(), capture_output=True, text=True
            )
            cpu_info = cpu_info.stdout.splitlines()
            cpu_info = {line for line in cpu_info if not line.startswith("#")}
            cpu_count_physical = len(cpu_info)
        elif sys.platform == "win32":
            cpu_info = subprocess.run(
                "wmic CPU Get NumberOfCores /Format:csv".split(),
                capture_output=True,
                text=True,
            )
            cpu_info = cpu_info.stdout.splitlines()
            cpu_info = [
                l.split(",")[1]
                for l in cpu_info
                if (l and l != "Node,NumberOfCores")
            ]
            cpu_count_physical = sum(map(int, cpu_info))
        elif sys.platform == "darwin":
            cpu_info = subprocess.run(
                "sysctl -n hw.physicalcpu".split(),
                capture_output=True,
                text=True,
            )
            cpu_info = cpu_info.stdout
            cpu_count_physical = int(cpu_info)
        else:
            raise NotImplementedError(f"unsupported platform: {sys.platform}")

        # if cpu_count_physical < 1, we did not find a valid value
        if cpu_count_physical < 1:
            raise ValueError(f"found {cpu_count_physical} physical cores < 1")

    except Exception as e:
        exception = e
        cpu_count_physical = "not found"

    # Put the result in cache
    physical_cores_cache = cpu_count_physical

    return cpu_count_physical, exception

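
# Illustrative sketch: because of the module-level cache, only the first call
# to ``_count_physical_cores`` spawns a subprocess; later calls reuse the
# cached value.  The helper name below is hypothetical.
def _example_physical_cores_cache():
    """Minimal sketch of the caching behaviour of ``_count_physical_cores``."""
    first, exc = _count_physical_cores()    # may run lscpu/wmic/sysctl
    second, _ = _count_physical_cores()     # served from physical_cores_cache
    assert first == second
    return first, exc
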

if __name__ == '__main__':
    cli()
