scipy / dev.py — 1673 lines · 56.0 KB
1#! /usr/bin/env python3
2
3'''
4Developer CLI: building (meson), tests, benchmark, etc.
5
6This file contains tasks definitions for doit (https://pydoit.org).
7And also a CLI interface using click (https://click.palletsprojects.com).
8
The CLI is ideal for project contributors, while the
doit interface is better suited for authoring the development tasks.
11
12REQUIREMENTS:
13--------------
14- see environment.yml: doit, pydevtool, click, rich-click
15
16# USAGE:
17
18## 1 - click API
19
Commands can be added using the default Click API, i.e.
21
22```
23@cli.command()
24@click.argument('extra_argv', nargs=-1)
25@click.pass_obj
26def python(ctx_obj, extra_argv):
27"""Start a Python shell with PYTHONPATH set"""
28```
29
30## 2 - class based Click command definition
31
32`CliGroup` provides an alternative class based API to create Click commands.
33
34Just use the `cls_cmd` decorator. And define a `run()` method
35
36```
37@cli.cls_cmd('test')
38class Test():
39"""Run tests"""
40
41@classmethod
42def run(cls):
43print('Running tests...')
44```
45
- Commands may make use of a Click.Group context by defining a `ctx` class attribute
- Command options are also defined as class attributes
48
49```
50@cli.cls_cmd('test')
51class Test():
52"""Run tests"""
53ctx = CONTEXT
54
55verbose = Option(
56['--verbose', '-v'], default=False, is_flag=True, help="verbosity")
57
58@classmethod
59def run(cls, **kwargs): # kwargs contains options from class and CONTEXT
60print('Running tests...')
61```
62
63## 3 - class based interface can be run as a doit task by subclassing from Task
64
65- Extra doit task metadata can be defined as class attribute TASK_META.
66- `run()` method will be used as python-action by task
67
68```
69@cli.cls_cmd('test')
70class Test(Task): # Task base class, doit will create a task
71"""Run tests"""
72ctx = CONTEXT
73
74TASK_META = {
75'task_dep': ['build'],
76}
77
78@classmethod
79def run(cls, **kwargs):
80pass
81```
82
83## 4 - doit tasks with cmd-action "shell" or dynamic metadata
84
85Define method `task_meta()` instead of `run()`:
86
87```
88@cli.cls_cmd('refguide-check')
89class RefguideCheck(Task):
90@classmethod
91def task_meta(cls, **kwargs):
92return {
93```
94
95'''
96
97import os98import subprocess99import sys100import warnings101import shutil102import json103import datetime104import time105import importlib106import importlib.util107import errno108import contextlib109from sysconfig import get_path110import math111import traceback112from concurrent.futures.process import _MAX_WINDOWS_WORKERS113
114from pathlib import Path115from collections import namedtuple116from types import ModuleType as new_module117from dataclasses import dataclass118
119import click120from click import Option, Argument121from doit.cmd_base import ModuleTaskLoader122from doit.reporter import ZeroReporter123from doit.exceptions import TaskError124from doit.api import run_tasks125from doit import task_params126from pydevtool.cli import UnifiedContext, CliGroup, Task127from rich.console import Console128from rich.panel import Panel129from rich.theme import Theme130from rich_click import rich_click131
# Global configuration for doit (task runner).
DOIT_CONFIG = {
    'verbosity': 2,
    'minversion': '0.36.0',
}


# Style used when echoing shell commands to the console.
console_theme = Theme({
    "cmd": "italic gray50",
})

# Windows consoles may not render emoji; fall back to a plain prefix.
if sys.platform == 'win32':
    class EMOJI:
        cmd = ">"
else:
    class EMOJI:
        cmd = ":computer:"


# rich-click presentation settings for the `dev.py` help output.
rich_click.STYLE_ERRORS_SUGGESTION = "yellow italic"
rich_click.SHOW_ARGUMENTS = True
rich_click.GROUP_ARGUMENTS_OPTIONS = False
rich_click.SHOW_METAVARS_COLUMN = True
rich_click.USE_MARKDOWN = True
rich_click.OPTION_GROUPS = {
    "dev.py": [
        {
            "name": "Options",
            "options": [
                "--help", "--build-dir", "--no-build", "--install-prefix"],
        },
    ],
    "dev.py test": [
        {
            "name": "Options",
            "options": ["--help", "--verbose", "--parallel", "--coverage",
                        "--durations"],
        },
        {
            "name": "Options: test selection",
            "options": ["--submodule", "--tests", "--mode"],
        },
    ],
}
# Grouping of sub-commands shown in `python dev.py --help`.
rich_click.COMMAND_GROUPS = {
    "dev.py": [
        {
            "name": "build & testing",
            "commands": ["build", "test"],
        },
        {
            "name": "static checkers",
            "commands": ["lint", "mypy"],
        },
        {
            "name": "environments",
            "commands": ["shell", "python", "ipython", "show_PYTHONPATH"],
        },
        {
            "name": "documentation",
            "commands": ["doc", "refguide-check", "smoke-docs", "smoke-tutorial"],
        },
        {
            "name": "release",
            "commands": ["notes", "authors"],
        },
        {
            "name": "benchmarking",
            "commands": ["bench"],
        },
    ]
}
205
class ErrorOnlyReporter(ZeroReporter):
    """doit reporter that stays silent unless something fails."""
    desc = """Report errors only"""

    def runtime_error(self, msg):
        """Print a doit runtime error message in red.

        :param msg: (str) error text supplied by doit.
        """
        console = Console()
        # BUG FIX: previously printed the literal string "msg" instead of
        # interpolating the actual error message.
        console.print(f"[red bold] {msg}")

    def add_failure(self, task, fail_info):
        """Report a failed task, including its traceback when available.

        :param task: the doit task that failed.
        :param fail_info: failure object; `TaskError` gets a summary line,
            and any captured traceback is rendered in a red panel.
        """
        console = Console()
        if isinstance(fail_info, TaskError):
            console.print(f'[red]Task Error - {task.name}'
                          f' => {fail_info.message}')
        if fail_info.traceback:
            console.print(Panel(
                "".join(fail_info.traceback),
                title=f"{task.name}",
                subtitle=fail_info.message,
                border_style="red",
            ))
226
# Options shared by every dev.py sub-command; injected into commands that
# declare `ctx = CONTEXT` (see module docstring, section 2).
CONTEXT = UnifiedContext({
    'build_dir': Option(
        ['--build-dir'], metavar='BUILD_DIR',
        default='build', show_default=True,
        help=':wrench: Relative path to the build directory.'),
    'no_build': Option(
        ["--no-build", "-n"], default=False, is_flag=True,
        help=(":wrench: Do not build the project"
              " (note event python only modification require build).")),
    'install_prefix': Option(
        ['--install-prefix'], default=None, metavar='INSTALL_DIR',
        help=(":wrench: Relative path to the install directory."
              " Default is <build-dir>-install.")),
})
242
def run_doit_task(tasks):
    """Execute the given doit tasks with the error-only reporter.

    :param tasks: (dict) task_name -> {options}
    """
    task_loader = ModuleTaskLoader(globals())
    extra = {
        'GLOBAL': {
            'verbosity': 2,
            'reporter': ErrorOnlyReporter,
        },
    }
    return run_tasks(task_loader, tasks, extra_config=extra)
254
class CLI(CliGroup):
    # Wire the shared option context and the doit runner into the
    # pydevtool Click group machinery.
    context = CONTEXT
    run_doit_task = run_doit_task
259
@click.group(cls=CLI)
@click.pass_context
def cli(ctx, **kwargs):
    """Developer Tool for SciPy

    \bCommands that require a built/installed instance are marked with :wrench:.


    \b**python dev.py --build-dir my-build test -s stats**

    """
    # Stash the global options (build-dir, no-build, install-prefix) on the
    # Click context so every sub-command can retrieve them via CONTEXT.
    CLI.update_context(ctx, kwargs)
273
# Top-level package that gets built/tested.
PROJECT_MODULE = "scipy"
# Files expected at the repository root; used to sanity-check the CWD
# before attempting a build.
PROJECT_ROOT_FILES = ['scipy', 'LICENSE.txt', 'meson.build']
277
@dataclass
class Dirs:
    """
    root:
        Directory where src, build config and tools are located
        (and this file)
    build:
        Directory where build output files (i.e. *.o) are saved
    install:
        Directory where .so from build and .py from src are put together.
    site:
        Directory where the built SciPy version was installed.
        This is a custom prefix, followed by a relative path matching
        the one the system would use for the site-packages of the active
        Python interpreter.
    """
    # all paths are absolute
    root: Path
    build: Path
    installed: Path
    site: Path  # <install>/lib/python<version>/site-packages

    def __init__(self, args=None):
        """:params args: object like Context(build_dir, install_prefix)"""
        self.root = Path(__file__).parent.absolute()
        if not args:
            # Without args only `root` is usable (e.g. for the lint tasks).
            return

        self.build = Path(args.build_dir).resolve()
        if args.install_prefix:
            self.installed = Path(args.install_prefix).resolve()
        else:
            # Default install dir sits next to the build dir:
            # build/ -> build-install/
            self.installed = self.build.parent / (self.build.stem + "-install")

        # relative path for site-packages with py version
        # i.e. 'lib/python3.10/site-packages'
        self.site = self.get_site_packages()

    def add_sys_path(self):
        """Add site dir to sys.path / PYTHONPATH"""
        site_dir = str(self.site)
        sys.path.insert(0, site_dir)
        os.environ['PYTHONPATH'] = \
            os.pathsep.join((site_dir, os.environ.get('PYTHONPATH', '')))

    def get_site_packages(self):
        """
        Depending on whether we have debian python or not,
        return dist_packages path or site_packages path.
        """
        if sys.version_info >= (3, 12):
            plat_path = Path(get_path('platlib'))
        else:
            # distutils is required to infer meson install path
            # for python < 3.12 in debian patched python
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore", category=DeprecationWarning)
                from distutils import dist
                from distutils.command.install import INSTALL_SCHEMES
            if 'deb_system' in INSTALL_SCHEMES:
                # debian patched python in use
                install_cmd = dist.Distribution().get_command_obj('install')
                install_cmd.select_scheme('deb_system')
                install_cmd.finalize_options()
                plat_path = Path(install_cmd.install_platlib)
            else:
                plat_path = Path(get_path('platlib'))
        # Re-root the interpreter's platlib path under our custom prefix.
        return self.installed / plat_path.relative_to(sys.exec_prefix)
347
@contextlib.contextmanager
def working_dir(new_dir):
    """Temporarily change the process CWD to `new_dir`; restore it on exit."""
    previous = os.getcwd()
    os.chdir(new_dir)
    try:
        yield
    finally:
        os.chdir(previous)
357
def import_module_from_path(mod_name, mod_path):
    """Load and return the module named `mod_name` from the file `mod_path`."""
    module_spec = importlib.util.spec_from_file_location(mod_name, mod_path)
    module = importlib.util.module_from_spec(module_spec)
    module_spec.loader.exec_module(module)
    return module
365
def get_test_runner(project_module):
    """
    get Test Runner from locally installed/built project

    :param project_module: (str) importable name of the built package.
    :return: tuple of (test runner callable, version string, absolute
        directory containing the installed package).
    """
    __import__(project_module)
    # scipy._lib._testutils:PytestTester
    mod = sys.modules[project_module]
    test = mod.test
    version = mod.__version__
    # FIX: os.path.join() with a single argument was a no-op wrapper;
    # abspath(dirname(...)) alone gives the package directory.
    mod_path = os.path.abspath(os.path.dirname(mod.__file__))
    return test, version, mod_path
378
379############
380
@cli.cls_cmd('build')
class Build(Task):
    """:wrench: Build & install package on path.

    \b
    ```shell-session
    Examples:

    $ python dev.py build --asan ;
    ASAN_OPTIONS=detect_leaks=0:symbolize=1:strict_init_order=true
    LD_PRELOAD=$(gcc --print-file-name=libasan.so)
    python dev.py test -v -t
    ./scipy/ndimage/tests/test_morphology.py -- -s
    ```
    """
    ctx = CONTEXT

    werror = Option(
        ['--werror'], default=False, is_flag=True,
        help="Treat warnings as errors")
    gcov = Option(
        ['--gcov'], default=False, is_flag=True,
        # FIX: added the missing space between the concatenated help parts.
        help="enable C code coverage via gcov (requires GCC). "
             "gcov output goes to build/**/*.gc*")
    asan = Option(
        ['--asan'], default=False, is_flag=True,
        help=("Build and run with AddressSanitizer support. "
              "Note: the build system doesn't check whether "
              "the project is already compiled with ASan. "
              "If not, you need to do a clean build (delete "
              "build and build-install directories)."))
    debug = Option(
        ['--debug', '-d'], default=False, is_flag=True, help="Debug build")
    release = Option(
        ['--release', '-r'], default=False, is_flag=True, help="Release build")
    parallel = Option(
        ['--parallel', '-j'], default=None, metavar='N_JOBS',
        help=("Number of parallel jobs for building. "
              "This defaults to the number of available physical CPU cores"))
    setup_args = Option(
        ['--setup-args', '-C'], default=[], multiple=True,
        help=("Pass along one or more arguments to `meson setup` "
              "Repeat the `-C` in case of multiple arguments."))
    show_build_log = Option(
        ['--show-build-log'], default=False, is_flag=True,
        help="Show build output rather than using a log file")
    with_scipy_openblas = Option(
        ['--with-scipy-openblas'], default=False, is_flag=True,
        help=("If set, use the `scipy-openblas32` wheel installed into the "
              "current environment as the BLAS/LAPACK to build against."))
    with_accelerate = Option(
        ['--with-accelerate'], default=False, is_flag=True,
        # FIX: option name in help was missing a dash (-with-scipy-openblas).
        help=("If set, use `Accelerate` as the BLAS/LAPACK to build against."
              " Takes precedence over --with-scipy-openblas (macOS only)")
    )
    tags = Option(
        ['--tags'], default="runtime,python-runtime,tests,devel",
        show_default=True, help="Install tags to be used by meson."
    )

    @classmethod
    def setup_build(cls, dirs, args):
        """
        Setting up meson-build

        Returns the environment dict to use for the subsequent ninja build,
        or exits the process on configuration failure.
        """
        # Sanity check: refuse to configure outside a source checkout.
        for fn in PROJECT_ROOT_FILES:
            if not (dirs.root / fn).exists():
                print("To build the project, run dev.py in "
                      "git checkout or unpacked source")
                sys.exit(1)

        env = dict(os.environ)
        cmd = ["meson", "setup", dirs.build, "--prefix", dirs.installed]
        build_dir = dirs.build
        run_dir = Path()
        if build_dir.exists() and not (build_dir / 'meson-info').exists():
            if list(build_dir.iterdir()):
                raise RuntimeError("Can't build into non-empty directory "
                                   f"'{build_dir.absolute()}'")

        if sys.platform == "cygwin":
            # Cygwin only has netlib lapack, but can link against
            # OpenBLAS rather than netlib blas at runtime. There is
            # no libopenblas-devel to enable linking against
            # openblas-specific functions or OpenBLAS Lapack
            cmd.extend(["-Dlapack=lapack", "-Dblas=blas"])

        build_options_file = (
            build_dir / "meson-info" / "intro-buildoptions.json")
        if build_options_file.exists():
            with open(build_options_file) as f:
                build_options = json.load(f)
            installdir = None
            for option in build_options:
                if option["name"] == "prefix":
                    installdir = option["value"]
                    break
            if installdir != str(dirs.installed):
                # Prefix changed since the last configure: reconfigure
                # in-place instead of a fresh setup.
                run_dir = build_dir
                cmd = ["meson", "setup", "--reconfigure",
                       "--prefix", str(dirs.installed)]
            else:
                # Already configured with the right prefix; nothing to do.
                return
        if args.werror:
            cmd += ["--werror"]
        if args.debug or args.release:
            if args.debug and args.release:
                raise ValueError("Set at most one of `--debug` and `--release`!")
            if args.debug:
                buildtype = 'debug'
                cflags_unwanted = ('-O1', '-O2', '-O3')
            elif args.release:
                buildtype = 'release'
                cflags_unwanted = ('-O0', '-O1', '-O2')
            cmd += [f"-Dbuildtype={buildtype}"]
            if 'CFLAGS' in os.environ.keys():
                # Check that CFLAGS doesn't contain something that supersedes -O0
                # for a plain debug build (conda envs tend to set -O2)
                cflags = os.environ['CFLAGS'].split()
                for flag in cflags_unwanted:
                    if flag in cflags:
                        raise ValueError(f"A {buildtype} build isn't possible, "
                                         f"because CFLAGS contains `{flag}`."
                                         "Please also check CXXFLAGS and FFLAGS.")
        if args.gcov:
            cmd += ['-Db_coverage=true']
        if args.asan:
            cmd += ['-Db_sanitize=address,undefined']
        if args.setup_args:
            cmd += [str(arg) for arg in args.setup_args]
        if args.with_accelerate:
            # on a mac you probably want to use accelerate over scipy_openblas
            cmd += ["-Dblas=accelerate"]
        elif args.with_scipy_openblas:
            cls.configure_scipy_openblas()
            env['PKG_CONFIG_PATH'] = os.pathsep.join([
                os.getcwd(),
                env.get('PKG_CONFIG_PATH', '')
            ])

        # Setting up meson build
        cmd_str = ' '.join([str(p) for p in cmd])
        cls.console.print(f"{EMOJI.cmd} [cmd] {cmd_str}")
        ret = subprocess.call(cmd, env=env, cwd=run_dir)
        if ret == 0:
            print("Meson build setup OK")
        else:
            print("Meson build setup failed!")
            sys.exit(1)
        return env

    @classmethod
    def build_project(cls, dirs, args, env):
        """
        Build a dev version of the project.
        """
        cmd = ["ninja", "-C", str(dirs.build)]
        if args.parallel is None:
            # Use number of physical cores rather than ninja's default of 2N+2,
            # to avoid out of memory issues (see gh-17941 and gh-18443)
            n_cores = cpu_count(only_physical_cores=True)
            cmd += [f"-j{n_cores}"]
        else:
            cmd += ["-j", str(args.parallel)]

        # Building with ninja-backend
        cmd_str = ' '.join([str(p) for p in cmd])
        cls.console.print(f"{EMOJI.cmd} [cmd] {cmd_str}")
        ret = subprocess.call(cmd, env=env, cwd=dirs.root)

        if ret == 0:
            print("Build OK")
        else:
            print("Build failed!")
            sys.exit(1)

    @classmethod
    def install_project(cls, dirs, args):
        """
        Installs the project after building.
        """
        if dirs.installed.exists():
            non_empty = len(os.listdir(dirs.installed))
            if non_empty and not dirs.site.exists():
                raise RuntimeError("Can't install in non-empty directory: "
                                   f"'{dirs.installed}'")
        cmd = ["meson", "install", "-C", args.build_dir,
               "--only-changed", "--tags", args.tags]
        log_filename = dirs.root / 'meson-install.log'
        start_time = datetime.datetime.now()
        cmd_str = ' '.join([str(p) for p in cmd])
        cls.console.print(f"{EMOJI.cmd} [cmd] {cmd_str}")
        if args.show_build_log:
            ret = subprocess.call(cmd, cwd=dirs.root)
        else:
            print("Installing, see meson-install.log...")
            with open(log_filename, 'w') as log:
                p = subprocess.Popen(cmd, stdout=log, stderr=log,
                                     cwd=dirs.root)

                try:
                    # Wait for it to finish, and print something to indicate the
                    # process is alive, but only if the log file has grown (to
                    # allow continuous integration environments kill a hanging
                    # process accurately if it produces no output)
                    last_blip = time.time()
                    last_log_size = os.stat(log_filename).st_size
                    while p.poll() is None:
                        time.sleep(0.5)
                        if time.time() - last_blip > 60:
                            log_size = os.stat(log_filename).st_size
                            if log_size > last_log_size:
                                elapsed = datetime.datetime.now() - start_time
                                print(f"    ... installation in progress ({elapsed} "
                                      "elapsed)")
                            last_blip = time.time()
                            last_log_size = log_size

                    ret = p.wait()
                except:  # noqa: E722
                    p.terminate()
                    raise
        elapsed = datetime.datetime.now() - start_time

        if ret != 0:
            if not args.show_build_log:
                with open(log_filename) as f:
                    print(f.read())
            print(f"Installation failed! ({elapsed} elapsed)")
            sys.exit(1)

        # ignore everything in the install directory.
        with open(dirs.installed / ".gitignore", "w") as f:
            f.write("*")

        if sys.platform == "cygwin":
            # Cygwin DLLs need their base addresses rebased after install.
            rebase_cmd = ["/usr/bin/rebase", "--database", "--oblivious"]
            rebase_cmd.extend(Path(dirs.installed).glob("**/*.dll"))
            subprocess.check_call(rebase_cmd)

        print("Installation OK")
        return

    @classmethod
    def configure_scipy_openblas(cls, blas_variant='32'):
        # FIX: this is a @classmethod; first parameter renamed self -> cls.
        """Create scipy-openblas.pc and scipy/_distributor_init_local.py

        Requires a pre-installed scipy-openblas32 wheel from PyPI.
        """
        basedir = os.getcwd()
        pkg_config_fname = os.path.join(basedir, "scipy-openblas.pc")

        if os.path.exists(pkg_config_fname):
            return None

        module_name = f"scipy_openblas{blas_variant}"
        try:
            openblas = importlib.import_module(module_name)
        except ModuleNotFoundError:
            raise RuntimeError(f"Importing '{module_name}' failed. "
                               "Make sure it is installed and reachable "
                               "by the current Python executable. You can "
                               f"install it via 'pip install {module_name}'.")

        local = os.path.join(basedir, "scipy", "_distributor_init_local.py")
        with open(local, "w", encoding="utf8") as fid:
            fid.write(f"import {module_name}\n")

        with open(pkg_config_fname, "w", encoding="utf8") as fid:
            fid.write(openblas.get_pkg_config())

    @classmethod
    def run(cls, add_path=False, **kwargs):
        """Entry point: configure, build and install, then optionally add
        the install's site-packages to sys.path."""
        kwargs.update(cls.ctx.get(kwargs))
        Args = namedtuple('Args', [k for k in kwargs.keys()])
        args = Args(**kwargs)

        cls.console = Console(theme=console_theme)
        dirs = Dirs(args)
        if args.no_build:
            print("Skipping build")
        else:
            env = cls.setup_build(dirs, args)
            cls.build_project(dirs, args, env)
            cls.install_project(dirs, args)

        # add site to sys.path
        if add_path:
            dirs.add_sys_path()
671
@cli.cls_cmd('test')
class Test(Task):
    """:wrench: Run tests.

    \b
    ```python
    Examples:

    $ python dev.py test -s {SAMPLE_SUBMODULE}
    $ python dev.py test -t scipy.optimize.tests.test_minimize_constrained
    $ python dev.py test -s cluster -m full --durations 20
    $ python dev.py test -s stats -- --tb=line # `--` passes next args to pytest
    $ python dev.py test -b numpy -b pytorch -s cluster
    ```
    """
    ctx = CONTEXT

    verbose = Option(
        ['--verbose', '-v'], default=False, is_flag=True,
        help="more verbosity")
    # removed doctests as currently not supported by _lib/_testutils.py
    # doctests = Option(['--doctests'], default=False)
    coverage = Option(
        ['--coverage', '-c'], default=False, is_flag=True,
        help=("report coverage of project code. "
              "HTML output goes under build/coverage"))
    durations = Option(
        ['--durations', '-d'], default=None, metavar="NUM_TESTS",
        help="Show timing for the given number of slowest tests"
    )
    submodule = Option(
        ['--submodule', '-s'], default=None, metavar='MODULE_NAME',
        help="Submodule whose tests to run (cluster, constants, ...)")
    tests = Option(
        ['--tests', '-t'], default=None, multiple=True, metavar='TESTS',
        help='Specify tests to run')
    mode = Option(
        ['--mode', '-m'], default='fast', metavar='MODE', show_default=True,
        help=("'fast', 'full', or something that could be passed to "
              "`pytest -m` as a marker expression"))
    parallel = Option(
        ['--parallel', '-j'], default=1, metavar='N_JOBS',
        help="Number of parallel jobs for testing"
    )
    array_api_backend = Option(
        ['--array-api-backend', '-b'], default=None, metavar='ARRAY_BACKEND',
        multiple=True,
        help=(
            "Array API backend "
            "('all', 'numpy', 'pytorch', 'cupy', 'array_api_strict', 'jax.numpy')."
        )
    )
    # Argument can't have `help=`; used to consume all of `-- arg1 arg2 arg3`
    pytest_args = Argument(
        ['pytest_args'], nargs=-1, metavar='PYTEST-ARGS', required=False
    )

    TASK_META = {
        'task_dep': ['build'],
    }

    @classmethod
    def scipy_tests(cls, args, pytest_args):
        # Make the freshly-built/installed SciPy importable first.
        dirs = Dirs(args)
        dirs.add_sys_path()
        print(f"SciPy from development installed path at: {dirs.site}")

        # FIXME: support pos-args with doit
        extra_argv = list(pytest_args[:]) if pytest_args else []
        if extra_argv and extra_argv[0] == '--':
            extra_argv = extra_argv[1:]

        if args.coverage:
            # Remove a stale HTML coverage report before generating a new one.
            dst_dir = dirs.root / args.build_dir / 'coverage'
            fn = dst_dir / 'coverage_html.js'
            if dst_dir.is_dir() and fn.is_file():
                shutil.rmtree(dst_dir)
            extra_argv += ['--cov-report=html:' + str(dst_dir)]
            shutil.copyfile(dirs.root / '.coveragerc',
                            dirs.site / '.coveragerc')

        if args.durations:
            extra_argv += ['--durations', args.durations]

        # convert options to test selection
        if args.submodule:
            tests = [PROJECT_MODULE + "." + args.submodule]
        elif args.tests:
            tests = args.tests
        else:
            tests = None

        if len(args.array_api_backend) != 0:
            # Communicate the backend selection to the test suite via env var.
            os.environ['SCIPY_ARRAY_API'] = json.dumps(list(args.array_api_backend))

        runner, version, mod_path = get_test_runner(PROJECT_MODULE)
        # FIXME: changing CWD is not a good practice
        with working_dir(dirs.site):
            print(f"Running tests for {PROJECT_MODULE} version:{version}, "
                  f"installed at:{mod_path}")
            # runner verbosity - convert bool to int
            verbose = int(args.verbose) + 1
            result = runner(  # scipy._lib._testutils:PytestTester
                args.mode,
                verbose=verbose,
                extra_argv=extra_argv,
                doctests=False,
                coverage=args.coverage,
                tests=tests,
                parallel=args.parallel)
        return result

    @classmethod
    def run(cls, pytest_args, **kwargs):
        """run unit-tests"""
        kwargs.update(cls.ctx.get())
        Args = namedtuple('Args', [k for k in kwargs.keys()])
        args = Args(**kwargs)
        return cls.scipy_tests(args, pytest_args)
792
@cli.cls_cmd('smoke-docs')
class SmokeDocs(Task):
    # XXX This essentially is a copy-paste of the Task class. Consider de-duplicating.
    ctx = CONTEXT

    verbose = Option(
        ['--verbose', '-v'], default=False, is_flag=True,
        help="more verbosity")
    durations = Option(
        ['--durations', '-d'], default=None, metavar="NUM_TESTS",
        help="Show timing for the given number of slowest tests"
    )
    submodule = Option(
        ['--submodule', '-s'], default=None, metavar='MODULE_NAME',
        help="Submodule whose tests to run (cluster, constants, ...)")
    tests = Option(
        ['--tests', '-t'], default=None, multiple=True, metavar='TESTS',
        help='Specify tests to run')
    parallel = Option(
        ['--parallel', '-j'], default=1, metavar='N_JOBS',
        help="Number of parallel jobs for testing"
    )
    # Argument can't have `help=`; used to consume all of `-- arg1 arg2 arg3`
    pytest_args = Argument(
        ['pytest_args'], nargs=-1, metavar='PYTEST-ARGS', required=False
    )

    TASK_META = {
        'task_dep': ['build'],
    }

    @classmethod
    def scipy_tests(cls, args, pytest_args):
        # Make the freshly-built/installed SciPy importable first.
        dirs = Dirs(args)
        dirs.add_sys_path()
        print(f"SciPy from development installed path at: {dirs.site}")

        # prevent obscure error later; cf https://github.com/numpy/numpy/pull/26691/
        if not importlib.util.find_spec("scipy_doctest"):
            raise ModuleNotFoundError("Please install scipy-doctest")

        # FIXME: support pos-args with doit
        extra_argv = list(pytest_args[:]) if pytest_args else []
        if extra_argv and extra_argv[0] == '--':
            extra_argv = extra_argv[1:]

        if args.durations:
            extra_argv += ['--durations', args.durations]

        # convert options to test selection
        if args.submodule:
            tests = [PROJECT_MODULE + "." + args.submodule]
        elif args.tests:
            tests = args.tests
        else:
            tests = None

        # Request doctesting; use strategy=api unless -t path/to/specific/file
        # also switch off assertion rewriting: not useful for doctests
        extra_argv += ["--doctest-modules", "--assert=plain"]
        if not args.tests:
            extra_argv += ['--doctest-collect=api']

        runner, version, mod_path = get_test_runner(PROJECT_MODULE)
        # FIXME: changing CWD is not a good practice
        with working_dir(dirs.site):
            print(f"Running tests for {PROJECT_MODULE} version:{version}, "
                  f"installed at:{mod_path}")
            # runner verbosity - convert bool to int
            verbose = int(args.verbose) + 1
            result = runner(  # scipy._lib._testutils:PytestTester
                "fast",
                verbose=verbose,
                extra_argv=extra_argv,
                doctests=True,
                coverage=False,
                tests=tests,
                parallel=args.parallel)
        return result

    @classmethod
    def run(cls, pytest_args, **kwargs):
        """run unit-tests"""
        kwargs.update(cls.ctx.get())
        Args = namedtuple('Args', [k for k in kwargs.keys()])
        args = Args(**kwargs)
        return cls.scipy_tests(args, pytest_args)
881
@cli.cls_cmd('smoke-tutorials')
class SmokeTutorials(Task):
    """:wrench: Run smoke-tests on tutorial files."""
    ctx = CONTEXT

    tests = Option(
        ['--tests', '-t'], default=None, multiple=True, metavar='TESTS',
        help='Specify *rst files to smoke test')
    verbose = Option(
        ['--verbose', '-v'], default=False, is_flag=True, help="verbosity")

    pytest_args = Argument(
        ['pytest_args'], nargs=-1, metavar='PYTEST-ARGS', required=False
    )

    @classmethod
    def task_meta(cls, **kwargs):
        # Build a doit task whose shell action runs pytest's doctest
        # collection over the tutorial *.rst files.
        kwargs.update(cls.ctx.get())
        Args = namedtuple('Args', [k for k in kwargs.keys()])
        args = Args(**kwargs)
        dirs = Dirs(args)

        cmd = ['pytest']
        if args.tests:
            cmd += list(args.tests)
        else:
            cmd += ['doc/source/tutorial', '--doctest-glob=*rst']
        if args.verbose:
            cmd += ['-v']

        # Forward everything after a literal `--` straight to pytest.
        pytest_args = kwargs.pop('pytest_args', None)
        extra_argv = list(pytest_args[:]) if pytest_args else []
        if extra_argv and extra_argv[0] == '--':
            extra_argv = extra_argv[1:]
        cmd += extra_argv

        cmd_str = ' '.join(cmd)
        return {
            'actions': [f'env PYTHONPATH={dirs.site} {cmd_str}'],
            'task_dep': ['build'],
            'io': {'capture': False},
        }
925
@cli.cls_cmd('bench')
class Bench(Task):
    """:wrench: Run benchmarks.

    \b
    ```python
    Examples:

    $ python dev.py bench -t integrate.SolveBVP
    $ python dev.py bench -t linalg.Norm
    $ python dev.py bench --compare main
    ```
    """
    ctx = CONTEXT
    TASK_META = {
        'task_dep': ['build'],
    }
    submodule = Option(
        ['--submodule', '-s'], default=None, metavar='SUBMODULE',
        help="Submodule whose tests to run (cluster, constants, ...)")
    tests = Option(
        ['--tests', '-t'], default=None, multiple=True,
        metavar='TESTS', help='Specify tests to run')
    compare = Option(
        ['--compare', '-c'], default=None, metavar='COMPARE', multiple=True,
        help=(
            "Compare benchmark results of current HEAD to BEFORE. "
            "Use an additional --bench COMMIT to override HEAD with COMMIT. "
            "Note that you need to commit your changes first!"))

    @staticmethod
    def run_asv(dirs, cmd):
        """Run an `asv` command from the benchmarks dir; return its exit code."""
        # Prepend compiler-cache dirs so repeated builds inside asv are fast.
        EXTRA_PATH = ['/usr/lib/ccache', '/usr/lib/f90cache',
                      '/usr/local/lib/ccache', '/usr/local/lib/f90cache']
        bench_dir = dirs.root / 'benchmarks'
        sys.path.insert(0, str(bench_dir))
        # Always use ccache, if installed
        env = dict(os.environ)
        env['PATH'] = os.pathsep.join(EXTRA_PATH +
                                      env.get('PATH', '').split(os.pathsep))
        # Control BLAS/LAPACK threads
        env['OPENBLAS_NUM_THREADS'] = '1'
        env['MKL_NUM_THREADS'] = '1'

        # Limit memory usage
        from benchmarks.common import set_mem_rlimit
        try:
            set_mem_rlimit()
        except (ImportError, RuntimeError):
            pass
        try:
            return subprocess.call(cmd, env=env, cwd=bench_dir)
        except OSError as err:
            if err.errno == errno.ENOENT:
                cmd_str = " ".join(cmd)
                print(f"Error when running '{cmd_str}': {err}\n")
                print("You need to install Airspeed Velocity "
                      "(https://airspeed-velocity.github.io/asv/)")
                print("to run Scipy benchmarks")
                return 1
            raise

    @classmethod
    def scipy_bench(cls, args):
        """Run the benchmarks, either stand-alone or comparing two commits."""
        dirs = Dirs(args)
        dirs.add_sys_path()
        print(f"SciPy from development installed path at: {dirs.site}")
        with working_dir(dirs.site):
            runner, version, mod_path = get_test_runner(PROJECT_MODULE)
            extra_argv = []
            if args.tests:
                extra_argv.append(args.tests)
            if args.submodule:
                extra_argv.append([args.submodule])

            bench_args = []
            for a in extra_argv:
                bench_args.extend(['--bench', ' '.join(str(x) for x in a)])
            if not args.compare:
                print(f"Running benchmarks for Scipy version {version} at {mod_path}")
                cmd = ['asv', 'run', '--dry-run', '--show-stderr',
                       '--python=same', '--quick'] + bench_args
                retval = cls.run_asv(dirs, cmd)
                sys.exit(retval)
            else:
                if len(args.compare) == 1:
                    commit_a = args.compare[0]
                    commit_b = 'HEAD'
                elif len(args.compare) == 2:
                    commit_a, commit_b = args.compare
                else:
                    print("Too many commits to compare benchmarks for")
                    # BUG FIX: without exiting here, commit_a/commit_b are
                    # unbound below and a confusing NameError followed.
                    sys.exit(1)
                # Check for uncommitted files
                if commit_b == 'HEAD':
                    r1 = subprocess.call(['git', 'diff-index', '--quiet',
                                          '--cached', 'HEAD'])
                    r2 = subprocess.call(['git', 'diff-files', '--quiet'])
                    if r1 != 0 or r2 != 0:
                        print("*" * 80)
                        print("WARNING: you have uncommitted changes --- "
                              "these will NOT be benchmarked!")
                        print("*" * 80)

                # Fix commit ids (HEAD is local to current repo)
                p = subprocess.Popen(['git', 'rev-parse', commit_b],
                                     stdout=subprocess.PIPE)
                out, err = p.communicate()
                commit_b = out.strip()

                p = subprocess.Popen(['git', 'rev-parse', commit_a],
                                     stdout=subprocess.PIPE)
                out, err = p.communicate()
                commit_a = out.strip()
                cmd_compare = [
                    'asv', 'continuous', '--show-stderr', '--factor', '1.05',
                    '--quick', commit_a, commit_b
                ] + bench_args
                cls.run_asv(dirs, cmd_compare)
                sys.exit(1)

    @classmethod
    def run(cls, **kwargs):
        """run benchmark"""
        kwargs.update(cls.ctx.get())
        Args = namedtuple('Args', [k for k in kwargs.keys()])
        args = Args(**kwargs)
        cls.scipy_bench(args)
1054
1055###################
1056# linters
1057
def emit_cmdstr(cmd):
    """Print the command that's being run to stdout

    Note: cannot use this in the below tasks (yet), because as is these command
    strings are always echoed to the console, even if the command isn't run
    (but for example the `build` command is run).
    """
    out = Console(theme=console_theme)
    # The [cmd] markup picks up the italic gray style from `console_theme`,
    # visually separating the echoed command from regular output.
    out.print(f"{EMOJI.cmd} [cmd] {cmd}")
1070
@task_params([{"name": "fix", "default": False}])
def task_lint(fix):
    """doit task: lint the diff against main with the stricter config."""
    # Lint just the diff since branching off of main using a
    # stricter configuration.
    # emit_cmdstr(os.path.join('tools', 'lint.py') + ' --diff-against main')
    lint_script = Dirs().root / 'tools' / 'lint.py'
    cmd = f"{lint_script} --diff-against=main"
    if fix:
        cmd = f"{cmd} --fix"
    return {
        'basename': 'lint',
        'actions': [cmd],
        'doc': 'Lint only files modified since last commit (stricter rules)',
    }
@task_params([])
def task_check_python_h_first():
    """doit task: verify Python.h include order on the diff against main."""
    # Lint just the diff since branching off of main using a
    # stricter configuration.
    # emit_cmdstr(os.path.join('tools', 'lint.py') + ' --diff-against main')
    checker = Dirs().root / 'tools' / 'check_python_h_first.py'
    return {
        'basename': 'check_python_h_first',
        'actions': [f"{checker} --diff-against=main"],
        'doc': (
            'Check Python.h order only files modified since last commit '
            '(stricter rules)'
        ),
    }
1102
def task_unicode_check():
    """doit task: scan the source tree for disallowed Unicode characters."""
    check_script = Dirs().root / 'tools' / 'unicode-check.py'
    return {
        'basename': 'unicode-check',
        'actions': [str(check_script)],
        'doc': 'Check for disallowed Unicode characters in the SciPy Python '
               'and Cython source code.',
    }
1112
def task_check_test_name():
    """doit task: verify test functions are named so pytest collects them."""
    name_script = Dirs().root / "tools" / "check_test_name.py"
    return {
        "basename": "check-testname",
        "actions": [str(name_script)],
        "doc": "Check tests are correctly named so that pytest runs them."
    }
1121
@cli.cls_cmd('lint')
class Lint:
    """:dash: Run linter on modified files and check for
    disallowed Unicode characters and possibly-invalid test names."""
    # Auto-fix mode, forwarded only to the 'lint' doit task below.
    fix = Option(
        ['--fix'], default=False, is_flag=True, help='Attempt to auto-fix errors'
    )

    @classmethod
    def run(cls, fix):
        # Fan out to the individual check tasks; only 'lint' honours --fix.
        tasks = {
            'lint': {'fix': fix},
            'unicode-check': {},
            'check-testname': {},
            'check_python_h_first': {},
        }
        run_doit_task(tasks)
1139
@cli.cls_cmd('mypy')
class Mypy(Task):
    """:wrench: Run mypy on the codebase."""
    ctx = CONTEXT

    # mypy needs the built package, so depend on the build task.
    TASK_META = {
        'task_dep': ['build'],
    }

    @classmethod
    def run(cls, **kwargs):
        """Type-check PROJECT_MODULE with mypy; return True on success."""
        kwargs.update(cls.ctx.get())
        Args = namedtuple('Args', list(kwargs))
        dirs = Dirs(Args(**kwargs))

        try:
            import mypy.api
        except ImportError as e:
            raise RuntimeError(
                "Mypy not found. Please install it by running "
                "pip install -r mypy_requirements.txt from the repo root"
            ) from e

        config = dirs.root / "mypy.ini"
        check_path = PROJECT_MODULE

        with working_dir(dirs.site):
            # By default mypy won't color the output since it isn't being
            # invoked from a tty.
            os.environ['MYPY_FORCE_COLOR'] = '1'
            # Change to the site directory to make sure mypy doesn't pick
            # up any type stubs in the source tree.
            emit_cmdstr(f"mypy.api.run --config-file {config} {check_path}")
            report, errors, status = mypy.api.run(
                ["--config-file", str(config), check_path]
            )
            print(report, end='')
            print(errors, end='', file=sys.stderr)
            return status == 0
1183
1184##########################################
1185# DOC
1186
@cli.cls_cmd('doc')
class Doc(Task):
    """:wrench: Build documentation.

    TARGETS: Sphinx build targets [default: 'html']

    Running `python dev.py doc -j8 html` is equivalent to:
    1. Execute build command (skip by passing the global `-n` option).
    2. Set the PYTHONPATH environment variable
       (query with `python dev.py -n show_PYTHONPATH`).
    3. Run make on `doc/Makefile`, i.e.: `make -C doc -j8 TARGETS`

    To remove all generated documentation do: `python dev.py -n doc clean`
    """
    ctx = CONTEXT

    args = Argument(['args'], nargs=-1, metavar='TARGETS', required=False)
    list_targets = Option(
        ['--list-targets', '-t'], default=False, is_flag=True,
        help='List doc targets',
    )
    parallel = Option(
        ['--parallel', '-j'], default=1, metavar='N_JOBS',
        help="Number of parallel jobs"
    )
    no_cache = Option(
        ['--no-cache'], default=False, is_flag=True,
        help="Forces a full rebuild of the docs. Note that this may be "
             "needed in order to make docstring changes in C/Cython files "
             "show up."
    )

    @classmethod
    def task_meta(cls, list_targets, parallel, no_cache, args, **kwargs):
        """Build the doit task dict for running Sphinx via doc/Makefile."""
        if list_targets:
            # Only listing MAKE targets: no build needed, no default target.
            task_dep = []
            targets = ''
        else:
            task_dep = ['build']
            targets = ' '.join(args) if args else 'html'

        kwargs.update(cls.ctx.get())
        Args = namedtuple('Args', list(kwargs))
        dirs = Dirs(Args(**kwargs))

        make_params = [f'PYTHON="{sys.executable}"']
        if parallel or no_cache:
            opts = []
            if parallel:
                opts.append(f"-j{parallel} ")
            if no_cache:
                # -E forces Sphinx to ignore its cached environment.
                opts.append("-E")
            make_params.append(f'SPHINXOPTS="{"".join(opts)}"')

        return {
            'actions': [
                # move to doc/ so local scipy does not get imported
                (f'cd doc; env PYTHONPATH="{dirs.site}" '
                 f'make {" ".join(make_params)} {targets}'),
            ],
            'task_dep': task_dep,
            'io': {'capture': False},
        }
1252
@cli.cls_cmd('refguide-check')
class RefguideCheck(Task):
    """:wrench: Run refguide check."""
    ctx = CONTEXT

    submodule = Option(
        ['--submodule', '-s'], default=None, metavar='SUBMODULE',
        help="Submodule whose tests to run (cluster, constants, ...)")
    verbose = Option(
        ['--verbose', '-v'], default=False, is_flag=True, help="verbosity")

    @classmethod
    def task_meta(cls, **kwargs):
        """Build the doit task dict running tools/refguide_check.py."""
        kwargs.update(cls.ctx.get())
        Args = namedtuple('Args', list(kwargs))
        args = Args(**kwargs)
        dirs = Dirs(args)

        cmd = [sys.executable, str(dirs.root / 'tools' / 'refguide_check.py')]
        if args.verbose:
            cmd.append('-vvv')
        if args.submodule:
            cmd.append(args.submodule)
        return {
            # The built package must be importable, hence the PYTHONPATH.
            'actions': [f"env PYTHONPATH={dirs.site} {' '.join(cmd)}"],
            'task_dep': ['build'],
            'io': {'capture': False},
        }
1284
1285##########################################
1286# ENVS
1287
@cli.cls_cmd('python')
class Python:
    """:wrench: Start a Python shell with PYTHONPATH set.

    ARGS: Arguments passed to the Python interpreter.
          If not set, an interactive shell is launched.

    Running `python dev.py shell my_script.py` is equivalent to:
    1. Execute build command (skip by passing the global `-n` option).
    2. Set the PYTHONPATH environment variable
       (query with `python dev.py -n show_PYTHONPATH`).
    3. Run interpreter: `python my_script.py`
    """
    ctx = CONTEXT
    pythonpath = Option(
        ['--pythonpath', '-p'], metavar='PYTHONPATH', default=None,
        help='Paths to prepend to PYTHONPATH')
    extra_argv = Argument(
        ['extra_argv'], nargs=-1, metavar='ARGS', required=False)

    @classmethod
    def _setup(cls, pythonpath, **kwargs):
        """Build the project and prepend any extra paths to sys.path."""
        vals = Build.opt_defaults()
        vals.update(kwargs)
        Build.run(add_path=True, **vals)
        if pythonpath:
            # Prepend in reverse so the first entry ends up first on sys.path.
            for path in reversed(pythonpath.split(os.pathsep)):
                sys.path.insert(0, path)

    @classmethod
    def run(cls, pythonpath, extra_argv=None, **kwargs):
        cls._setup(pythonpath, **kwargs)
        if not extra_argv:
            # No script given: drop into an interactive interpreter.
            import code
            code.interact()
            return
        # Don't use subprocess, since we don't want to include the
        # current path in PYTHONPATH.
        sys.argv = extra_argv
        with open(extra_argv[0]) as f:
            script = f.read()
        # Replace __main__ so the executed script behaves like the entry point.
        sys.modules['__main__'] = new_module('__main__')
        namespace = {'__name__': '__main__', '__file__': extra_argv[0]}
        exec(script, namespace)
1333
@cli.cls_cmd('ipython')
class Ipython(Python):
    """:wrench: Start IPython shell with PYTHONPATH set.

    Running `python dev.py ipython` is equivalent to:
    1. Execute build command (skip by passing the global `-n` option).
    2. Set the PYTHONPATH environment variable
       (query with `python dev.py -n show_PYTHONPATH`).
    3. Run the `ipython` interpreter.
    """
    ctx = CONTEXT
    pythonpath = Python.pythonpath

    @classmethod
    def run(cls, pythonpath, **kwargs):
        # Build (unless -n) and adjust sys.path, then embed IPython with a
        # clean user namespace.
        cls._setup(pythonpath, **kwargs)
        import IPython
        IPython.embed(user_ns={})
1353
@cli.cls_cmd('shell')
class Shell(Python):
    """:wrench: Start Unix shell with PYTHONPATH set.

    Running `python dev.py shell` is equivalent to:
    1. Execute build command (skip by passing the global `-n` option).
    2. Open a new shell.
    3. Set the PYTHONPATH environment variable in shell
       (query with `python dev.py -n show_PYTHONPATH`).
    """
    ctx = CONTEXT
    pythonpath = Python.pythonpath
    extra_argv = Python.extra_argv

    @classmethod
    def run(cls, pythonpath, extra_argv, **kwargs):
        cls._setup(pythonpath, **kwargs)
        # Use the user's login shell when known, falling back to plain sh.
        shell = os.environ.get('SHELL', 'sh')
        click.echo(f"Spawning a Unix shell '{shell}' ...")
        # execv replaces this process; the exit below only runs on failure.
        os.execv(shell, [shell, *extra_argv])
        sys.exit(1)
1376
@cli.cls_cmd('show_PYTHONPATH')
class ShowDirs(Python):
    """:information: Show value of the PYTHONPATH environment variable used in
    this script.

    PYTHONPATH sets the default search path for module files for the
    interpreter. Here, it includes the path to the local SciPy build
    (typically `.../build-install/lib/python3.10/site-packages`).

    Use the global option `-n` to skip the building step, e.g.:
    `python dev.py -n show_PYTHONPATH`
    """
    ctx = CONTEXT
    pythonpath = Python.pythonpath
    extra_argv = Python.extra_argv

    @classmethod
    def run(cls, pythonpath, extra_argv, **kwargs):
        # _setup exports PYTHONPATH as a side effect of the build step.
        cls._setup(pythonpath, **kwargs)
        click.echo(f"PYTHONPATH={os.environ.get('PYTHONPATH', '')}")
1399
@cli.command()
@click.argument('version_args', nargs=2)
@click.pass_obj
def notes(ctx_obj, version_args):
    """:ledger: Release notes and log generation.

    \b
    ```python
    Example:

    $ python dev.py notes v1.7.0 v1.8.0
    ```
    """
    if version_args:
        # Unpack directly instead of mutating the global sys.argv.
        log_start, log_end = version_args
        cmd = f"python tools/write_release_and_log.py {log_start} {log_end}"
        click.echo(cmd)
        try:
            # Pass the command string itself with shell=True. The previous
            # `[cmd]` (a one-element list) only worked by accident on POSIX:
            # with shell=True, extra list items become arguments to the
            # shell, not to the command.
            subprocess.run(cmd, check=True, shell=True)
        except subprocess.CalledProcessError:
            print('Error caught: Incorrect log start or log end version')
1424
@cli.command()
@click.argument('revision_args', nargs=2)
@click.pass_obj
def authors(ctx_obj, revision_args):
    """:ledger: Generate list of authors who contributed within revision
    interval.

    \b
    ```python
    Example:

    $ python dev.py authors v1.7.0 v1.8.0
    ```
    """
    if revision_args:
        # Unpack directly instead of mutating the global sys.argv.
        start_revision, end_revision = revision_args
        cmd = f"python tools/authors.py {start_revision}..{end_revision}"
        click.echo(cmd)
        try:
            # Pass the command string itself with shell=True. The previous
            # `[cmd]` (a one-element list) only worked by accident on POSIX:
            # with shell=True, extra list items become arguments to the
            # shell, not to the command.
            subprocess.run(cmd, check=True, shell=True)
        except subprocess.CalledProcessError:
            print('Error caught: Incorrect revision start or revision end')
1450
# The following CPU core count functions were taken from loky/backend/context.py
# See https://github.com/joblib/loky

# Cache for the number of physical cores to avoid repeating subprocess calls.
# It should not change during the lifetime of the program.
# Written by _count_physical_cores(); holds either an int or "not found".
physical_cores_cache = None
1458
def cpu_count(only_physical_cores=False):
    """Return the number of CPUs the current process can use.

    The result is the minimum of:
    * the number of CPUs in the system (``multiprocessing.cpu_count``);
    * the CPU affinity settings of the current process
      (available on some Unix systems);
    * the Cgroup CPU bandwidth limit (Linux only, typically set by docker
      and similar container orchestration systems);
    * the LOKY_MAX_CPU_COUNT environment variable, if defined.

    If ``only_physical_cores`` is True, return the number of physical cores
    instead of the number of logical cores (hyperthreading / SMT). This is
    not enforced when the usable-core count is already restricted by process
    affinity, Cgroup CPU bandwidth or LOKY_MAX_CPU_COUNT, nor when the
    physical count cannot be determined (logical cores are returned then).

    Note that on Windows the result cannot exceed 61 (or 60 for
    Python < 3.10), see https://bugs.python.org/issue26903.

    The result is always larger or equal to 1.
    """
    # os.cpu_count() is allowed to return None per its docstring.
    n_logical = os.cpu_count() or 1
    if sys.platform == "win32":
        # On Windows, attempting to use more than 61 CPUs would result in an
        # OS-level error (https://bugs.python.org/issue26903). Going beyond
        # would require processor groups, which is a lot of extra work.
        n_logical = min(n_logical, _MAX_WINDOWS_WORKERS)

    n_user = _cpu_count_user(n_logical)
    usable = max(min(n_logical, n_user), 1)

    if not only_physical_cores:
        return usable

    if n_user < n_logical:
        # A user-imposed limit is in effect: respect it over physical count.
        return max(n_user, 1)

    n_physical, exception = _count_physical_cores()
    if n_physical != "not found":
        return n_physical

    # Physical count unavailable: warn (first call only, via the cache in
    # _count_physical_cores) and fall back to the logical count.
    if exception is not None:
        warnings.warn(
            "Could not find the number of physical cores for the "
            f"following reason:\n{exception}\n"
            "Returning the number of logical cores instead. You can "
            "silence this warning by setting LOKY_MAX_CPU_COUNT to "
            "the number of cores you want to use.",
            stacklevel=2
        )
        traceback.print_tb(exception.__traceback__)

    return usable
1524
1525def _cpu_count_cgroup(os_cpu_count):1526# Cgroup CPU bandwidth limit available in Linux since 2.6 kernel1527cpu_max_fname = "/sys/fs/cgroup/cpu.max"1528cfs_quota_fname = "/sys/fs/cgroup/cpu/cpu.cfs_quota_us"1529cfs_period_fname = "/sys/fs/cgroup/cpu/cpu.cfs_period_us"1530if os.path.exists(cpu_max_fname):1531# cgroup v21532# https://www.kernel.org/doc/html/latest/admin-guide/cgroup-v2.html1533with open(cpu_max_fname) as fh:1534cpu_quota_us, cpu_period_us = fh.read().strip().split()1535elif os.path.exists(cfs_quota_fname) and os.path.exists(cfs_period_fname):1536# cgroup v11537# https://www.kernel.org/doc/html/latest/scheduler/sched-bwc.html#management1538with open(cfs_quota_fname) as fh:1539cpu_quota_us = fh.read().strip()1540with open(cfs_period_fname) as fh:1541cpu_period_us = fh.read().strip()1542else:1543# No Cgroup CPU bandwidth limit (e.g. non-Linux platform)1544cpu_quota_us = "max"1545cpu_period_us = 100_000 # unused, for consistency with default values1546
1547if cpu_quota_us == "max":1548# No active Cgroup quota on a Cgroup-capable platform1549return os_cpu_count1550else:1551cpu_quota_us = int(cpu_quota_us)1552cpu_period_us = int(cpu_period_us)1553if cpu_quota_us > 0 and cpu_period_us > 0:1554return math.ceil(cpu_quota_us / cpu_period_us)1555else: # pragma: no cover1556# Setting a negative cpu_quota_us value is a valid way to disable1557# cgroup CPU bandwidth limits1558return os_cpu_count1559
1560
1561def _cpu_count_affinity(os_cpu_count):1562# Number of available CPUs given affinity settings1563if hasattr(os, "sched_getaffinity"):1564try:1565return len(os.sched_getaffinity(0))1566except NotImplementedError:1567pass1568
1569# On PyPy and possibly other platforms, os.sched_getaffinity does not exist1570# or raises NotImplementedError, let's try with the psutil if installed.1571try:1572import psutil1573
1574p = psutil.Process()1575if hasattr(p, "cpu_affinity"):1576return len(p.cpu_affinity())1577
1578except ImportError: # pragma: no cover1579if (1580sys.platform == "linux"1581and os.environ.get("LOKY_MAX_CPU_COUNT") is None1582):1583# PyPy does not implement os.sched_getaffinity on Linux which1584# can cause severe oversubscription problems. Better warn the1585# user in this particularly pathological case which can wreck1586# havoc, typically on CI workers.1587warnings.warn(1588"Failed to inspect CPU affinity constraints on this system. "1589"Please install psutil or explicitly set LOKY_MAX_CPU_COUNT.",1590stacklevel=41591)1592
1593# This can happen for platforms that do not implement any kind of CPU1594# infinity such as macOS-based platforms.1595return os_cpu_count1596
1597
def _cpu_count_user(os_cpu_count):
    """Return the user-defined limit on available CPUs.

    Combines process affinity, Cgroup bandwidth limits and the loky-specific
    LOKY_MAX_CPU_COUNT environment variable (a soft limit).
    """
    env_limit = int(os.environ.get("LOKY_MAX_CPU_COUNT", os_cpu_count))
    return min(
        _cpu_count_affinity(os_cpu_count),
        _cpu_count_cgroup(os_cpu_count),
        env_limit,
    )
1609
def _count_physical_cores():
    """Return a tuple (number of physical cores, exception).

    If the number of physical cores is found, exception is set to None.
    If it has not been found, return ("not found", exception).

    The number of physical cores is cached to avoid repeating subprocess
    calls.
    """
    exception = None

    # Serve from the module-level cache when a previous call succeeded
    # (or failed — failures are cached too).
    global physical_cores_cache
    if physical_cores_cache is not None:
        return physical_cores_cache, exception

    # Not cached yet: query the platform-specific tool.
    try:
        if sys.platform == "linux":
            proc = subprocess.run(
                "lscpu --parse=core".split(), capture_output=True, text=True
            )
            # One non-comment line per logical CPU, containing its core id;
            # the number of unique ids is the physical core count.
            core_ids = {
                line for line in proc.stdout.splitlines()
                if not line.startswith("#")
            }
            cpu_count_physical = len(core_ids)
        elif sys.platform == "win32":
            proc = subprocess.run(
                "wmic CPU Get NumberOfCores /Format:csv".split(),
                capture_output=True,
                text=True,
            )
            # CSV rows are "Node,NumberOfCores"; sum cores over all sockets.
            per_socket = [
                line.split(",")[1]
                for line in proc.stdout.splitlines()
                if (line and line != "Node,NumberOfCores")
            ]
            cpu_count_physical = sum(map(int, per_socket))
        elif sys.platform == "darwin":
            proc = subprocess.run(
                "sysctl -n hw.physicalcpu".split(),
                capture_output=True,
                text=True,
            )
            cpu_count_physical = int(proc.stdout)
        else:
            raise NotImplementedError(f"unsupported platform: {sys.platform}")

        # A value < 1 means the tool output was not usable.
        if cpu_count_physical < 1:
            raise ValueError(f"found {cpu_count_physical} physical cores < 1")

    except Exception as e:
        exception = e
        cpu_count_physical = "not found"

    # Cache the outcome for subsequent calls.
    physical_cores_cache = cpu_count_physical

    return cpu_count_physical, exception
1671
# Entry point: dispatch to the click-based CLI defined above.
if __name__ == '__main__':
    cli()