diff --git a/.coveragerc b/.coveragerc index 3ababd737..e8f851dca 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,8 +1,11 @@ [run] -include = */benchbuild/* +include = benchbuild/* omit = + ./venv/* + */bin/benchbuild* */benchbuild/lib/* */benchbuild/bin/* + .venv/* branch = True parallel = True diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 43c301594..3f4b65ddc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,27 +9,27 @@ jobs: unittests: runs-on: ubuntu-latest strategy: + fail-fast: false matrix: - python-version: ["3.9", "3.10"] + python-version: ["3.11", "3.12", "3.13", "3.14"] steps: - - uses: actions/checkout@v3 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} + - name: Checkout + uses: actions/checkout@v5 - name: Install system dependencies for CI run: | - sudo apt-get install time unionfs-fuse libfuse-dev clang + sudo apt-get install time unionfs-fuse libfuse-dev clang libgit2-dev - - name: Install dependencies + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + python-version: ${{ matrix.python-version }} + enable-cache: true + + - name: Install the project run: | - python -m pip install --upgrade pip wheel - pip install . 
- pip install -r requirements.txt - pip install -r test-requirements.txt + uv sync --locked --all-extras --dev - name: Run unittests env: @@ -40,8 +40,8 @@ jobs: run: | git config --global protocol.file.allow always - pytest --cov-config=./.coveragerc --cov=benchbuild --ignore=tests/integration benchbuild tests - pytest --cov-config=./.coveragerc --cov=benchbuild tests/integration + uv run pytest --cov-config=./.coveragerc --cov=benchbuild --ignore=tests/integration benchbuild tests + uv run pytest --cov-config=./.coveragerc --cov=benchbuild tests/integration - uses: actions/upload-artifact@master with: @@ -53,27 +53,26 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.9", "3.10"] + python-version: ["3.11", "3.12", "3.13", "3.14"] db_support: [true, false] steps: - - uses: actions/checkout@v2 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} + - name: Checkout + uses: actions/checkout@v5 - name: Install system dependencies for CI run: | sudo apt-get install time unionfs-fuse libfuse-dev clang slurm-client - - name: Install dependencies + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + python-version: ${{ matrix.python-version }} + enable-cache: true + + - name: Install the project run: | - python -m pip install --upgrade pip wheel - pip install . 
- pip install -r requirements.txt - pip install -r test-requirements.txt + uv sync --locked --all-extras --dev - name: Run integration tests env: @@ -86,18 +85,19 @@ jobs: BB_CONTAINER_RUNROOT: ${{ runner.temp }} BB_VERBOSITY: 5 run: | - coverage run -p `which benchbuild` bootstrap -s - coverage run -p `which benchbuild` config view - coverage run -p `which benchbuild` experiment view - coverage run -p `which benchbuild` project view - coverage run -p `which benchbuild` run --full test - coverage run -p `which benchbuild` slurm -E empty test - coverage run -p `which benchbuild` slurm -E raw bzip2/benchbuild -- container --runroot /foo/bar - coverage run -p `which benchbuild` run -E raw bzip2/benchbuild - coverage run -p `which benchbuild` container run -E raw bzip2/benchbuild - coverage run -p `which benchbuild` container bases --export -E raw bzip2/benchbuild - coverage run -p `which benchbuild` container bases --import -E raw bzip2/benchbuild - coverage run -p `which benchbuild` container rmi --with-projects -E raw bzip2/benchbuild + source .venv/bin/activate + coverage run -p .venv/bin/benchbuild bootstrap -s + coverage run -p .venv/bin/benchbuild config view + coverage run -p .venv/bin/benchbuild experiment view + coverage run -p .venv/bin/benchbuild project view + coverage run -p .venv/bin/benchbuild run --full test + coverage run -p .venv/bin/benchbuild slurm -E empty test + coverage run -p .venv/bin/benchbuild slurm -E raw bzip2/benchbuild -- container --runroot /foo/bar + coverage run -p .venv/bin/benchbuild run -E raw bzip2/benchbuild + coverage run -p .venv/bin/benchbuild container run -E raw bzip2/benchbuild + coverage run -p .venv/bin/benchbuild container bases --export -E raw bzip2/benchbuild + coverage run -p .venv/bin/benchbuild container bases --import -E raw bzip2/benchbuild + coverage run -p .venv/bin/benchbuild container rmi --with-projects -E raw bzip2/benchbuild coverage combine coverage report -m @@ -111,15 +111,10 @@ jobs: runs-on: 
ubuntu-latest strategy: matrix: - python-version: ["3.10"] + python-version: ["3.11", "3.12", "3.13", "3.14"] steps: - - uses: actions/checkout@v2 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} + - uses: actions/checkout@v5 - uses: actions/download-artifact@master with: @@ -131,15 +126,21 @@ jobs: name: coverage-integration-${{ matrix.python-version }} path: ./.coverage.integration + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + python-version: ${{ matrix.python-version }} + enable-cache: true + - run: | ls -lah . - python -m pip install --upgrade pip wheel - pip install coverage - coverage combine -a ./.coverage.integration/.coverage ./.coverage.unittests/.coverage - coverage xml + uv sync --locked --all-extras --dev + uv pip install coverage + uv run coverage combine -a ./.coverage.integration/.coverage ./.coverage.unittests/.coverage + uv run coverage xml - name: Upload coverage report - uses: codecov/codecov-action@v1 + uses: codecov/codecov-action@v5 with: token: ${{ secrets.CODECOV_TOKEN }} file: ./coverage.xml @@ -149,25 +150,24 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.10"] + python-version: ["3.11", "3.12", "3.13", "3.14"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + - name: Install uv + uses: astral-sh/setup-uv@v6 with: python-version: ${{ matrix.python-version }} + enable-cache: true - name: Setup a git user run: | git config user.name github-actions git config user.email github-actions@github.com - - name: Install dependencies + - name: Install the project run: | - python -m pip install --upgrade pip - pip install . 
- pip install -r docs/requirements.txt + uv sync --locked --all-extras --group docs diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 5b91c2029..2d0276736 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -12,58 +12,30 @@ jobs: runs-on: ubuntu-latest steps: # https://github.com/marketplace/actions/checkout - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 lfs: true # https://github.com/marketplace/actions/setup-python # ^-- This gives info on matrix testing. - - name: Install Python + - name: Set up Python uses: actions/setup-python@v4 with: - python-version: '3.10' + python-version-file: ".python-version" - # https://docs.github.com/en/actions/guides/building-and-testing-python#caching-dependencies - # ^-- How to set up caching for pip on Ubuntu - - name: Cache pip - uses: actions/cache@v2 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - ${{ runner.os }}- - - # https://docs.github.com/en/actions/guides/building-and-testing-python#installing-dependencies - # ^-- This gives info on installing dependencies with pip - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - pip install -r docs/requirements.txt - pip install . 
+ - name: Install uv + uses: astral-sh/setup-uv@v6 - - name: Debugging information + - name: Install the project run: | - echo "github.ref:" ${{github.ref}} - echo "github.event_name:" ${{github.event_name}} - echo "github.head_ref:" ${{github.head_ref}} - echo "github.base_ref:" ${{github.base_ref}} - set -x - git rev-parse --abbrev-ref HEAD - git branch - git branch -a - git remote -v - python -V - pip list --not-required - pip list + uv sync --locked --all-extras --dev --group docs # Build - uses: ammaraskar/sphinx-problem-matcher@master - name: Build Sphinx docs run: | - make -Cdocs dirhtml + uv run make -Cdocs dirhtml # This fixes broken copy button icons, as explained in # https://github.com/coderefinery/sphinx-lesson/issues/50 # https://github.com/executablebooks/sphinx-copybutton/issues/110 diff --git a/.github/workflows/reviewdog.yml b/.github/workflows/reviewdog.yml index 4a2e2e605..7120ee8b2 100644 --- a/.github/workflows/reviewdog.yml +++ b/.github/workflows/reviewdog.yml @@ -5,12 +5,12 @@ jobs: name: reviewdog runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - name: Set up Python 3.10 + - name: Set up Python 3.12 uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.12" - name: Install dependencies run: | diff --git a/.gitignore b/.gitignore index 1d55170df..7ea8db6a7 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,4 @@ __pycache__/ .vim/coc-settings.json .pheasant_cache/ build/ +benchbuild/_version.py diff --git a/.isort.cfg b/.isort.cfg index db45450ee..99ee3daa9 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -1,5 +1,5 @@ [settings] -known_third_party = attr,dill,faker,git,jinja2,mock,parse,pkg_resources,plumbum,psutil,pygtrie,pyparsing,pytest,pytest_git,result,rich,schema,setuptools,six,sqlalchemy,yaml +known_third_party = attr,dill,faker,git,jinja2,mock,parse,plumbum,psutil,pygtrie,pyparsing,pytest,pytest_git,result,rich,schema,setuptools,six,sqlalchemy,yaml 
multi_line_output=3 use_parentheses = True include_trailing_comma: True diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b2c0eacfb..b4a82e26c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,26 +5,23 @@ repos: - id: commitizen stages: [commit-msg] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v6.0.0 hooks: - id: check-yaml - id: end-of-file-fixer - id: trailing-whitespace - id: requirements-txt-fixer -- repo: https://github.com/asottile/seed-isort-config - rev: v2.2.0 - hooks: - - id: seed-isort-config -- repo: https://github.com/timothycrosley/isort.git - rev: 5.12.0 - hooks: - - id: isort - args: ['-nis'] - repo: https://github.com/MarcoGorelli/auto-walrus rev: v0.2.2 hooks: - id: auto-walrus -- repo: https://github.com/pre-commit/mirrors-yapf - rev: 'v0.32.0' +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: "v0.12.5" hooks: - - id: yapf + - id: ruff-check + args: [ "--fix" ] + + - id: ruff-check + args: [ "--select", "I", "--fix"] + + - id: ruff-format diff --git a/.python-version b/.python-version new file mode 100644 index 000000000..e4fba2183 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.12 diff --git a/benchbuild/__init__.py b/benchbuild/__init__.py index 9ff2ae338..f2f9c001a 100644 --- a/benchbuild/__init__.py +++ b/benchbuild/__init__.py @@ -2,6 +2,7 @@ """ Public API of benchbuild. """ + import sys # Project utilities @@ -11,16 +12,20 @@ from . import plugins as __PLUGINS__ from . import source as source from .experiment import Experiment as Experiment + # Export: Project from .project import Project as Project + # Export: Configuration from .settings import CFG as CFG + # Don't Export, just init. 
# Export: compiler, download, run and wrapping modules from .utils import compiler as compiler from .utils import download as download from .utils import settings as __SETTINGS__ from .utils import wrapping as wrapping + # Wrapping / Execution utilities from .utils.run import watch as watch from .utils.wrapping import wrap as wrap diff --git a/benchbuild/cli/__init__.py b/benchbuild/cli/__init__.py index ac87a90f8..a8ca7c124 100644 --- a/benchbuild/cli/__init__.py +++ b/benchbuild/cli/__init__.py @@ -1,5 +1,12 @@ """The CLI package.""" + __all__ = [ - "main", "bootstrap", "config", "log", "project", "experiment", "run", - "slurm" + "main", + "bootstrap", + "config", + "log", + "project", + "experiment", + "run", + "slurm", ] diff --git a/benchbuild/cli/bootstrap.py b/benchbuild/cli/bootstrap.py index 4ba199031..9469bcc62 100644 --- a/benchbuild/cli/bootstrap.py +++ b/benchbuild/cli/bootstrap.py @@ -11,9 +11,9 @@ class BenchBuildBootstrap(cli.Application): """Bootstrap benchbuild external dependencies, if possible.""" - store_config = cli.Flag(["-s", "--save-config"], - help="Save benchbuild's configuration.", - default=False) + store_config = cli.Flag( + ["-s", "--save-config"], help="Save benchbuild's configuration.", default=False + ) def main(self, *args: str) -> int: del args # Unused @@ -22,10 +22,8 @@ def main(self, *args: str) -> int: bootstrap.provide_package("cmake") bootstrap.provide_package("fusermount") bootstrap.provide_package("unionfs") - bootstrap.provide_package( - 'uchroot', installer=bootstrap.install_uchroot - ) - bootstrap.provide_packages(CFG['bootstrap']['packages'].value) + bootstrap.provide_package("uchroot", installer=bootstrap.install_uchroot) + bootstrap.provide_packages(CFG["bootstrap"]["packages"].value) if self.store_config: config_path = ".benchbuild.yml" diff --git a/benchbuild/cli/config.py b/benchbuild/cli/config.py index ffd92f66a..ece7ba4bf 100644 --- a/benchbuild/cli/config.py +++ b/benchbuild/cli/config.py @@ -1,4 +1,5 @@ 
"""Subcommand for configuration handling.""" + import os from plumbum import cli diff --git a/benchbuild/cli/experiment.py b/benchbuild/cli/experiment.py index 64da9b185..d8749396a 100644 --- a/benchbuild/cli/experiment.py +++ b/benchbuild/cli/experiment.py @@ -1,4 +1,5 @@ """Subcommand for experiment handling.""" + from plumbum import cli from benchbuild import experiment diff --git a/benchbuild/cli/log.py b/benchbuild/cli/log.py index 4471f204e..4c091622b 100644 --- a/benchbuild/cli/log.py +++ b/benchbuild/cli/log.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -""" Analyze the BB database. """ +"""Analyze the BB database.""" from plumbum import cli @@ -7,26 +7,43 @@ def print_runs(query): - """ Print all rows in this result query. """ + """Print all rows in this result query.""" if query is None: return for tup in query: - print(("{0} @ {1} - {2} id: {3} group: {4}".format( - tup.end, tup.experiment_name, tup.project_name, - tup.experiment_group, tup.run_group))) + print( + ( + "{0} @ {1} - {2} id: {3} group: {4}".format( + tup.end, + tup.experiment_name, + tup.project_name, + tup.experiment_group, + tup.run_group, + ) + ) + ) def print_logs(query, types=None): - """ Print status logs. """ + """Print status logs.""" if query is None: return for run, log in query: - print(("{0} @ {1} - {2} id: {3} group: {4} status: {5}".format( - run.end, run.experiment_name, run.project_name, - run.experiment_group, run.run_group, log.status))) + print( + ( + "{0} @ {1} - {2} id: {3} group: {4} status: {5}".format( + run.end, + run.experiment_name, + run.project_name, + run.experiment_group, + run.run_group, + log.status, + ) + ) + ) print(("command: {0}".format(run.command))) if "stderr" in types: print("StdErr:") @@ -39,38 +56,40 @@ def print_logs(query, types=None): @BenchBuild.subcommand("log") class BenchBuildLog(cli.Application): - """ Frontend command to the benchbuild database. 
""" + """Frontend command to the benchbuild database.""" - @cli.switch(["-E", "--experiment"], - str, - list=True, - help="Experiments to fetch the log for.") + @cli.switch( + ["-E", "--experiment"], str, list=True, help="Experiments to fetch the log for." + ) def experiment(self, experiments): - """ Set the experiments to fetch the log for. """ + """Set the experiments to fetch the log for.""" self._experiments = experiments - @cli.switch(["-e", "--experiment-id"], - str, - list=True, - help="Experiment IDs to fetch the log for.") + @cli.switch( + ["-e", "--experiment-id"], + str, + list=True, + help="Experiment IDs to fetch the log for.", + ) def experiment_ids(self, experiment_ids): - """ Set the experiment ids to fetch the log for. """ + """Set the experiment ids to fetch the log for.""" self._experiment_ids = experiment_ids - @cli.switch(["-p", "--project-id"], - str, - list=True, - help="Project IDs to fetch the log for.") + @cli.switch( + ["-p", "--project-id"], str, list=True, help="Project IDs to fetch the log for." + ) def project_ids(self, project_ids): - """ Set the project ids to fetch the log for. """ + """Set the project ids to fetch the log for.""" self._project_ids = project_ids - @cli.switch(["-t", "--type"], - cli.Set("stdout", "stderr"), - list=True, - help="Set the output types to print.") + @cli.switch( + ["-t", "--type"], + cli.Set("stdout", "stderr"), + list=True, + help="Set the output types to print.", + ) def log_type(self, types): - """ Set the output types to print. """ + """Set the output types to print.""" self._types = types _experiments = None @@ -79,7 +98,7 @@ def log_type(self, types): _types = None def main(self, *projects): - """ Run the log command. 
""" + """Run the log command.""" from benchbuild.utils.schema import Session, Run, RunLog session = Session() diff --git a/benchbuild/cli/main.py b/benchbuild/cli/main.py index de0cf9028..ac99672a4 100644 --- a/benchbuild/cli/main.py +++ b/benchbuild/cli/main.py @@ -1,4 +1,5 @@ """Main CLI unit of BenchBuild.""" + import os from plumbum import cli @@ -13,12 +14,11 @@ class BenchBuild(cli.Application): VERSION = str(settings.CFG["version"]) _list_env = False - verbosity = cli.CountOf('-v', help="Enable verbose output") - debug = cli.Flag('-d', help="Enable debugging output") - force_tty = cli.Flag('--force-tty', help="Assume an available tty") + verbosity = cli.CountOf("-v", help="Enable verbose output") + debug = cli.Flag("-d", help="Enable debugging output") + force_tty = cli.Flag("--force-tty", help="Assume an available tty") force_watch_unbuffered = cli.Flag( - '--force-watch-unbuffered', - help="Force watched commands to output unbuffered" + "--force-watch-unbuffered", help="Force watched commands to output unbuffered" ) def main(self, *args: str) -> int: @@ -27,7 +27,7 @@ def main(self, *args: str) -> int: self.verbosity = self.verbosity if self.verbosity < 6 else 5 if self.debug: self.verbosity = 3 - verbosity = int(os.getenv('BB_VERBOSITY', self.verbosity)) + verbosity = int(os.getenv("BB_VERBOSITY", self.verbosity)) cfg["verbosity"] = verbosity cfg["debug"] = self.debug @@ -41,6 +41,7 @@ def main(self, *args: str) -> int: if cfg["db"]["create_functions"]: from benchbuild.utils.schema import init_functions, Session + init_functions(Session()) if args: diff --git a/benchbuild/cli/project.py b/benchbuild/cli/project.py index d037af68a..bb32e3009 100644 --- a/benchbuild/cli/project.py +++ b/benchbuild/cli/project.py @@ -1,6 +1,6 @@ """Subcommand for project handling.""" + import typing as tp -from functools import reduce from plumbum import cli @@ -24,10 +24,9 @@ class BBProjectView(cli.Application): groups = None - @cli.switch(["-G", "--group"], - str, - 
list=True, - help="Include projects of this group.") + @cli.switch( + ["-G", "--group"], str, list=True, help="Include projects of this group." + ) def set_group(self, groups): self.groups = groups @@ -42,17 +41,15 @@ class BBProjectDetails(cli.Application): limit: int = 10 - @cli.switch(["-l", "--limit"], - int, - help="Limit the number of versions to display") + @cli.switch(["-l", "--limit"], int, help="Limit the number of versions to display") def set_limit(self, limit: int) -> None: self.limit = limit def main(self, project: str) -> int: index = bb.populate([project], []) if not index.values(): - print(f'Project named {project} not found in the registry.') - print('Maybe it is not configured to be loaded.') + print(f"Project named {project} not found in the registry.") + print("Maybe it is not configured to be loaded.") return -1 for project_cls in index.values(): print_project(project_cls, self.limit) @@ -67,12 +64,12 @@ def print_project(project: tp.Type[Project], limit: int) -> None: project: The project to print. limit: The maximal number of versions to print. 
""" - tmp_dir = CFG['tmp_dir'] + tmp_dir = CFG["tmp_dir"] - print(f'project: {project.NAME}') - print(f'group: {project.GROUP}') - print(f'domain: {project.DOMAIN}') - print('source:') + print(f"project: {project.NAME}") + print(f"group: {project.GROUP}") + print(f"domain: {project.DOMAIN}") + print("source:") for source in project.SOURCE: if not source.is_context_free(): @@ -80,23 +77,23 @@ def print_project(project: tp.Type[Project], limit: int) -> None: num_versions = len(source.versions()) - print(' -', f'{source.remote}') - print(' ', 'default:', source.default) - print(' ', f'versions: {num_versions}') - print(' ', 'local:', f'{tmp_dir}/{source.local}') + print(" -", f"{source.remote}") + print(" ", "default:", source.default) + print(" ", f"versions: {num_versions}") + print(" ", "local:", f"{tmp_dir}/{source.local}") for v in list(source.versions())[:limit]: - print(' ' * 2, v) + print(" " * 2, v) def print_layers(container: ContainerImage, indent: int = 1) -> None: for layer in container: - print(' ' * indent, str(layer)) + print(" " * indent, str(layer)) - print('container:') + print("container:") if isinstance(project.CONTAINER, ContainerImage): print_layers(project.CONTAINER, 1) else: for k, container in project.CONTAINER: - print(' ', str(k)) + print(" ", str(k)) print_layers(container, 2) @@ -121,32 +118,29 @@ def print_projects(projects: ProjectIndex) -> None: "{name}/{group}".format(name=prj.NAME, group=prj.GROUP) ) - project_column_width = max([ - len(f'{p.NAME}/{p.GROUP}') for p in projects.values() - ]) + project_column_width = max([len(f"{p.NAME}/{p.GROUP}") for p in projects.values()]) project_header_format = ( "{name_header:<{width}} | {domain_header:^15} | " "{source_header:^15} | {description_header}" ) project_row_format = ( - "{name:<{width}} | {domain:^15} | " - "{num_sources:^15} | {description}" + "{name:<{width}} | {domain:^15} | {num_sources:^15} | {description}" ) for name in grouped_by: group_projects = sorted(grouped_by[name]) 
print( project_header_format.format( - name_header=f'{name}', + name_header=f"{name}", domain_header="Domain", source_header="# Sources", description_header="Description", - width=project_column_width + width=project_column_width, ) ) for prj_name in group_projects: prj_cls = projects[prj_name] - project_id = f'{prj_cls.NAME}/{prj_cls.GROUP}' + project_id = f"{prj_cls.NAME}/{prj_cls.GROUP}" num_project_sources = len(prj_cls.SOURCE) docstr = "" if prj_cls.__doc__: @@ -157,7 +151,7 @@ def print_projects(projects: ProjectIndex) -> None: domain=prj_cls.DOMAIN, num_sources=num_project_sources, description=docstr, - width=project_column_width + width=project_column_width, ) ) print() diff --git a/benchbuild/cli/run.py b/benchbuild/cli/run.py index a71d8b800..f606d4668 100644 --- a/benchbuild/cli/run.py +++ b/benchbuild/cli/run.py @@ -5,6 +5,7 @@ This subcommand executes experiments on a set of user-controlled projects. See the output of benchbuild run --help for more information. """ + import logging import sys import time @@ -24,32 +25,33 @@ class BenchBuildRun(cli.Application): experiment_names: tp.List[str] = [] group_names = None - test_full = cli.Flag(["-F", "--full"], - help="Test all experiments for the project", - default=False) + test_full = cli.Flag( + ["-F", "--full"], help="Test all experiments for the project", default=False + ) - @cli.switch(["-E", "--experiment"], - str, - list=True, - help="Specify experiments to run") + @cli.switch( + ["-E", "--experiment"], str, list=True, help="Specify experiments to run" + ) def set_experiments(self, names): self.experiment_names = names - @cli.switch(["-D", "--description"], - str, - help="A description for this experiment run") + @cli.switch( + ["-D", "--description"], str, help="A description for this experiment run" + ) def set_experiment_tag(self, description): CFG["experiment_description"] = description - @cli.switch(["-G", "--group"], - str, - list=True, - requires=["--experiment"], - help="Run a group of 
projects under the given experiments") + @cli.switch( + ["-G", "--group"], + str, + list=True, + requires=["--experiment"], + help="Run a group of projects under the given experiments", + ) def set_group(self, groups): self.group_names = groups - pretend = cli.Flag(['p', 'pretend'], default=False) + pretend = cli.Flag(["p", "pretend"], default=False) def main(self, *projects: str) -> int: """Main entry point of benchbuild run.""" @@ -63,22 +65,14 @@ def main(self, *projects: str) -> int: exps = all_exps else: exps = dict( - filter( - lambda pair: pair[0] in set(experiment_names), - all_exps.items() - ) + filter(lambda pair: pair[0] in set(experiment_names), all_exps.items()) ) unknown_exps = list( - filter( - lambda name: name not in all_exps.keys(), set(experiment_names) - ) + filter(lambda name: name not in all_exps.keys(), set(experiment_names)) ) if unknown_exps: - print( - 'Could not find ', str(unknown_exps), - ' in the experiment registry.' - ) + print("Could not find ", str(unknown_exps), " in the experiment registry.") prjs = project.populate(list(projects), group_names) if not prjs: print("Could not find any project. Exiting.") @@ -122,7 +116,5 @@ def print_summary(num_actions, failed, duration): {num_failed} actions failed to execute. This run took: {elapsed_time:8.3f} seconds. - """.format( - num_total=num_actions, num_failed=num_failed, elapsed_time=duration - ) + """.format(num_total=num_actions, num_failed=num_failed, elapsed_time=duration) ) diff --git a/benchbuild/cli/slurm.py b/benchbuild/cli/slurm.py index 9c818bd0b..37f22f203 100644 --- a/benchbuild/cli/slurm.py +++ b/benchbuild/cli/slurm.py @@ -6,6 +6,7 @@ dumps a slurm batch script that executes everything as an array job on a configurable SLURM cluster. """ + import itertools import os import typing as tp @@ -22,35 +23,36 @@ @BenchBuild.subcommand("slurm") class Slurm(cli.Application): - """ Generate a SLURM script. 
""" + """Generate a SLURM script.""" group_args: tp.List[str] = [] def __init__(self, executable): super().__init__(executable) - self._experiment: str = '' + self._experiment: str = "" self._description = None - @cli.switch(["-E", "--experiment"], - str, - mandatory=True, - help="Specify experiments to run") + @cli.switch( + ["-E", "--experiment"], str, mandatory=True, help="Specify experiments to run" + ) def experiment(self, cfg_experiment): """Specify experiments to run""" self._experiment = cfg_experiment - @cli.switch(["-D", "--description"], - str, - help="A description for this experiment run") + @cli.switch( + ["-D", "--description"], str, help="A description for this experiment run" + ) def experiment_tag(self, description): """A description for this experiment run""" self._description = description - @cli.switch(["-G", "--group"], - str, - list=True, - requires=["--experiment"], - help="Run a group of projects under the given experiments") + @cli.switch( + ["-G", "--group"], + str, + list=True, + requires=["--experiment"], + help="Run a group of projects under the given experiments", + ) def group(self, groups: tp.List[str]) -> None: # type: ignore """Run a group of projects under the given experiments""" self.group_args = groups @@ -71,15 +73,14 @@ def main(self, *args: str) -> int: CFG["experiment_description"] = self._description CFG["slurm"]["logs"] = os.path.abspath( - os.path.join(str(CFG['build_dir']), str(CFG['slurm']['logs'])) + os.path.join(str(CFG["build_dir"]), str(CFG["slurm"]["logs"])) ) CFG["build_dir"] = str(CFG["slurm"]["node_dir"]) if CFG["slurm"]["container_root"].value is not None: CFG["container"]["root"] = CFG["slurm"]["container_root"].value if CFG["slurm"]["container_runroot"].value is not None: - CFG["container"]["runroot"] = CFG["slurm"]["container_runroot" - ].value + CFG["container"]["runroot"] = CFG["slurm"]["container_runroot"].value if not wanted_experiments: print("Could not find any experiment. 
Exiting.") @@ -96,16 +97,14 @@ def main(self, *args: str) -> int: return 0 -def split_args( - args: tp.Iterable[str] -) -> tp.Tuple[tp.Iterable[str], tp.Iterable[str]]: +def split_args(args: tp.Iterable[str]) -> tp.Tuple[tp.Iterable[str], tp.Iterable[str]]: """ Split our CLI arguments at the '--' into two groups. The first group will be our projects. The second (optional) group will be a custom slurm command to use as subcommand to benchbuild. """ - subcommands: tp.Set[str] = {'run', 'container'} + subcommands: tp.Set[str] = {"run", "container"} prj_or_slurm = list(args) cli_projects = list( itertools.takewhile(lambda x: x not in subcommands, prj_or_slurm) @@ -114,14 +113,15 @@ def split_args( itertools.dropwhile(lambda x: x not in subcommands, prj_or_slurm) ) if not cli_slurm_command: - cli_slurm_command = ['run'] + cli_slurm_command = ["run"] return (cli_projects, cli_slurm_command) def cli_process( - cli_experiments: tp.Iterable[str], cli_projects: tp.Iterable[str], - cli_groups: tp.Iterable[str] + cli_experiments: tp.Iterable[str], + cli_projects: tp.Iterable[str], + cli_groups: tp.Iterable[str], ) -> tp.Tuple[ExperimentIndex, ProjectIndex]: """ Shared CLI processing of projects/experiment selection. @@ -133,13 +133,13 @@ def cli_process( if name in set(cli_experiments) } unknown_experiments = [ - name for name in cli_experiments + name + for name in cli_experiments if name not in set(discovered_experiments.keys()) ] if unknown_experiments: print( - 'Could not find ', str(unknown_experiments), - ' in the experiment registry.' + "Could not find ", str(unknown_experiments), " in the experiment registry." 
) wanted_projects = project.populate(list(cli_projects), list(cli_groups)) diff --git a/benchbuild/command.py b/benchbuild/command.py index b20692f0f..c08d28f50 100644 --- a/benchbuild/command.py +++ b/benchbuild/command.py @@ -1,6 +1,5 @@ import logging import shutil -import sys import typing as tp from contextlib import contextmanager from pathlib import Path @@ -77,6 +76,7 @@ class ConstStrRenderer: """ Renders a constant string defined by the user. """ + value: str def __init__(self, value: str) -> None: @@ -108,8 +108,8 @@ def unrendered(self) -> str: def rendered( self, - project: tp.Optional['benchbuild.project.Project'] = None, - **kwargs: tp.Any + project: tp.Optional["benchbuild.project.Project"] = None, + **kwargs: tp.Any, ) -> Path: """ Render the project's build directory. @@ -141,6 +141,7 @@ class SourceRootRenderer: If the local name cannot be found inside the project's source definition, it will concatenate the project's builddir with the given name. """ + local: str def __init__(self, local_name: str) -> None: @@ -152,8 +153,8 @@ def unrendered(self) -> str: def rendered( self, - project: tp.Optional['benchbuild.project.Project'] = None, - **kwargs: tp.Any + project: tp.Optional["benchbuild.project.Project"] = None, + **kwargs: tp.Any, ) -> Path: """ Render the project's source directory. @@ -170,7 +171,7 @@ def rendered( LOG.error("Cannot render a source directory without a project.") return Path(self.unrendered) - if (src_path := project.source_of(self.local)): + if src_path := project.source_of(self.local): return Path(src_path) return Path(project.builddir) / self.local @@ -182,12 +183,11 @@ class ArgsToken: """ Base class for tokens that can be rendered into command-line arguments. 
""" + renderer: ArgsRenderStrategy @classmethod - def make_token( - cls, renderer: ArgsRenderStrategy - ) -> 'ArgsToken': + def make_token(cls, renderer: ArgsRenderStrategy) -> "ArgsToken": return ArgsToken(renderer) def __init__(self, renderer: ArgsRenderStrategy) -> None: @@ -215,15 +215,16 @@ class PathToken: A path token can use similar to pathlib's Path components. However, each token can render dynamically based on the given render context. """ + renderer: PathRenderStrategy - left: tp.Optional['PathToken'] - right: tp.Optional['PathToken'] + left: tp.Optional["PathToken"] + right: tp.Optional["PathToken"] @classmethod def make_token( cls, renderer: tp.Optional[PathRenderStrategy] = None - ) -> 'PathToken': + ) -> "PathToken": if renderer: return PathToken(renderer) return PathToken(RootRenderer()) @@ -231,10 +232,9 @@ def make_token( def __init__( self, renderer: PathRenderStrategy, - left: tp.Optional['PathToken'] = None, - right: tp.Optional['PathToken'] = None + left: tp.Optional["PathToken"] = None, + right: tp.Optional["PathToken"] = None, ) -> None: - self.renderer = renderer self.left = left self.right = right @@ -269,7 +269,7 @@ def render(self, **kwargs: tp.Any) -> Path: return p - def __truediv__(self, rhs: tp.Union[str, 'PathToken']) -> 'PathToken': + def __truediv__(self, rhs: tp.Union[str, "PathToken"]) -> "PathToken": if isinstance(rhs, str): render_str = ConstStrRenderer(rhs) rhs_token = PathToken(render_str) @@ -316,8 +316,7 @@ class SupportsUnwrap(Protocol): Unwrapping ensures access to a WorkloadSet from any wrapper object. """ - def unwrap(self, project: "benchbuild.project.Project") -> "WorkloadSet": - ... + def unwrap(self, project: "benchbuild.project.Project") -> "WorkloadSet": ... class WorkloadSet: @@ -396,12 +395,11 @@ class OnlyIn: iff, the Project's revision is included in the range specified by the RevisionRange. 
""" + rev_range: RevisionRange workload_set: WorkloadSet - def __init__( - self, rev_range: RevisionRange, workload_set: WorkloadSet - ) -> None: + def __init__(self, rev_range: RevisionRange, workload_set: WorkloadSet) -> None: self.rev_range = rev_range self.workload_set = workload_set @@ -501,7 +499,6 @@ def __init__( consumes: tp.Optional[tp.Sequence[ArtefactPath]] = None, **kwargs: str, ) -> None: - def _to_pathtoken(token: ArtefactPath) -> PathToken: if isinstance(token, str): return ProjectRoot() / token @@ -572,7 +569,7 @@ def __getitem__(self, args: tp.Tuple[tp.Any, ...]) -> "Command": output_param=self._output_param, creates=self._creates, consumes=self._consumes, - **self._env + **self._env, ) def __call__(self, *args: tp.Any, **kwargs: tp.Any) -> tp.Any: @@ -670,9 +667,7 @@ class ProjectCommand: project: "benchbuild.project.Project" command: Command - def __init__( - self, project: "benchbuild.project.Project", command: Command - ) -> None: + def __init__(self, project: "benchbuild.project.Project", command: Command) -> None: self.project = project self.command = command @@ -716,8 +711,7 @@ def _default_prune(project_command: ProjectCommand) -> None: def _default_backup( - project_command: ProjectCommand, - _suffix: str = ".benchbuild_backup" + project_command: ProjectCommand, _suffix: str = ".benchbuild_backup" ) -> tp.List[Path]: command = project_command.command project = project_command.project @@ -753,24 +747,21 @@ def _default_restore(backup_paths: tp.List[Path]) -> None: class PruneFn(Protocol): """Prune function protocol.""" - def __call__(self, project_command: ProjectCommand) -> None: - ... + def __call__(self, project_command: ProjectCommand) -> None: ... class BackupFn(Protocol): """Backup callback function protocol.""" - def __call__(self, - project_command: ProjectCommand, - _suffix: str = ...) -> tp.List[Path]: - ... + def __call__( + self, project_command: ProjectCommand, _suffix: str = ... + ) -> tp.List[Path]: ... 
class RestoreFn(Protocol): """Restore function protocol.""" - def __call__(self, backup_paths: tp.List[Path]) -> None: - ... + def __call__(self, backup_paths: tp.List[Path]) -> None: ... @contextmanager @@ -778,7 +769,7 @@ def cleanup( project_command: ProjectCommand, backup: BackupFn = _default_backup, restore: RestoreFn = _default_restore, - prune: PruneFn = _default_prune + prune: PruneFn = _default_prune, ): """ Encapsulate a command in automatic backup, restore and prune. @@ -800,7 +791,7 @@ def cleanup( def unwrap( - index: WorkloadIndex, project: 'benchbuild.project.Project' + index: WorkloadIndex, project: "benchbuild.project.Project" ) -> WorkloadIndex: """ Unwrap all keys in a workload index. diff --git a/benchbuild/container.py b/benchbuild/container.py index 6c3c06a0a..de1607234 100644 --- a/benchbuild/container.py +++ b/benchbuild/container.py @@ -5,6 +5,7 @@ You can define strategies and apply them on a given container base-image to have a fixed way of creating a user-space environment. """ + import logging import os import sys @@ -58,15 +59,26 @@ def setup_container(builddir, _container): uchrt = uchroot.no_args() with local.cwd("container-in"): - uchrt = uchrt["-E", "-A", "-u", "0", "-g", "0", "-C", "-r", "/", - "-w", - os.path.abspath("."), "--"] + uchrt = uchrt[ + "-E", + "-A", + "-u", + "0", + "-g", + "0", + "-C", + "-r", + "/", + "-w", + os.path.abspath("."), + "--", + ] # Check, if we need erlent support for this archive. - has_erlent = bash["-c", - "tar --list -f './{0}' | grep --silent '.erlent'". 
- format(container_in)] - has_erlent = (has_erlent & TF) + has_erlent = bash[ + "-c", "tar --list -f './{0}' | grep --silent '.erlent'".format(container_in) + ] + has_erlent = has_erlent & TF # Unpack input container to: container-in if not has_erlent: @@ -92,16 +104,15 @@ def run_in_container(command, container_dir): container_p = local.path(container_dir) with local.cwd(container_p): uchrt = uchroot.with_mounts() - uchrt = uchrt["-E", "-A", "-u", "0", "-g", "0", "-C", "-w", "/", "-r", - container_p] + uchrt = uchrt[ + "-E", "-A", "-u", "0", "-g", "0", "-C", "-w", "/", "-r", container_p + ] uchrt = uchrt["--"] - cmd_path = container_p / command[0].lstrip('/') + cmd_path = container_p / command[0].lstrip("/") if not cmd_path.exists(): - LOG.error( - "The command does not exist inside the container! %s", cmd_path - ) - raise ValueError('The command does not exist inside the container!') + LOG.error("The command does not exist inside the container! %s", cmd_path) + raise ValueError("The command does not exist inside the container!") cmd = uchrt[command] return cmd & FG @@ -209,8 +220,7 @@ def run(self, context): "command to leave the container." 
) setup_bash_in_container( - context.builddir, context.in_container, context.out_container, - context.shell + context.builddir, context.in_container, context.out_container, context.shell ) @@ -225,6 +235,7 @@ def run(self, context): with local.cwd(context.in_container): from benchbuild.projects.gentoo import gentoo + gentoo.setup_networking() gentoo.configure_portage() @@ -234,16 +245,13 @@ def run(self, context): emerge_in_chroot = run.watch(emerge_in_chroot) has_pkg = uchroot.uchroot()["/usr/bin/qlist", "-I"] - sed_in_chroot("-i", '/CC=/d', "/etc/portage/make.conf") - sed_in_chroot("-i", '/CXX=/d', "/etc/portage/make.conf") + sed_in_chroot("-i", "/CC=/d", "/etc/portage/make.conf") + sed_in_chroot("-i", "/CXX=/d", "/etc/portage/make.conf") want_sync = bool(CFG["container"]["strategy"]["polyjit"]["sync"]) - want_upgrade = bool( - CFG["container"]["strategy"]["polyjit"]["upgrade"] - ) + want_upgrade = bool(CFG["container"]["strategy"]["polyjit"]["upgrade"]) - packages = \ - CFG["container"]["strategy"]["polyjit"]["packages"].value + packages = CFG["container"]["strategy"]["polyjit"]["packages"].value with local.env(MAKEOPTS="-j{0}".format(get_number_of_jobs(CFG))): if want_sync: LOG.debug("Synchronizing portage.") @@ -251,8 +259,7 @@ def run(self, context): if want_upgrade: LOG.debug("Upgrading world.") emerge_in_chroot( - "--autounmask-only=y", "-uUDN", "--with-bdeps=y", - "@world" + "--autounmask-only=y", "-uUDN", "--with-bdeps=y", "@world" ) for pkg in packages: if has_pkg[pkg["name"]] & TF: @@ -291,20 +298,18 @@ def output_file(self, _container): """Find and writes the output path of a chroot container.""" p = local.path(_container) if p.exists(): - if not ui.ask("Path '{0}' already exists." " Overwrite?".format(p)): + if not ui.ask("Path '{0}' already exists. 
Overwrite?".format(p)): sys.exit(0) CFG["container"]["output"] = str(p) - @cli.switch(["-s", "--shell"], - str, - help="The shell command we invoke inside the container.") + @cli.switch( + ["-s", "--shell"], str, help="The shell command we invoke inside the container." + ) def shell(self, custom_shell): """The command to run inside the container.""" CFG["container"]["shell"] = custom_shell - @cli.switch(["-t", "-tmp-dir"], - cli.ExistingDirectory, - help="Temporary directory") + @cli.switch(["-t", "-tmp-dir"], cli.ExistingDirectory, help="Temporary directory") def builddir(self, tmpdir): """Set the current builddir of the container.""" CFG["build_dir"] = tmpdir @@ -313,13 +318,13 @@ def builddir(self, tmpdir): ["m", "--mount"], cli.ExistingDirectory, list=True, - help="Mount the given directory under / inside the uchroot container" + help="Mount the given directory under / inside the uchroot container", ) def mounts(self, user_mount): """Save the current mount of the container into the settings.""" CFG["container"]["mounts"] = user_mount - verbosity = cli.CountOf('-v', help="Enable verbose output") + verbosity = cli.CountOf("-v", help="Enable verbose output") def main(self, *args): log.configure() @@ -372,10 +377,12 @@ class ContainerCreate(cli.Application): _strategy = BashStrategy() - @cli.switch(["-S", "--strategy"], - cli.Set("bash", "polyjit", case_sensitive=False), - help="Defines the strategy used to create a new container.", - mandatory=False) + @cli.switch( + ["-S", "--strategy"], + cli.Set("bash", "polyjit", case_sensitive=False), + help="Defines the strategy used to create a new container.", + mandatory=False, + ) def strategy(self, strategy): """Select strategy based on key. 
@@ -387,7 +394,7 @@ def strategy(self, strategy): """ self._strategy = { "bash": BashStrategy(), - "polyjit": SetupPolyJITGentooStrategy() + "polyjit": SetupPolyJITGentooStrategy(), }[strategy] def main(self, *args): @@ -410,7 +417,7 @@ def main(self, *args): in_container=in_container, out_container=out_container, mounts=mounts, - shell=shell + shell=shell, ) ) clean_directories(builddir, in_is_file, True) @@ -422,10 +429,7 @@ class ContainerBootstrap(cli.Application): def install_cmake_and_exit(self): """Tell the user to install cmake and aborts the current process.""" - print( - "You need to install cmake via your package manager manually." - " Exiting." - ) + print("You need to install cmake via your package manager manually. Exiting.") sys.exit(-1) def main(self, *args): diff --git a/benchbuild/driver.py b/benchbuild/driver.py index 643348e7c..f20072c85 100644 --- a/benchbuild/driver.py +++ b/benchbuild/driver.py @@ -14,13 +14,13 @@ def main(*args): """Main function.""" - BenchBuild.subcommand('bootstrap', BenchBuildBootstrap) - BenchBuild.subcommand('config', BBConfig) - BenchBuild.subcommand('container', cli.BenchBuildContainer) - BenchBuild.subcommand('experiment', BBExperiment) - BenchBuild.subcommand('log', BenchBuildLog) - BenchBuild.subcommand('project', BBProject) - BenchBuild.subcommand('run', BenchBuildRun) - BenchBuild.subcommand('slurm', Slurm) + BenchBuild.subcommand("bootstrap", BenchBuildBootstrap) + BenchBuild.subcommand("config", BBConfig) + BenchBuild.subcommand("container", cli.BenchBuildContainer) + BenchBuild.subcommand("experiment", BBExperiment) + BenchBuild.subcommand("log", BenchBuildLog) + BenchBuild.subcommand("project", BBProject) + BenchBuild.subcommand("run", BenchBuildRun) + BenchBuild.subcommand("slurm", Slurm) return BenchBuild.run(*args) diff --git a/benchbuild/engine.py b/benchbuild/engine.py index f2f3e2812..921fae744 100644 --- a/benchbuild/engine.py +++ b/benchbuild/engine.py @@ -1,6 +1,7 @@ """ Orchestrate experiment 
execution. """ + import typing as tp import attr diff --git a/benchbuild/environments/adapters/buildah.py b/benchbuild/environments/adapters/buildah.py index 4897118a7..26dae6406 100644 --- a/benchbuild/environments/adapters/buildah.py +++ b/benchbuild/environments/adapters/buildah.py @@ -23,12 +23,9 @@ def _spawn_add_layer( container: model.Container, layer: model.AddLayer ) -> Result[str, ProcessExecutionError]: with local.cwd(container.context): - sources = [ - os.path.join(container.context, source) for source in layer.sources - ] - buildah_add = bb_buildah('add', '--add-history') - buildah_add = buildah_add[container.container_id][sources][ - layer.destination] + sources = [os.path.join(container.context, source) for source in layer.sources] + buildah_add = bb_buildah("add", "--add-history") + buildah_add = buildah_add[container.container_id][sources][layer.destination] return run(buildah_add) @@ -44,12 +41,13 @@ def _spawn_run_layer( ) -> Result[str, ProcessExecutionError]: kws = [] for name, value in layer.kwargs: - kws.append(f'--{name}') - kws.append(f'{str(value)}') + kws.append(f"--{name}") + kws.append(f"{str(value)}") - buildah_run = bb_buildah('run', '--add-history') - buildah_run = buildah_run[kws][container.container_id, '--', - layer.command][layer.args] + buildah_run = bb_buildah("run", "--add-history") + buildah_run = buildah_run[kws][container.container_id, "--", layer.command][ + layer.args + ] return run(buildah_run) @@ -65,9 +63,9 @@ def _spawn_in_context( def _update_env_layer( container: model.Container, layer: model.UpdateEnv ) -> Result[str, ProcessExecutionError]: - buildah_config = bb_buildah('config') + buildah_config = bb_buildah("config") for key, value in layer.env: - buildah_config = buildah_config['-e', f'{key}={value}'] + buildah_config = buildah_config["-e", f"{key}={value}"] return run(buildah_config[container.container_id]) @@ -82,19 +80,19 @@ def fetch_image_env(image: model.Image) -> None: Args: image: The image to fetch the 
env for. """ - buildah_inspect = bb_buildah('inspect') + buildah_inspect = bb_buildah("inspect") results = json.loads(buildah_inspect(image.name)) oci_config = {} try: if results: - oci_config = results['OCIv1']['config'] + oci_config = results["OCIv1"]["config"] if oci_config: - env_list = oci_config.get('Env') + env_list = oci_config.get("Env") if env_list: for env_item in env_list: - k, v = env_item.split('=') + k, v = env_item.split("=") image.env[k] = v except KeyError: @@ -104,13 +102,14 @@ def fetch_image_env(image: model.Image) -> None: def _set_entry_point( container: model.Container, layer: model.EntryPoint ) -> Result[str, ProcessExecutionError]: - cmd = bb_buildah('config')['--entrypoint', json.dumps(list(layer.command))] + cmd = bb_buildah("config")["--entrypoint", json.dumps(list(layer.command))] return run(cmd[container.container_id]) -def _set_command(container: model.Container, - layer: model.SetCommand) -> Result[str, ProcessExecutionError]: - cmd = bb_buildah('config')['--cmd', json.dumps(list(layer.command))] +def _set_command( + container: model.Container, layer: model.SetCommand +) -> Result[str, ProcessExecutionError]: + cmd = bb_buildah("config")["--cmd", json.dumps(list(layer.command))] return run(cmd[container.container_id]) @@ -118,13 +117,13 @@ def _set_working_directory( container: model.Container, layer: model.WorkingDirectory ) -> Result[str, ProcessExecutionError]: return run( - bb_buildah('config')['--workingdir', layer.directory, - container.container_id] + bb_buildah("config")["--workingdir", layer.directory, container.container_id] ) -LayerHandlerT = tp.Callable[[model.Container, model.Layer], - Result[str, ProcessExecutionError]] +LayerHandlerT = tp.Callable[ + [model.Container, model.Layer], Result[str, ProcessExecutionError] +] _LAYER_HANDLERS = { model.AddLayer: _spawn_add_layer, @@ -134,24 +133,24 @@ def _set_working_directory( model.UpdateEnv: _update_env_layer, model.EntryPoint: _set_entry_point, model.WorkingDirectory: 
_set_working_directory, - model.SetCommand: _set_command + model.SetCommand: _set_command, } -def spawn_layer(container: model.Container, - layer: model.Layer) -> Result[str, ProcessExecutionError]: +def spawn_layer( + container: model.Container, layer: model.Layer +) -> Result[str, ProcessExecutionError]: if layer == container.image.from_: return Ok("") - handler: LayerHandlerT = tp.cast( - LayerHandlerT, _LAYER_HANDLERS[type(layer)] - ) + handler: LayerHandlerT = tp.cast(LayerHandlerT, _LAYER_HANDLERS[type(layer)]) res = handler(container, layer) if isinstance(res, Err): LOG.error( - "Could not spawn layer %s in container %s", str(layer), - str(container.container_id) + "Could not spawn layer %s in container %s", + str(layer), + str(container.container_id), ) LOG.error("Reason: %s", str(res.unwrap_err)) @@ -159,8 +158,7 @@ def spawn_layer(container: model.Container, def handle_layer_error( - err: Err[ProcessExecutionError], container: model.Container, - layer: model.Layer + err: Err[ProcessExecutionError], container: model.Container, layer: model.Layer ) -> None: """ Process a layer error gracefully. @@ -174,12 +172,10 @@ def handle_layer_error( layer: the layer we tried to build. """ image = container.image - image.events.append( - events.LayerCreationFailed(str(layer), image.name, str(err)) - ) + image.events.append(events.LayerCreationFailed(str(layer), image.name, str(err))) def can_keep(layer: model.Layer) -> bool: - keep = bool(CFG['container']['keep']) + keep = bool(CFG["container"]["keep"]) return keep and not isinstance(layer, model.FromLayer) if can_keep(layer): @@ -195,8 +191,9 @@ def can_keep(layer: model.Layer) -> bool: ) -def store_failed_build(tag: str, - container_id: str) -> Result[str, ProcessExecutionError]: +def store_failed_build( + tag: str, container_id: str +) -> Result[str, ProcessExecutionError]: """ Store a failed build container. 
@@ -207,16 +204,17 @@ def store_failed_build(tag: str, Returns: A tuple of the new image tag and the command error state """ - suffix = str(CFG['container']['keep_suffix']) - failed_tag = f'{tag}-{suffix}' + suffix = str(CFG["container"]["keep_suffix"]) + failed_tag = f"{tag}-{suffix}" - commit = bb_buildah('commit')[container_id, failed_tag.lower()] + commit = bb_buildah("commit")[container_id, failed_tag.lower()] res = run(commit) if isinstance(res, Err): LOG.error( - "Could not store failed build %s in tag %s", str(container_id), - str(failed_tag.lower()) + "Could not store failed build %s in tag %s", + str(container_id), + str(failed_tag.lower()), ) LOG.error("Reason: %s", str(res.unwrap_err)) return res @@ -236,14 +234,14 @@ def find_entrypoint(tag: str) -> str: A tuple of the configured entrypoint joined with whitespace and the command's error state. """ - inspect_str = bb_buildah('inspect')(tag) + inspect_str = bb_buildah("inspect")(tag) json_output = json.loads(inspect_str) - config = json_output['OCIv1']['config'] + config = json_output["OCIv1"]["config"] if not config: raise ValueError("Could not find the container image config") - return str(' '.join(config.get('Entrypoint', []))) + return str(" ".join(config.get("Entrypoint", []))) class ImageRegistry(abc.ABC): @@ -304,7 +302,7 @@ def find(self, tag: str) -> model.MaybeImage: if tag in self.images: return self.images[tag] - if (image := self._find(tag)): + if image := self._find(tag): self.images[tag] = image return image @@ -388,29 +386,24 @@ def _env(self, tag: str, name: str) -> tp.Optional[str]: raise NotImplementedError def temporary_mount(self, tag: str, source: str, target: str) -> None: - if (image := self.find(tag)): + if image := self.find(tag): image.mounts.append(model.Mount(source, target)) class BuildahImageRegistry(ImageRegistry): - def _create(self, tag: str, from_: model.FromLayer) -> model.MaybeContainer: image = model.Image(tag, from_, []) # container_id, err = 
run(bb_buildah('from')[from_.base]) - res = run(bb_buildah('from')[from_.base.lower()]) + res = run(bb_buildah("from")[from_.base.lower()]) if isinstance(res, Err): raise ImageCreateError(tag, message=str(res.unwrap_err())) if isinstance(res, Ok): container_id = res.unwrap() - context = local.path( - mktemp('-dt', '-p', str(CFG['build_dir'])).strip() - ) - container = model.Container( - container_id, image, context, image.name - ) + context = local.path(mktemp("-dt", "-p", str(CFG["build_dir"])).strip()) + container = model.Container(container_id, image, context, image.name) return container @@ -432,25 +425,23 @@ def _add(self, image: model.Image) -> bool: handle_layer_error(res, container, layer) else: image.events.append( - events.LayerCreated( - str(layer), container.container_id, image.name - ) + events.LayerCreated(str(layer), container.container_id, image.name) ) image.present(layer) return res.is_ok() def _find(self, tag: str) -> model.MaybeImage: - res = run(bb_buildah('images')['--json', tag.lower()], retcode=[0, 125]) + res = run(bb_buildah("images")["--json", tag.lower()], retcode=[0, 125]) if isinstance(res, Err): LOG.debug("Could not find the image %s", tag) return None - if (results := res.unwrap()): + if results := res.unwrap(): json_results = json.loads(results) if json_results: - #json_image = json_results.pop(0) + # json_image = json_results.pop(0) image = model.Image(tag, model.FromLayer(tag), []) fetch_image_env(image) return image @@ -458,14 +449,15 @@ def _find(self, tag: str) -> model.MaybeImage: return None def _env(self, tag: str, name: str) -> tp.Optional[str]: - if (image := self.find(tag)): + if image := self.find(tag): return image.env.get(name) return None def _remove(self, image: model.Image) -> None: - res = run(bb_buildah('rmi')[image.name.lower()], retcode=[0]) + res = run(bb_buildah("rmi")[image.name.lower()], retcode=[0]) if isinstance(res, Err): LOG.error( - "Could not delete image %s. 
Reason: %s", image.name.lower(), - str(res.unwrap_err()) + "Could not delete image %s. Reason: %s", + image.name.lower(), + str(res.unwrap_err()), ) diff --git a/benchbuild/environments/adapters/common.py b/benchbuild/environments/adapters/common.py index 7a50cd813..3a66c36d5 100644 --- a/benchbuild/environments/adapters/common.py +++ b/benchbuild/environments/adapters/common.py @@ -11,8 +11,8 @@ LOG = logging.getLogger(__name__) __MSG_SHORTER_PATH_REQUIRED = ( - 'needs to be shorter than 50 chars, if you ' - 'experience errors with the following command.' + "needs to be shorter than 50 chars, if you " + "experience errors with the following command." ) @@ -21,8 +21,8 @@ def buildah_version() -> tp.Tuple[int, int, int]: Returns the local buildah version. """ raw_version_string = buildah("version") - version_str = raw_version_string.split('\n')[0].split(":")[1].strip() - major, minor, patch = version_str.split('.') + version_str = raw_version_string.split("\n")[0].split(":")[1].strip() + major, minor, patch = version_str.split(".") return (int(major), int(minor), int(patch)) @@ -42,30 +42,26 @@ def container_cmd(base: BaseCommand) -> BaseCommand: def path_longer_than_50_chars(path: str) -> bool: if len(path) > 50: - LOG.debug('A path-length > 50 is not supported by libpod.') + LOG.debug("A path-length > 50 is not supported by libpod.") return True return False def wrapped_cmd(*args: str) -> BaseCommand: - root = CFG['container']['root'] - runroot = CFG['container']['runroot'] - storage_driver = CFG['container']['storage_driver'].value - storage_opts = CFG['container']['storage_opts'].value + root = CFG["container"]["root"] + runroot = CFG["container"]["runroot"] + storage_driver = CFG["container"]["storage_driver"].value + storage_opts = CFG["container"]["storage_opts"].value if path_longer_than_50_chars(str(root)): - LOG.error( - '%s - %s', root.__to_env_var__(), __MSG_SHORTER_PATH_REQUIRED - ) + LOG.error("%s - %s", root.__to_env_var__(), 
__MSG_SHORTER_PATH_REQUIRED) if path_longer_than_50_chars(str(runroot)): - LOG.error( - '%s - %s', runroot.__to_env_var__(), __MSG_SHORTER_PATH_REQUIRED - ) + LOG.error("%s - %s", runroot.__to_env_var__(), __MSG_SHORTER_PATH_REQUIRED) - opts = ['--root', root, '--runroot', runroot] + opts = ["--root", root, "--runroot", runroot] if storage_driver: - opts.append('--storage-driver') + opts.append("--storage-driver") opts.append(storage_driver) if storage_opts is None: @@ -73,7 +69,7 @@ def wrapped_cmd(*args: str) -> BaseCommand: opts.append("--storage-opt=''") for opt in storage_opts: - opts.append('--storage-opt') + opts.append("--storage-opt") opts.append(opt) cmd = base[opts] @@ -87,7 +83,6 @@ def wrapped_cmd(*args: str) -> BaseCommand: class ImageCreateError(Exception): - def __init__(self, name: str, message: str): super().__init__() @@ -95,24 +90,23 @@ def __init__(self, name: str, message: str): self.message = message -def run(cmd: BaseCommand, - **kwargs: tp.Any) -> Result[str, ProcessExecutionError]: +def run(cmd: BaseCommand, **kwargs: tp.Any) -> Result[str, ProcessExecutionError]: try: return Ok(str(cmd(**kwargs)).strip()) except ProcessExecutionError as err: return Err(err) -def run_tee(cmd: BaseCommand, - **kwargs: tp.Any) -> Result[tp.Any, ProcessExecutionError]: +def run_tee( + cmd: BaseCommand, **kwargs: tp.Any +) -> Result[tp.Any, ProcessExecutionError]: try: return Ok(cmd.run_tee(**kwargs)) except ProcessExecutionError as err: return Err(err) -def run_fg(cmd: BaseCommand, - **kwargs: tp.Any) -> Result[tp.Any, ProcessExecutionError]: +def run_fg(cmd: BaseCommand, **kwargs: tp.Any) -> Result[tp.Any, ProcessExecutionError]: try: return Ok(cmd.run_fg(**kwargs)) except ProcessExecutionError as err: diff --git a/benchbuild/environments/adapters/podman.py b/benchbuild/environments/adapters/podman.py index b99066798..3abf4d43b 100644 --- a/benchbuild/environments/adapters/podman.py +++ b/benchbuild/environments/adapters/podman.py @@ -20,7 +20,6 @@ class 
ContainerCreateError(Exception): - def __init__(self, name: str, message: str): super().__init__() @@ -33,7 +32,7 @@ def save(image_id: str, out_path: str) -> Result[bool, ProcessExecutionError]: LOG.warning("No image exported. Image exists.") return Ok(True) - res = run(bb_podman('save')['-o', out_path, image_id]) + res = run(bb_podman("save")["-o", out_path, image_id]) if isinstance(res, Err): LOG.error("Could not save the image %s to %s.", image_id, out_path) @@ -43,7 +42,7 @@ def save(image_id: str, out_path: str) -> Result[bool, ProcessExecutionError]: def load(load_path: str) -> Result[bool, ProcessExecutionError]: - res = run(bb_podman('load')['-i', load_path]) + res = run(bb_podman("load")["-i", load_path]) if isinstance(res, Err): LOG.error("Could not load the image from %s", load_path) LOG.error("Reason: %s", str(res.unwrap_err())) @@ -52,8 +51,8 @@ def load(load_path: str) -> Result[bool, ProcessExecutionError]: def run_container(name: str) -> Result[str, ProcessExecutionError]: - container_start = bb_podman('container', 'start') - res = run_tee(container_start['-ai', name]) + container_start = bb_podman("container", "start") + res = run_tee(container_start["-ai", name]) if isinstance(res, Err): LOG.error("Could not run the container %s", name) LOG.error("Reason: %s", str(res.unwrap_err())) @@ -62,7 +61,7 @@ def run_container(name: str) -> Result[str, ProcessExecutionError]: def remove_container(container_id: str) -> Result[str, ProcessExecutionError]: - podman_rm = bb_podman('rm') + podman_rm = bb_podman("rm") res = run(podman_rm[container_id]) if isinstance(res, Err): LOG.error("Could not remove the container %s", container_id) @@ -121,7 +120,7 @@ def _create( def start(self, container: model.Container) -> None: if container.name not in self.containers: - raise ValueError('container must be created first') + raise ValueError("container must be created first") self._start(container) @@ -146,15 +145,13 @@ def _start(self, container: model.Container) -> 
None: class PodmanRegistry(ContainerRegistry): - def _create( self, image: model.Image, name: str, args: tp.Sequence[str] ) -> model.Container: mounts = [ - f'type=bind,src={mnt.source},target={mnt.target}' - for mnt in image.mounts + f"type=bind,src={mnt.source},target={mnt.target}" for mnt in image.mounts ] - interactive = bool(CFG['container']['interactive']) + interactive = bool(CFG["container"]["interactive"]) create_cmd = bb_podman('create', '--replace') @@ -164,16 +161,17 @@ def _create( 'PERFMON'] if interactive: - create_cmd = create_cmd['-it', '--entrypoint', '/bin/sh'] + create_cmd = create_cmd["-it", "--entrypoint", "/bin/sh"] if mounts: for mount in mounts: - create_cmd = create_cmd['--mount', mount] + create_cmd = create_cmd["--mount", mount] - if (cfg_mounts := list(CFG['container']['mounts'].value)): + if cfg_mounts := list(CFG["container"]["mounts"].value): for source, target in cfg_mounts: create_cmd = create_cmd[ - '--mount', f'type=bind,src={source},target={target}'] + "--mount", f"type=bind,src={source},target={target}" + ] if interactive: # pylint: disable=import-outside-toplevel @@ -185,19 +183,17 @@ def _create( _DEBUG_CONTAINER_SESSION_INTRO.format( container_name=name, entrypoint=entrypoint, - arguments=' '.join(args) + arguments=" ".join(args), ) ) ) - res = run(create_cmd['--name', name, image.name]) + res = run(create_cmd["--name", name, image.name]) else: - res = run(create_cmd['--name', name, image.name][args]) + res = run(create_cmd["--name", name, image.name][args]) if isinstance(res, Err): - LOG.error( - "Could not create the container %s from %s", name, image.name - ) + LOG.error("Could not create the container %s from %s", name, image.name) LOG.error("Reason: %s", str(res.unwrap_err())) raise ContainerCreateError(name, " ".join(res.unwrap_err().argv)) @@ -206,19 +202,19 @@ def _create( # The first ID is the old (replaced) container. # The second ID is the new container. 
container_id = res.unwrap() - new_container_id = container_id.split('\n')[-1] + new_container_id = container_id.split("\n")[-1] - return model.Container(new_container_id, image, '', name) + return model.Container(new_container_id, image, "", name) def _start(self, container: model.Container) -> None: container_id = container.container_id - container_start = bb_podman('container', 'start') - interactive = bool(CFG['container']['interactive']) + container_start = bb_podman("container", "start") + interactive = bool(CFG["container"]["interactive"]) if interactive: - res = run_fg(container_start['-ai', container_id]) + res = run_fg(container_start["-ai", container_id]) else: - res = run_tee(container_start['-ai', container_id]) + res = run_tee(container_start["-ai", container_id]) if isinstance(res, Err): LOG.error("Could not start the container %s", container_id) @@ -226,8 +222,7 @@ def _start(self, container: model.Container) -> None: container.events.append( events.ContainerStartFailed( - container.name, container_id, - " ".join(res.unwrap_err().argv) + container.name, container_id, " ".join(res.unwrap_err().argv) ) ) else: diff --git a/benchbuild/environments/bootstrap.py b/benchbuild/environments/bootstrap.py index 340a8520f..041f351ff 100644 --- a/benchbuild/environments/bootstrap.py +++ b/benchbuild/environments/bootstrap.py @@ -18,15 +18,11 @@ def bus() -> Messagebus: containers_uow = uow.PodmanContainerUOW() evt_handlers = { - events.LayerCreated: [ - handlers.bootstrap(ui.print_layer_created, images_uow) - ], - events.ImageCreated: [ - handlers.bootstrap(ui.print_image_created, images_uow) - ], + events.LayerCreated: [handlers.bootstrap(ui.print_layer_created, images_uow)], + events.ImageCreated: [handlers.bootstrap(ui.print_image_created, images_uow)], events.ContainerCreated: [ handlers.bootstrap(ui.print_container_created, containers_uow) - ] + ], } evt_handlers[events.ContainerStartFailed] = [ @@ -39,29 +35,35 @@ def bus() -> Messagebus: 
evt_handlers[events.LayerCreationFailed] = [ handlers.bootstrap(ui.print_layer_creation_failed, images_uow) ] - evt_handlers[events.LayerCreated - ] = [handlers.bootstrap(ui.print_layer_created, images_uow)] + evt_handlers[events.LayerCreated] = [ + handlers.bootstrap(ui.print_layer_created, images_uow) + ] - evt_handlers[events.DebugImageKept - ] = [handlers.bootstrap(debug.debug_image_kept, images_uow)] + evt_handlers[events.DebugImageKept] = [ + handlers.bootstrap(debug.debug_image_kept, images_uow) + ] evt_handlers[events.ImageCreationFailed] = [ handlers.bootstrap(ui.print_image_creation_failed, images_uow) ] cmd_handlers = { - commands.CreateImage: - handlers.bootstrap(handlers.create_image, images_uow), - commands.CreateBenchbuildBase: - handlers.bootstrap(handlers.create_benchbuild_base, images_uow), - commands.RunProjectContainer: - handlers.bootstrap(handlers.run_project_container, containers_uow), - commands.ExportImage: - handlers.bootstrap(handlers.export_image_handler, images_uow), - commands.ImportImage: - handlers.bootstrap(handlers.import_image_handler, images_uow), - commands.DeleteImage: - handlers.bootstrap(handlers.delete_image_handler, images_uow), + commands.CreateImage: handlers.bootstrap(handlers.create_image, images_uow), + commands.CreateBenchbuildBase: handlers.bootstrap( + handlers.create_benchbuild_base, images_uow + ), + commands.RunProjectContainer: handlers.bootstrap( + handlers.run_project_container, containers_uow + ), + commands.ExportImage: handlers.bootstrap( + handlers.export_image_handler, images_uow + ), + commands.ImportImage: handlers.bootstrap( + handlers.import_image_handler, images_uow + ), + commands.DeleteImage: handlers.bootstrap( + handlers.delete_image_handler, images_uow + ), } return partial(messagebus.handle, cmd_handlers, evt_handlers) diff --git a/benchbuild/environments/domain/commands.py b/benchbuild/environments/domain/commands.py index 3f6cb3a9c..53d6c87e2 100644 --- 
a/benchbuild/environments/domain/commands.py +++ b/benchbuild/environments/domain/commands.py @@ -12,11 +12,11 @@ def fs_compliant_name(name: str) -> str: Convert a name to a valid filename. """ value = str(name) - value = unicodedata.normalize('NFKD', - value).encode('ascii', - 'ignore').decode('ascii') - value = re.sub(r'[^\w\s-]', '', value.lower()) - return re.sub(r'[-\s]+', '-', value).strip('-_') + value = ( + unicodedata.normalize("NFKD", value).encode("ascii", "ignore").decode("ascii") + ) + value = re.sub(r"[^\w\s-]", "", value.lower()) + return re.sub(r"[-\s]+", "-", value).strip("-_") def oci_compliant_name(name: str) -> str: diff --git a/benchbuild/environments/domain/declarative.py b/benchbuild/environments/domain/declarative.py index 5c2ea1582..0a6efe1d4 100644 --- a/benchbuild/environments/domain/declarative.py +++ b/benchbuild/environments/domain/declarative.py @@ -40,9 +40,9 @@ def base(self) -> str: layers = [l for l in self if isinstance(l, model.FromLayer)] if layers: return layers.pop(0).base - return '' + return "" - def env(self, **kwargs: str) -> 'ContainerImage': + def env(self, **kwargs: str) -> "ContainerImage": """ Create an environment layer in this image. @@ -55,7 +55,7 @@ def env(self, **kwargs: str) -> 'ContainerImage': self.append(model.UpdateEnv(kwargs)) return self - def from_(self, base_image: str) -> 'ContainerImage': + def from_(self, base_image: str) -> "ContainerImage": """ Specify a new base layer for this image. @@ -67,7 +67,7 @@ def from_(self, base_image: str) -> 'ContainerImage': self.append(model.FromLayer(base_image)) return self - def context(self, func: tp.Callable[[], None]) -> 'ContainerImage': + def context(self, func: tp.Callable[[], None]) -> "ContainerImage": """ Interact with the build context of the container. 
@@ -84,7 +84,7 @@ def context(self, func: tp.Callable[[], None]) -> 'ContainerImage': self.append(model.ContextLayer(func)) return self - def add(self, sources: tp.Iterable[str], tgt: str) -> 'ContainerImage': + def add(self, sources: tp.Iterable[str], tgt: str) -> "ContainerImage": """ Add given files from the source to the container image. @@ -97,7 +97,7 @@ def add(self, sources: tp.Iterable[str], tgt: str) -> 'ContainerImage': self.append(model.AddLayer(tuple(sources), tgt)) return self - def copy_(self, sources: tp.Iterable[str], tgt: str) -> 'ContainerImage': + def copy_(self, sources: tp.Iterable[str], tgt: str) -> "ContainerImage": """ Copy given files from the source to the container image. @@ -110,7 +110,7 @@ def copy_(self, sources: tp.Iterable[str], tgt: str) -> 'ContainerImage': self.append(model.CopyLayer(tuple(sources), tgt)) return self - def run(self, command: str, *args: str, **kwargs: str) -> 'ContainerImage': + def run(self, command: str, *args: str, **kwargs: str) -> "ContainerImage": """ Run a command in the container image. @@ -125,7 +125,7 @@ def run(self, command: str, *args: str, **kwargs: str) -> 'ContainerImage': self.append(model.RunLayer(command, args, kwargs)) return self - def workingdir(self, directory: str) -> 'ContainerImage': + def workingdir(self, directory: str) -> "ContainerImage": """ Change the working directory in the container. @@ -140,7 +140,7 @@ def workingdir(self, directory: str) -> 'ContainerImage': self.append(model.WorkingDirectory(directory)) return self - def entrypoint(self, *args: str) -> 'ContainerImage': + def entrypoint(self, *args: str) -> "ContainerImage": """ Set the entrypoint of the container. @@ -154,7 +154,7 @@ def entrypoint(self, *args: str) -> 'ContainerImage': self.append(model.EntryPoint(args)) return self - def command(self, *args: str) -> 'ContainerImage': + def command(self, *args: str) -> "ContainerImage": """ Set the default command the container runs. 
@@ -168,12 +168,25 @@ def command(self, *args: str) -> 'ContainerImage': DEFAULT_BASES: tp.Dict[str, ContainerImage] = { - 'benchbuild:alpine': ContainerImage() \ - .from_("docker.io/alpine:3.17") \ - .run('apk', 'update') \ - .run('apk', 'add', 'python3', 'python3-dev', 'postgresql-dev', - 'linux-headers', 'musl-dev', 'git', 'gcc', 'g++', - 'sqlite-libs', 'libgit2-dev', 'libffi-dev', 'py3-pip') + "benchbuild:alpine": ContainerImage() + .from_("docker.io/alpine:3.17") + .run("apk", "update") + .run( + "apk", + "add", + "python3", + "python3-dev", + "postgresql-dev", + "linux-headers", + "musl-dev", + "git", + "gcc", + "g++", + "sqlite-libs", + "libgit2-dev", + "libffi-dev", + "py3-pip", + ) } @@ -194,34 +207,29 @@ def add_benchbuild_layers(layers: ContainerImage) -> ContainerImage: Returns: the modified container image. """ - crun = str(CFG['container']['runtime']) - src_dir = str(CFG['container']['source']) - tgt_dir = '/benchbuild' + crun = str(CFG["container"]["runtime"]) + src_dir = str(CFG["container"]["source"]) + tgt_dir = "/benchbuild" def from_source(image: ContainerImage) -> None: - LOG.debug('BenchBuild will be installed from source.') + LOG.debug("BenchBuild will be installed from source.") - mount = f'type=bind,src={src_dir},target={tgt_dir}' + mount = f"type=bind,src={src_dir},target={tgt_dir}" if buildah_version() >= (1, 24, 0): - mount += ',rw' + mount += ",rw" # The image requires git, pip and a working python3.7 or better. 
- image.run('mkdir', f'{tgt_dir}', runtime=crun) - image.run('pip3', 'install', 'setuptools', runtime=crun) + image.run("mkdir", f"{tgt_dir}", runtime=crun) + image.run("pip3", "install", "setuptools", runtime=crun) image.run( - 'pip3', - 'install', - '--ignore-installed', - tgt_dir, - mount=mount, - runtime=crun + "pip3", "install", "--ignore-installed", tgt_dir, mount=mount, runtime=crun ) def from_pip(image: ContainerImage) -> None: - LOG.debug('installing benchbuild from pip release.') - image.run('pip3', 'install', 'benchbuild', runtime=crun) + LOG.debug("installing benchbuild from pip release.") + image.run("pip3", "install", "benchbuild", runtime=crun) - if bool(CFG['container']['from_source']): + if bool(CFG["container"]["from_source"]): from_source(layers) else: from_pip(layers) diff --git a/benchbuild/environments/domain/model.py b/benchbuild/environments/domain/model.py index 1c6001ad1..12b3f0425 100644 --- a/benchbuild/environments/domain/model.py +++ b/benchbuild/environments/domain/model.py @@ -53,7 +53,7 @@ class FromLayer(Layer): base: str = attr.ib() def __str__(self) -> str: - return f'FROM {self.base}' + return f"FROM {self.base}" @attr.s(frozen=True) @@ -62,8 +62,8 @@ class AddLayer(Layer): destination: str = attr.ib() def __str__(self) -> str: - sources = ' '.join(self.sources) - return f'ADD {sources} self.destination' + sources = " ".join(self.sources) + return f"ADD {sources} {self.destination}" @attr.s(frozen=True) @@ -72,13 +72,11 @@ class CopyLayer(Layer): destination: str = attr.ib() def __str__(self) -> str: - sources = ' '.join(self.sources) - return f'COPY {sources} {self.destination}' + sources = " ".join(self.sources) + return f"COPY {sources} {self.destination}" -def immutable_kwargs( - kwargs: tp.Dict[str, str] -) -> tp.Tuple[tp.Tuple[str, str], ...]: +def immutable_kwargs(kwargs: tp.Dict[str, str]) -> tp.Tuple[tp.Tuple[str, str], ...]: """ Convert str-typed kwargs into a hashable tuple. 
""" @@ -89,12 +87,11 @@ def immutable_kwargs( class RunLayer(Layer): command: str = attr.ib() args: tp.Tuple[str, ...] = attr.ib() - kwargs: tp.Tuple[tp.Tuple[str, str], - ...] = attr.ib(converter=immutable_kwargs) + kwargs: tp.Tuple[tp.Tuple[str, str], ...] = attr.ib(converter=immutable_kwargs) def __str__(self) -> str: - args = ' '.join(self.args) - return f'RUN {self.command} {args}' + args = " ".join(self.args) + return f"RUN {self.command} {args}" @attr.s(frozen=True) @@ -102,7 +99,7 @@ class ContextLayer(Layer): func: tp.Callable[[], None] = attr.ib() def __str__(self) -> str: - return 'CONTEXT custom build context modification' + return "CONTEXT custom build context modification" @attr.s(frozen=True) @@ -110,7 +107,7 @@ class UpdateEnv(Layer): env: tp.Tuple[tp.Tuple[str, str], ...] = attr.ib(converter=immutable_kwargs) def __str__(self) -> str: - return f'ENV {len(self.env)} entries' + return f"ENV {len(self.env)} entries" @attr.s(frozen=True) @@ -118,7 +115,7 @@ class WorkingDirectory(Layer): directory: str = attr.ib() def __str__(self) -> str: - return f'CWD {self.directory}' + return f"CWD {self.directory}" @attr.s(frozen=True) @@ -126,8 +123,8 @@ class EntryPoint(Layer): command: tp.Tuple[str, ...] = attr.ib() def __str__(self) -> str: - command = ' '.join(self.command) - return f'ENTRYPOINT {command}' + command = " ".join(self.command) + return f"ENTRYPOINT {command}" @attr.s(frozen=True) @@ -135,8 +132,8 @@ class SetCommand(Layer): command: tp.Tuple[str, ...] 
= attr.ib() def __str__(self) -> str: - command = ' '.join(self.command) - return f'CMD {command}' + command = " ".join(self.command) + return f"CMD {command}" @attr.s(frozen=True) @@ -145,7 +142,7 @@ class Mount: target: str = attr.ib() def __str__(self) -> str: - return f'{self.source}:{self.target}' + return f"{self.source}:{self.target}" @attr.s(eq=False) @@ -171,13 +168,12 @@ def present(self, layer: Layer) -> None: self.layer_index[layer] = LayerState.PRESENT def is_present(self, layer: Layer) -> bool: - return layer in self.layer_index and self.layer_index[ - layer] == LayerState.PRESENT + return ( + layer in self.layer_index and self.layer_index[layer] == LayerState.PRESENT + ) def is_complete(self) -> bool: - return all([ - state == LayerState.PRESENT for state in self.layer_index.values() - ]) + return all([state == LayerState.PRESENT for state in self.layer_index.values()]) def prepend(self, layer: Layer) -> None: old_layers = self.layers diff --git a/benchbuild/environments/entrypoints/cli.py b/benchbuild/environments/entrypoints/cli.py index 9d3a106d7..35dcf3cba 100644 --- a/benchbuild/environments/entrypoints/cli.py +++ b/benchbuild/environments/entrypoints/cli.py @@ -34,43 +34,50 @@ class BenchBuildContainerRun(cli.Application): experiment_args: tp.List[str] = [] group_args: tp.List[str] = [] - @cli.switch(["-E", "--experiment"], - str, - list=True, - help="Specify experiments to run") + @cli.switch( + ["-E", "--experiment"], str, list=True, help="Specify experiments to run" + ) def set_experiments(self, names: tp.List[str]) -> None: self.experiment_args = names - @cli.switch(["-G", "--group"], - str, - list=True, - requires=["--experiment"], - help="Run a group of projects under the given experiments") + @cli.switch( + ["-G", "--group"], + str, + list=True, + requires=["--experiment"], + help="Run a group of projects under the given experiments", + ) def set_group(self, groups: tp.List[str]) -> None: self.group_args = groups - image_export = 
cli.Flag(['export'], - default=False, - help="Export container images to EXPORT_DIR") + image_export = cli.Flag( + ["export"], default=False, help="Export container images to EXPORT_DIR" + ) - image_import = cli.Flag(['import'], - default=False, - help="Import container images from EXPORT_DIR") + image_import = cli.Flag( + ["import"], default=False, help="Import container images from EXPORT_DIR" + ) - replace = cli.Flag(['replace'], - default=False, - requires=['experiment'], - help='Replace existing container images.') + replace = cli.Flag( + ["replace"], + default=False, + requires=["experiment"], + help="Replace existing container images.", + ) - debug = cli.Flag(['debug'], - default=False, - requires=['experiment'], - help='Debug failed image builds interactively.') + debug = cli.Flag( + ["debug"], + default=False, + requires=["experiment"], + help="Debug failed image builds interactively.", + ) - interactive = cli.Flag(['interactive'], - default=False, - requires=['experiment', 'debug'], - help='Run a container interactively.') + interactive = cli.Flag( + ["interactive"], + default=False, + requires=["experiment", "debug"], + help="Run a container interactively.", + ) no_mount_build_dir = cli.Flag( ['no-mount-build-dir'], @@ -86,13 +93,13 @@ def main(self, *projects: str) -> int: plugins.discover() if self.replace: - CFG['container']['replace'] = self.replace + CFG["container"]["replace"] = self.replace if self.debug: - CFG['container']['keep'] = self.debug + CFG["container"]["keep"] = self.debug if self.interactive: - CFG['container']['interactive'] = self.interactive + CFG["container"]["interactive"] = self.interactive cli_experiments = self.experiment_args cli_groups = self.group_args @@ -135,7 +142,7 @@ def main(self, *projects: str) -> int: def run_tasks() -> None: for name, task in tasks.items(): - print(f'Working on: {name}') + print(f"Working on: {name}") task() if not self.debug: @@ -152,40 +159,44 @@ class BenchBuildContainerBase(cli.Application): 
""" Prepare all base images for the selected projects and experiments. """ + experiment_args: tp.List[str] = [] group_args: tp.List[str] = [] - @cli.switch(["-E", "--experiment"], - str, - list=True, - help="Specify experiments to run") + @cli.switch( + ["-E", "--experiment"], str, list=True, help="Specify experiments to run" + ) def set_experiments(self, names: tp.List[str]) -> None: self.experiment_args = names - @cli.switch(["-G", "--group"], - str, - list=True, - requires=["--experiment"], - help="Run a group of projects under the given experiments") + @cli.switch( + ["-G", "--group"], + str, + list=True, + requires=["--experiment"], + help="Run a group of projects under the given experiments", + ) def set_group(self, groups: tp.List[str]) -> None: self.group_args = groups - image_export = cli.Flag(['export'], - default=False, - help="Export container images to EXPORT_DIR") - image_import = cli.Flag(['import'], - default=False, - help="Import container images from EXPORT_DIR") - debug = cli.Flag(['debug'], - default=False, - requires=['experiment'], - help='Debug failed image builds interactively.') + image_export = cli.Flag( + ["export"], default=False, help="Export container images to EXPORT_DIR" + ) + image_import = cli.Flag( + ["import"], default=False, help="Import container images from EXPORT_DIR" + ) + debug = cli.Flag( + ["debug"], + default=False, + requires=["experiment"], + help="Debug failed image builds interactively.", + ) def main(self, *projects: str) -> int: plugins.discover() if self.debug: - CFG['container']['keep'] = self.debug + CFG["container"]["keep"] = self.debug cli_experiments = self.experiment_args cli_groups = self.group_args @@ -203,18 +214,20 @@ def main(self, *projects: str) -> int: return -2 tasks = { - "Base images": - partial( - create_base_images, wanted_experiments, wanted_projects, - self.image_export, self.image_import - ), + "Base images": partial( + create_base_images, + wanted_experiments, + wanted_projects, + 
self.image_export, + self.image_import, + ), } console = rich.get_console() def run_tasks() -> None: for name, task in tasks.items(): - print(f'Working on: {name}') + print(f"Working on: {name}") task() if not self.debug: @@ -231,27 +244,29 @@ class BenchBuildContainerRemoveImages(cli.Application): """ Prepare all base images for the selected projects and experiments. """ + experiment_args: tp.List[str] = [] group_args: tp.List[str] = [] - @cli.switch(["-E", "--experiment"], - str, - list=True, - help="Specify experiments to run") + @cli.switch( + ["-E", "--experiment"], str, list=True, help="Specify experiments to run" + ) def set_experiments(self, names: tp.List[str]) -> None: self.experiment_args = names - @cli.switch(["-G", "--group"], - str, - list=True, - requires=["--experiment"], - help="Run a group of projects under the given experiments") + @cli.switch( + ["-G", "--group"], + str, + list=True, + requires=["--experiment"], + help="Run a group of projects under the given experiments", + ) def set_group(self, groups: tp.List[str]) -> None: self.group_args = groups - delete_project_images = cli.Flag(['with-projects'], - default=False, - help="Delete project images too") + delete_project_images = cli.Flag( + ["with-projects"], default=False, help="Delete project images too" + ) def main(self, *projects: str) -> int: plugins.discover() @@ -272,18 +287,19 @@ def main(self, *projects: str) -> int: return -1 tasks = { - "Remove selected images": - partial( - remove_images, wanted_experiments, wanted_projects, - self.delete_project_images - ) + "Remove selected images": partial( + remove_images, + wanted_experiments, + wanted_projects, + self.delete_project_images, + ) } console = rich.get_console() def run_tasks() -> None: for name, task in tasks.items(): - print(f'Working on: {name}') + print(f"Working on: {name}") task() with console.status("[bold green]Deleting images."): @@ -293,8 +309,9 @@ def run_tasks() -> None: def cli_process( - cli_experiments: 
tp.Iterable[str], cli_projects: tp.Iterable[str], - cli_groups: tp.Iterable[str] + cli_experiments: tp.Iterable[str], + cli_projects: tp.Iterable[str], + cli_groups: tp.Iterable[str], ) -> tp.Tuple[ExperimentIndex, ProjectIndex]: """ Shared CLI processing of projects/experiment selection. @@ -306,13 +323,13 @@ def cli_process( if name in set(cli_experiments) } unknown_experiments = [ - name for name in cli_experiments + name + for name in cli_experiments if name not in set(discovered_experiments.keys()) ] if unknown_experiments: print( - 'Could not find ', str(unknown_experiments), - ' in the experiment registry.' + "Could not find ", str(unknown_experiments), " in the experiment registry." ) wanted_projects = project.populate(list(cli_projects), list(cli_groups)) @@ -331,9 +348,7 @@ def enumerate_projects( yield prj else: version = make_version_tag(prj.revision) - image_tag = make_image_name( - f'{prj.name}/{prj.group}', version - ) + image_tag = make_image_name(f"{prj.name}/{prj.group}", version) rich.get_console().print( f"Skipping empty container image declaration for: {image_tag}" @@ -341,11 +356,11 @@ def enumerate_projects( def make_version_tag(revision: source.Revision) -> str: - return '-'.join([str(v) for v in revision.variants]) + return "-".join([str(v) for v in revision.variants]) def make_image_name(name: str, tag: str) -> str: - return commands.oci_compliant_name(f'{name}:{tag}') + return commands.oci_compliant_name(f"{name}:{tag}") def export_image(image_name: str) -> None: @@ -354,9 +369,7 @@ def export_image(image_name: str) -> None: """ publish = bootstrap.bus() export_name = commands.fs_compliant_name(image_name) - export_path = local.path( - CFG["container"]["export"].value - ) / export_name + ".tar" + export_path = local.path(CFG["container"]["export"].value) / export_name + ".tar" publish(commands.ExportImage(image_name, str(export_path))) @@ -366,15 +379,15 @@ def import_image(image_name: str) -> None: """ publish = bootstrap.bus() 
import_name = commands.fs_compliant_name(image_name) - import_path = local.path( - CFG["container"]["import"].value - ) / import_name + ".tar" + import_path = local.path(CFG["container"]["import"].value) / import_name + ".tar" publish(commands.ImportImage(image_name, str(import_path))) def create_base_images( - experiments: ExperimentIndex, projects: ProjectIndex, do_export: bool, - do_import: bool + experiments: ExperimentIndex, + projects: ProjectIndex, + do_export: bool, + do_import: bool, ) -> None: """ Create base images requested by all selected projects. @@ -396,7 +409,7 @@ def create_base_images( if do_import: import_image(image.base) - if not image.base in declarative.DEFAULT_BASES: + if image.base not in declarative.DEFAULT_BASES: continue layers = declarative.DEFAULT_BASES[image.base] @@ -417,12 +430,10 @@ def __pull_sources_in_context(prj: project.Project) -> None: src.version(local.cwd, str(version)) -BB_APP_ROOT: str = '/app' +BB_APP_ROOT: str = "/app" -def create_project_images( - experiments: ExperimentIndex, projects: ProjectIndex -) -> None: +def create_project_images(experiments: ExperimentIndex, projects: ProjectIndex) -> None: """ Create project images for all selected projects. 
@@ -438,7 +449,7 @@ def create_project_images( for prj in enumerate_projects(experiments, projects): version = make_version_tag(prj.revision) - image_tag = make_image_name(f'{prj.name}/{prj.group}', version) + image_tag = make_image_name(f"{prj.name}/{prj.group}", version) layers = prj.container layers.context(partial(__pull_sources_in_context, prj)) @@ -487,18 +498,16 @@ def create_experiment_images( for exp in enumerate_experiments(experiments, projects): for prj in exp.projects: version = make_version_tag(prj.revision) - base_tag = make_image_name(f'{prj.name}/{prj.group}', version) - image_tag = make_image_name( - f'{exp.name}/{prj.name}/{prj.group}', version - ) + base_tag = make_image_name(f"{prj.name}/{prj.group}", version) + image_tag = make_image_name(f"{exp.name}/{prj.name}/{prj.group}", version) image = declarative.ContainerImage().from_(base_tag) image.extend(exp.container) image.env(BB_PLUGINS_EXPERIMENTS=f'["{exp.__module__}"]') - verbosity = int(settings.CFG['verbosity']) - image.env(BB_VERBOSITY=f'{verbosity}') + verbosity = int(settings.CFG["verbosity"]) + image.env(BB_VERBOSITY=f"{verbosity}") - image.entrypoint('benchbuild', 'run', '-E', exp.name, str(prj.id)) + image.entrypoint("benchbuild", "run", "-E", exp.name, str(prj.id)) publish(commands.CreateImage(image_tag, image)) @@ -523,11 +532,9 @@ def run_experiment_images( for exp in enumerate_experiments(experiments, projects): for prj in exp.projects: version = make_version_tag(prj.revision) - image_tag = make_image_name( - f'{exp.name}/{prj.name}/{prj.group}', version - ) + image_tag = make_image_name(f"{exp.name}/{prj.name}/{prj.group}", version) - container_name = f'{exp.name}_{prj.name}_{prj.group}' + container_name = f"{exp.name}_{prj.name}_{prj.group}" publish( commands.RunProjectContainer( @@ -538,8 +545,7 @@ def run_experiment_images( def remove_images( - experiments: ExperimentIndex, projects: ProjectIndex, - delete_project_images: bool + experiments: ExperimentIndex, projects: 
ProjectIndex, delete_project_images: bool ) -> None: """ Remove all selected images from benchbuild's image registry. @@ -549,14 +555,12 @@ def remove_images( for exp in enumerate_experiments(experiments, projects): for prj in exp.projects: version = make_version_tag(prj.revision) - image_tag = make_image_name( - f'{exp.name}/{prj.name}/{prj.group}', version - ) + image_tag = make_image_name(f"{exp.name}/{prj.name}/{prj.group}", version) publish(commands.DeleteImage(image_tag)) if delete_project_images: for prj in enumerate_projects(experiments, projects): version = make_version_tag(prj.revision) - image_tag = make_image_name(f'{prj.name}/{prj.group}', version) + image_tag = make_image_name(f"{prj.name}/{prj.group}", version) publish(commands.DeleteImage(image_tag)) diff --git a/benchbuild/environments/service_layer/debug.py b/benchbuild/environments/service_layer/debug.py index d08d0b070..437e44077 100644 --- a/benchbuild/environments/service_layer/debug.py +++ b/benchbuild/environments/service_layer/debug.py @@ -26,24 +26,25 @@ def debug_image_kept( """ # pylint: disable=import-outside-toplevel from rich.markdown import Markdown + with uow: container = uow.create(event.image_name, event.failed_image_name) if container is None: - raise ValueError('Unable to create debug session.') + raise ValueError("Unable to create debug session.") print( Markdown( DEBUG_SESSION_INTRO.format( image_name=event.image_name, failed_image_name=event.failed_image_name, - layer_name=event.failed_layer + layer_name=event.failed_layer, ) ) ) - run_shell = bb_buildah('run')[container.container_id, '--', '/bin/sh'] + run_shell = bb_buildah("run")[container.container_id, "--", "/bin/sh"] try: run_shell.run_fg() uow.commit() except ProcessExecutionError as ex: - print(f'Debug session ended with retcode: {ex.retcode}') - print('[red]No image will be stored![/red]') + print(f"Debug session ended with retcode: {ex.retcode}") + print("[red]No image will be stored![/red]") diff --git 
a/benchbuild/environments/service_layer/handlers.py b/benchbuild/environments/service_layer/handlers.py index 06a14c1c9..015634d18 100644 --- a/benchbuild/environments/service_layer/handlers.py +++ b/benchbuild/environments/service_layer/handlers.py @@ -10,33 +10,28 @@ LOG = logging.getLogger(__name__) MessageHandler = tp.Callable[[unit_of_work.EventCollector, model.Message], None] -MessageHandlerWithUOW = tp.Callable[[model.Message], tp.Generator[model.Message, - None, None]] +MessageHandlerWithUOW = tp.Callable[ + [model.Message], tp.Generator[model.Message, None, None] +] -def bootstrap( - handler, uow: unit_of_work.EventCollector -) -> MessageHandlerWithUOW: +def bootstrap(handler, uow: unit_of_work.EventCollector) -> MessageHandlerWithUOW: """ Bootstrap prepares a message handler with a unit of work. """ - def wrapped_handler( - msg: model.Message - ) -> tp.Generator[model.Message, None, None]: + def wrapped_handler(msg: model.Message) -> tp.Generator[model.Message, None, None]: handler(uow, msg) return uow.collect_new_events() return wrapped_handler -def create_image( - uow: unit_of_work.ImageUnitOfWork, cmd: commands.CreateImage -) -> None: +def create_image(uow: unit_of_work.ImageUnitOfWork, cmd: commands.CreateImage) -> None: """ Create a container image using a pre-configured registry. 
""" - replace = CFG['container']['replace'] + replace = CFG["container"]["replace"] with uow: image = uow.registry.find(cmd.name) if image and not replace: diff --git a/benchbuild/environments/service_layer/messagebus.py b/benchbuild/environments/service_layer/messagebus.py index 777251cf8..f2de10df5 100644 --- a/benchbuild/environments/service_layer/messagebus.py +++ b/benchbuild/environments/service_layer/messagebus.py @@ -11,13 +11,11 @@ Message = tp.Union[model.Command, model.Event] Messages = tp.List[Message] -#EventHandlerT = tp.Callable[[events.Event, unit_of_work.AbstractUnitOfWork], -EventHandlerT = tp.Callable[[model.Event], tp.Generator[model.Event, None, - None]] -#CommandHandlerT = tp.Callable[ +# EventHandlerT = tp.Callable[[events.Event, unit_of_work.AbstractUnitOfWork], +EventHandlerT = tp.Callable[[model.Event], tp.Generator[model.Event, None, None]] +# CommandHandlerT = tp.Callable[ # [commands.Command, unit_of_work.AbstractUnitOfWork], str] -CommandHandlerT = tp.Callable[[model.Command], tp.Generator[model.Event, None, - None]] +CommandHandlerT = tp.Callable[[model.Command], tp.Generator[model.Event, None, None]] MessageT = tp.Union[tp.Type[model.Command], tp.Type[model.Event]] @@ -46,12 +44,10 @@ def handle( elif isinstance(message, model.Command): _handle_command(cmd_handlers, message, queue) else: - raise Exception(f'{message} was not an Event or Command') + raise Exception(f"{message} was not an Event or Command") -def _handle_event( - handlers: EventHandlers, event: model.Event, queue: Messages -) -> None: +def _handle_event(handlers: EventHandlers, event: model.Event, queue: Messages) -> None: """ Invokes all registered event handlers for this event. 
@@ -65,7 +61,7 @@ def _handle_event( try: queue.extend(handler(event)) except Exception: - LOG.exception('Exception handling event %s', event) + LOG.exception("Exception handling event %s", event) continue @@ -85,10 +81,12 @@ def _handle_command( handler = handlers[type(command)] queue.extend(handler(command)) except ensure.ImageNotFound as ex: - print(( - 'Command could not be executed, because I could not find a required' - f' image: {ex}' - )) + print( + ( + "Command could not be executed, because I could not find a required" + f" image: {ex}" + ) + ) except Exception: - LOG.exception('Exception handling command %s', command) + LOG.exception("Exception handling command %s", command) raise diff --git a/benchbuild/environments/service_layer/ui.py b/benchbuild/environments/service_layer/ui.py index 0706cf4da..159b7f9d6 100644 --- a/benchbuild/environments/service_layer/ui.py +++ b/benchbuild/environments/service_layer/ui.py @@ -7,47 +7,49 @@ def print_image_created( _: unit_of_work.ImageUnitOfWork, event: events.ImageCreated ) -> None: - print(f'Building {event.name}') + print(f"Building {event.name}") def print_image_creation_failed( _: unit_of_work.ImageUnitOfWork, event: events.ImageCreationFailed ) -> None: - print(f'[red]Image creation failed for [bold]{event.name}[/bold][/red]') - print(f'[red]Reason given: {event.reason}[/red]') + print(f"[red]Image creation failed for [bold]{event.name}[/bold][/red]") + print(f"[red]Reason given: {event.reason}[/red]") def print_layer_created( _: unit_of_work.ImageUnitOfWork, event: events.LayerCreated ) -> None: - print(f'[bold]{event.image_tag}[/bold] {event.name}') + print(f"[bold]{event.image_tag}[/bold] {event.name}") def print_container_created( _: unit_of_work.ContainerUnitOfWork, event: events.ContainerCreated ) -> None: - print(f'Created {event.name} for image: {event.image_id}') + print(f"Created {event.name} for image: {event.image_id}") def print_layer_creation_failed( _: unit_of_work.ImageUnitOfWork, event: 
events.LayerCreationFailed ) -> None: - print(( - f'[bold]{event.name}[/bold]\n' - f'[red]Failed to create layer while building {event.image_tag}.[/red]\n' - )) - print(f'[red]{event.message}[/red]') + print( + ( + f"[bold]{event.name}[/bold]\n" + f"[red]Failed to create layer while building {event.image_tag}.[/red]\n" + ) + ) + print(f"[red]{event.message}[/red]") def print_container_start_failed( _: unit_of_work.ContainerUnitOfWork, event: events.ContainerStartFailed ) -> None: - print(f'[bold]{event.name}[/bold] [red]start of container failed.[/red]\n') - print(f'Container: {event.container_id}') - print(f'Command: {event.message}') + print(f"[bold]{event.name}[/bold] [red]start of container failed.[/red]\n") + print(f"Container: {event.container_id}") + print(f"Command: {event.message}") def print_container_started( _: unit_of_work.ContainerUnitOfWork, event: events.ContainerStarted ) -> None: - print(f'[bold]{event.container_id}[/bold] [green]started[/green]') + print(f"[bold]{event.container_id}[/bold] [green]started[/green]") diff --git a/benchbuild/environments/service_layer/unit_of_work.py b/benchbuild/environments/service_layer/unit_of_work.py index b32efeb93..d82147e0e 100644 --- a/benchbuild/environments/service_layer/unit_of_work.py +++ b/benchbuild/environments/service_layer/unit_of_work.py @@ -1,6 +1,5 @@ import abc import logging -import sys import typing as tp from typing import Protocol @@ -15,13 +14,10 @@ class EventCollector(Protocol): - - def collect_new_events(self) -> tp.Generator[model.Message, None, None]: - ... + def collect_new_events(self) -> tp.Generator[model.Message, None, None]: ... 
class UnitOfWork(abc.ABC): - @abc.abstractmethod def commit(self) -> None: raise NotImplementedError @@ -45,7 +41,7 @@ def collect_new_events(self) -> tp.Generator[model.Message, None, None]: evt = self.events.pop(0) yield evt - def __enter__(self) -> 'ImageUnitOfWork': + def __enter__(self) -> "ImageUnitOfWork": return self def __exit__(self, *args: tp.Any) -> None: @@ -103,7 +99,6 @@ def _commit(self, container: model.Container) -> None: class BuildahImageUOW(ImageUnitOfWork): - def __init__(self) -> None: self.registry = buildah.BuildahImageRegistry() @@ -112,7 +107,7 @@ def _create(self, tag: str, from_: str) -> model.MaybeContainer: return self.registry.create(tag, from_layer) def _destroy(self, tag: str) -> None: - common.run(common.bb_buildah('rmi')['-f', tag]) + common.run(common.bb_buildah("rmi")["-f", tag]) def _export_image(self, image_id: str, out_path: str) -> None: podman.save(image_id, out_path) @@ -123,8 +118,7 @@ def _import_image(self, import_path: str) -> None: def _commit(self, container: model.Container) -> None: image = container.image res = common.run( - common.bb_buildah('commit')[container.container_id, - image.name.lower()] + common.bb_buildah("commit")[container.container_id, image.name.lower()] ) if isinstance(res, Err): @@ -132,7 +126,7 @@ def _commit(self, container: model.Container) -> None: LOG.error("Reason: %s", str(res.unwrap_err)) def _rollback(self, container: model.Container) -> None: - buildah.run(buildah.bb_buildah('rm')[container.container_id]) + buildah.run(buildah.bb_buildah("rm")[container.container_id]) context_path = local.path(container.context) if context_path.exists(): delete(context_path) @@ -141,7 +135,7 @@ def _rollback(self, container: model.Container) -> None: class ContainerUnitOfWork(UnitOfWork): registry: podman.ContainerRegistry - def __enter__(self) -> 'ContainerUnitOfWork': + def __enter__(self) -> "ContainerUnitOfWork": return self def __exit__(self, *args: tp.Any) -> None: @@ -158,9 +152,7 @@ def create( 
return self._create(image_id, name, args) @abc.abstractmethod - def _create( - self, tag: str, name: str, args: tp.Sequence[str] - ) -> model.Container: + def _create(self, tag: str, name: str, args: tp.Sequence[str]) -> model.Container: raise NotImplementedError def start(self, container: model.Container) -> None: @@ -178,13 +170,10 @@ def commit(self) -> None: class PodmanContainerUOW(ContainerUnitOfWork): - def __init__(self) -> None: self.registry = podman.PodmanRegistry() - def _create( - self, tag: str, name: str, args: tp.Sequence[str] - ) -> model.Container: + def _create(self, tag: str, name: str, args: tp.Sequence[str]) -> model.Container: return self.registry.create(tag, name, args) def _start(self, container: model.Container) -> None: @@ -199,7 +188,7 @@ def collect_new_events(self) -> tp.Generator[model.Message, None, None]: while image.events: yield image.events.pop(0) - def __enter__(self) -> 'AbstractUnitOfWork': + def __enter__(self) -> "AbstractUnitOfWork": return self def __exit__(self, *args: tp.Any) -> None: @@ -208,14 +197,10 @@ def __exit__(self, *args: tp.Any) -> None: def add_layer(self, container: model.Container, layer: model.Layer) -> None: self._add_layer(container, layer) - def create_image( - self, tag: str, layers: tp.List[model.Layer] - ) -> model.Container: + def create_image(self, tag: str, layers: tp.List[model.Layer]) -> model.Container: return self._create_image(tag, layers) - def create_container( - self, image_id: str, container_name: str - ) -> model.Container: + def create_container(self, image_id: str, container_name: str) -> model.Container: return self._create_container(image_id, container_name) def export_image(self, image_id: str, out_path: str) -> None: @@ -236,21 +221,15 @@ def rollback(self) -> None: raise NotImplementedError @abc.abstractmethod - def _create_image( - self, tag: str, layers: tp.List[model.Layer] - ) -> model.Container: + def _create_image(self, tag: str, layers: tp.List[model.Layer]) -> 
model.Container: raise NotImplementedError @abc.abstractmethod - def _add_layer( - self, container: model.Container, layer: model.Layer - ) -> None: + def _add_layer(self, container: model.Container, layer: model.Layer) -> None: raise NotImplementedError @abc.abstractmethod - def _create_container( - self, image_id: str, container_name: str - ) -> model.Container: + def _create_container(self, image_id: str, container_name: str) -> model.Container: raise NotImplementedError @abc.abstractmethod diff --git a/benchbuild/experiment.py b/benchbuild/experiment.py index abfec2735..f87d64e33 100644 --- a/benchbuild/experiment.py +++ b/benchbuild/experiment.py @@ -24,6 +24,7 @@ class HelloExperiment(Experiment): ``` """ + import copy import typing as tp import uuid @@ -51,12 +52,17 @@ class ExperimentRegistry(type): experiments = {} def __new__( - mcs: tp.Type[tp.Any], name: str, bases: tp.Tuple[type, ...], - attrs: tp.Dict[str, tp.Any], *args: tp.Any, **kwargs: tp.Any + mcs: tp.Type[tp.Any], + name: str, + bases: tp.Tuple[type, ...], + attrs: tp.Dict[str, tp.Any], + *args: tp.Any, + **kwargs: tp.Any, ) -> tp.Any: """Register a project in the registry.""" - cls = super(ExperimentRegistry, - mcs).__new__(mcs, name, bases, attrs, *args, **kwargs) + cls = super(ExperimentRegistry, mcs).__new__( + mcs, name, bases, attrs, *args, **kwargs + ) if bases and "NAME" in attrs: ExperimentRegistry.experiments[attrs["NAME"]] = cls return cls @@ -91,8 +97,7 @@ class Experiment(metaclass=ExperimentRegistry): NAME: tp.ClassVar[str] = "" SCHEMA = None REQUIREMENTS: tp.List[Requirement] = [] - CONTAINER: tp.ClassVar[declarative.ContainerImage - ] = declarative.ContainerImage() + CONTAINER: tp.ClassVar[declarative.ContainerImage] = declarative.ContainerImage() def __new__(cls, *args, **kwargs): """Create a new experiment instance and set some defaults.""" @@ -129,8 +134,8 @@ def default_id(self) -> uuid.UUID: def validate_id(self, _: tp.Any, new_id: uuid.UUID) -> None: if not 
isinstance(new_id, uuid.UUID): raise TypeError( - "%s expected to be '%s' but got '%s'" % - (str(new_id), str(uuid.UUID), str(type(new_id))) + "%s expected to be '%s' but got '%s'" + % (str(new_id), str(uuid.UUID), str(type(new_id))) ) schema = attr.ib() @@ -180,8 +185,9 @@ def actions(self) -> Actions: actns.Clean(p), actns.MakeBuildDir(p), actns.Echo( - message="Selected {0} with version {1}". - format(p.name, version_str) + message="Selected {0} with version {1}".format( + p.name, version_str + ) ), actns.ProjectEnvironment(p), ] diff --git a/benchbuild/experiments/__init__.py b/benchbuild/experiments/__init__.py index e7b7d5d89..22af738f1 100644 --- a/benchbuild/experiments/__init__.py +++ b/benchbuild/experiments/__init__.py @@ -10,6 +10,7 @@ Any subclass of benchbuild.experiments.Experiment will be automatically registered and made available on the command line. """ + import importlib import logging diff --git a/benchbuild/experiments/empty.py b/benchbuild/experiments/empty.py index 139722185..198e14d81 100644 --- a/benchbuild/experiments/empty.py +++ b/benchbuild/experiments/empty.py @@ -16,9 +16,10 @@ class Empty(Experiment): NAME = "empty" def actions_for_project(self, project): - """ Do nothing. 
""" + """Do nothing.""" project.compiler_extension = run.WithTimeout( - compiler.RunCompiler(project, self)) + compiler.RunCompiler(project, self) + ) return [MakeBuildDir(project), Compile(project), Clean(project)] @@ -30,6 +31,7 @@ class NoMeasurement(Experiment): def actions_for_project(self, project): """Execute all actions but don't do anything as extension.""" project.compiler_extension = run.WithTimeout( - compiler.RunCompiler(project, self)) + compiler.RunCompiler(project, self) + ) project.runtime_extension = run.RuntimeExtension(project, self) return self.default_runtime_actions(project) diff --git a/benchbuild/experiments/raw.py b/benchbuild/experiments/raw.py index b325d20ca..cc38c337f 100644 --- a/benchbuild/experiments/raw.py +++ b/benchbuild/experiments/raw.py @@ -15,6 +15,7 @@ time.system_s - The time spent in kernel space in seconds (aka system time) time.real_s - The time spent overall in seconds (aka Wall clock) """ + from benchbuild.environments.domain.declarative import ContainerImage from benchbuild.experiment import Experiment from benchbuild.extensions import compiler, run, time @@ -30,7 +31,9 @@ def actions_for_project(self, project): """Compile & Run the experiment with -O3 enabled.""" project.cflags = ["-O3", "-fno-omit-frame-pointer"] project.runtime_extension = time.RunWithTime( - run.RuntimeExtension(project, self)) + run.RuntimeExtension(project, self) + ) project.compiler_extension = run.WithTimeout( - compiler.RunCompiler(project, self)) + compiler.RunCompiler(project, self) + ) return self.default_runtime_actions(project) diff --git a/benchbuild/extensions/base.py b/benchbuild/extensions/base.py index 449ebde0e..1aa3b1741 100644 --- a/benchbuild/extensions/base.py +++ b/benchbuild/extensions/base.py @@ -1,6 +1,7 @@ """ Extension base-classes for compile-time and run-time experiments. 
""" + import logging import typing as tp from abc import ABCMeta @@ -35,17 +36,16 @@ class Extension(metaclass=ABCMeta): def __init__( self, - *extensions: 'Extension', + *extensions: "Extension", config: tp.Optional[tp.Dict[str, str]] = None, - **kwargs: tp.Any + **kwargs: tp.Any, ): """Initialize an extension with an arbitrary number of children.""" del kwargs self.next_extensions = extensions self.config = config - def call_next(self, *args: tp.Any, - **kwargs: tp.Any) -> tp.List[run.RunInfo]: + def call_next(self, *args: tp.Any, **kwargs: tp.Any) -> tp.List[run.RunInfo]: """Call all child extensions with the given arguments. This calls all child extensions and collects the results for @@ -73,7 +73,7 @@ def call_next(self, *args: tp.Any, return all_results - def __lshift__(self, rhs: 'Extension') -> 'Extension': + def __lshift__(self, rhs: "Extension") -> "Extension": rhs.next_extensions = [self] return rhs @@ -83,8 +83,9 @@ def print(self, indent: int = 0) -> None: for ext in self.next_extensions: ext.print(indent=indent + 2) - def __call__(self, command: BoundCommand, *args: str, - **kwargs: tp.Any) -> tp.List[run.RunInfo]: + def __call__( + self, command: BoundCommand, *args: str, **kwargs: tp.Any + ) -> tp.List[run.RunInfo]: return self.call_next(*args, **kwargs) def __str__(self) -> str: diff --git a/benchbuild/extensions/compiler.py b/benchbuild/extensions/compiler.py index 5d1498c0a..0cbb87566 100644 --- a/benchbuild/extensions/compiler.py +++ b/benchbuild/extensions/compiler.py @@ -26,10 +26,10 @@ class RunCompiler(base.Extension): def __init__( self, - project: 'Project', - experiment: 'Experiment', + project: "Project", + experiment: "Experiment", *extensions: base.Extension, - config: tp.Optional[tp.Dict[str, str]] = None + config: tp.Optional[tp.Dict[str, str]] = None, ): self.project = project self.experiment = experiment @@ -40,9 +40,9 @@ def __call__( self, command: BoundCommand, *args: str, - project: tp.Optional['Project'] = None, + project: 
tp.Optional["Project"] = None, rerun_on_error: bool = True, - **kwargs: tp.Any + **kwargs: tp.Any, ) -> tp.List[run.RunInfo]: if project: self.project = project @@ -59,17 +59,10 @@ def __call__( run_info = _run() if self.config: LOG.info( - yaml.dump( - self.config, - width=40, - indent=4, - default_flow_style=False - ) + yaml.dump(self.config, width=40, indent=4, default_flow_style=False) ) if CFG["db"]["enabled"]: - db.persist_config( - run_info.db_run, run_info.session, self.config - ) + db.persist_config(run_info.db_run, run_info.session, self.config) if run_info.has_failed: with run.track_execution( diff --git a/benchbuild/extensions/log.py b/benchbuild/extensions/log.py index 1115ae391..80cb68296 100644 --- a/benchbuild/extensions/log.py +++ b/benchbuild/extensions/log.py @@ -9,6 +9,7 @@ class LogTrackingMixin: """Add log-registering capabilities to extensions.""" + _logs = [] def add_log(self, path): diff --git a/benchbuild/extensions/run.py b/benchbuild/extensions/run.py index fbec1ba40..21d7dd5db 100644 --- a/benchbuild/extensions/run.py +++ b/benchbuild/extensions/run.py @@ -30,26 +30,16 @@ def __call__(self, binary_command, *args, **kwargs): self.project.name = kwargs.get("project_name", self.project.name) cmd = binary_command[args] - with run.track_execution( - cmd, self.project, self.experiment, **kwargs - ) as _run: + with run.track_execution(cmd, self.project, self.experiment, **kwargs) as _run: run_info = _run() if self.config: run_info.add_payload("config", self.config) LOG.info( - yaml.dump( - self.config, - width=40, - indent=4, - default_flow_style=False - ) + yaml.dump(self.config, width=40, indent=4, default_flow_style=False) ) - self.config['baseline'] = \ - os.getenv("BB_IS_BASELINE", "False") + self.config["baseline"] = os.getenv("BB_IS_BASELINE", "False") if CFG["db"]["enabled"]: - db.persist_config( - run_info.db_run, run_info.session, self.config - ) + db.persist_config(run_info.db_run, run_info.session, self.config) res = 
self.call_next(binary_command, *args, **kwargs) res.append(run_info) return res @@ -73,9 +63,8 @@ def __init__(self, *extensions, limit="10m", **kwargs): def __call__(self, binary_command, *args, **kwargs): # pylint: disable=import-outside-toplevel from benchbuild.utils.cmd import timeout - return self.call_next( - timeout[self.limit, binary_command], *args, **kwargs - ) + + return self.call_next(timeout[self.limit, binary_command], *args, **kwargs) class SetThreadLimit(base.Extension): @@ -87,7 +76,7 @@ class SetThreadLimit(base.Extension): def __call__(self, binary_command, *args, **kwargs): config = self.config - if config is not None and 'jobs' in config.keys(): + if config is not None and "jobs" in config.keys(): jobs = get_number_of_jobs(config) else: LOG.warning("Parameter 'config' was unusable, using defaults") diff --git a/benchbuild/extensions/time.py b/benchbuild/extensions/time.py index defcddb6c..9f75610a7 100644 --- a/benchbuild/extensions/time.py +++ b/benchbuild/extensions/time.py @@ -31,8 +31,9 @@ def handle_timing(run_infos): for run_info in run_infos: if may_wrap: timings = fetch_time_output( - time_tag, time_tag + "{:g}-{:g}-{:g}", - run_info.stderr.split("\n") + time_tag, + time_tag + "{:g}-{:g}-{:g}", + run_info.stderr.split("\n"), ) if timings: db.persist_time(run_info.db_run, session, timings) @@ -48,8 +49,9 @@ def __str__(self): return "Time execution of wrapped binary" -def fetch_time_output(marker: str, format_s: str, - ins: tp.List[str]) -> tp.List[parse.Match]: +def fetch_time_output( + marker: str, format_s: str, ins: tp.List[str] +) -> tp.List[parse.Match]: """ Fetch the output /usr/bin/time from a. diff --git a/benchbuild/likwid.py b/benchbuild/likwid.py index 3f49dfc7e..455d8fb2d 100644 --- a/benchbuild/likwid.py +++ b/benchbuild/likwid.py @@ -5,7 +5,7 @@ """ -def fetch_cols(fstream, split_char=','): +def fetch_cols(fstream, split_char=","): """ Fetch columns from likwid's output stream. 
@@ -163,23 +163,22 @@ def perfcounters(infile): A list of all measurements extracted from likwid's file stream. """ measurements = [] - with open(infile, 'r') as in_file: + with open(infile, "r") as in_file: read_struct(in_file) for region_struct in read_structs(in_file): region = region_struct["1"][1] core_info = region_struct["Region Info"] - measurements += \ - get_measurements(region, core_info, region_struct) + measurements += get_measurements(region, core_info, region_struct) for table_struct in read_tables(in_file): core_info = None if "Event" in table_struct: offset = 1 core_info = table_struct["Event"][offset:] - measurements += get_measurements(region, core_info, - table_struct, offset) + measurements += get_measurements( + region, core_info, table_struct, offset + ) elif "Metric" in table_struct: core_info = table_struct["Metric"] - measurements += get_measurements(region, core_info, - table_struct) + measurements += get_measurements(region, core_info, table_struct) return measurements diff --git a/benchbuild/plugins.py b/benchbuild/plugins.py index 386ec9f20..5df804516 100644 --- a/benchbuild/plugins.py +++ b/benchbuild/plugins.py @@ -14,6 +14,7 @@ will automatically register itself and is available on the CLI for all subcommands. """ + import importlib import itertools import logging diff --git a/benchbuild/project.py b/benchbuild/project.py index 20d4b55cc..7b0116d24 100644 --- a/benchbuild/project.py +++ b/benchbuild/project.py @@ -15,6 +15,7 @@ series of commands in both phases and that all experiments run inside a separate build directory in isolation of one another. 
""" + import copy import logging import typing as tp @@ -43,9 +44,9 @@ MaybeGroupNames = tp.Optional[tp.List[str]] ProjectNames = tp.List[str] Sources = tp.List[source.FetchableSource] -ContainerDeclaration = tp.Union[ContainerImage, - tp.List[tp.Tuple[RevisionRange, - ContainerImage]]] +ContainerDeclaration = tp.Union[ + ContainerImage, tp.List[tp.Tuple[RevisionRange, ContainerImage]] +] Workloads = tp.MutableMapping[SupportsUnwrap, tp.List[Command]] __REGISTRATION_SEPARATOR = "/" @@ -98,9 +99,7 @@ def active_revision(self) -> source.Revision: Returns: Active revision context. """ - assert hasattr( - self, "revision" - ), "revision attribute missing from subclass." + assert hasattr(self, "revision"), "revision attribute missing from subclass." if self._active_revision is None: self._active_revision = self.revision @@ -144,17 +143,16 @@ def clear_paths(self) -> None: class ProjectRunnables: - def __init_subclass__(cls, **kwargs): super().__init_subclass__(**kwargs) - if hasattr(cls, 'run_tests'): + if hasattr(cls, "run_tests"): f_run_tests = run.in_builddir()(run.store_config(cls.run_tests)) - setattr(cls, 'run_tests', f_run_tests) + setattr(cls, "run_tests", f_run_tests) - if hasattr(cls, 'compile'): + if hasattr(cls, "compile"): f_compile = run.in_builddir()(run.store_config(cls.compile)) - setattr(cls, 'compile', f_compile) + setattr(cls, "compile", f_compile) @abstractmethod def compile(self) -> None: @@ -166,9 +164,7 @@ def run_tests(self) -> None: @attr.s -class Project( - PathTracker, MultiVersioned, ProjectRunnables, metaclass=ProjectRegistry -): # pylint: disable=too-many-instance-attributes +class Project(PathTracker, MultiVersioned, ProjectRunnables, metaclass=ProjectRegistry): # pylint: disable=too-many-instance-attributes """Abstract class for benchbuild projects. 
A project is an arbitrary software system usable by benchbuild in @@ -245,9 +241,7 @@ def __new__(cls, *args, **kwargs): new_self = super(Project, cls).__new__(cls) mod_ident = f"{cls.__name__} @ {cls.__module__}" if not cls.NAME: - raise AttributeError( - f"{mod_ident} does not define a NAME class attribute." - ) + raise AttributeError(f"{mod_ident} does not define a NAME class attribute.") if not cls.DOMAIN: raise AttributeError( f"{mod_ident} does not define a DOMAIN class attribute." @@ -271,7 +265,7 @@ def __default_revision(self) -> source.Revision: # pylint: disable=unused-priva return source.Revision( type(self), source.primary(srcs[0]).default, - *[src.default for src in source.secondaries(srcs[1:])] + *[src.default for src in source.secondaries(srcs[1:])], ) name: str = attr.ib( @@ -296,7 +290,7 @@ def __default_revision(self) -> source.Revision: # pylint: disable=unused-priva @run_uuid.default def __default_run_uuid(self): # pylint: disable=unused-private-member - if (run_group := getenv("BB_DB_RUN_GROUP", None)): + if run_group := getenv("BB_DB_RUN_GROUP", None): return uuid.UUID(run_group) return uuid.uuid4() @@ -307,8 +301,7 @@ def __check_if_uuid(self, _: tp.Any, value: uuid.UUID) -> None: # pylint: disab builddir: local.path = attr.ib( default=attr.Factory( - lambda self: local.path(str(CFG["build_dir"])) / self.id / self. - run_uuid, + lambda self: local.path(str(CFG["build_dir"])) / self.id / self.run_uuid, takes_self=True, ) ) @@ -318,8 +311,7 @@ def __check_if_uuid(self, _: tp.Any, value: uuid.UUID) -> None: # pylint: disab ) workloads: Workloads = attr.ib( - default=attr. 
- Factory(lambda self: type(self).WORKLOADS, takes_self=True) + default=attr.Factory(lambda self: type(self).WORKLOADS, takes_self=True) ) primary_source: str = attr.ib() @@ -460,9 +452,7 @@ def source_of_primary(self) -> str: ProjectT = tp.Type[Project] -def __split_project_input__( - project_input: str -) -> tp.Tuple[str, tp.Optional[str]]: +def __split_project_input__(project_input: str) -> tp.Tuple[str, tp.Optional[str]]: split_input = project_input.rsplit("@", maxsplit=1) first = split_input[0] second = split_input[1] if len(split_input) > 1 else None @@ -490,9 +480,7 @@ def __add_single_filter__(project: ProjectT, version: str) -> ProjectT: return project -def __add_indexed_filters__( - project: ProjectT, versions: tp.List[str] -) -> ProjectT: +def __add_indexed_filters__(project: ProjectT, versions: tp.List[str]) -> ProjectT: sources = [src for src in project.SOURCE if src.is_expandable] for i in range(min(len(sources), len(versions))): @@ -504,9 +492,7 @@ def __add_indexed_filters__( return project -def __add_named_filters__( - project: ProjectT, versions: tp.Dict[str, str] -) -> ProjectT: +def __add_named_filters__(project: ProjectT, versions: tp.Dict[str, str]) -> ProjectT: sources = project.SOURCE sources = [src for src in project.SOURCE if src.is_expandable] named_sources: tp.Dict[str, source.base.FetchableSource] = { @@ -571,8 +557,7 @@ def csv(in_str: tp.Union[tp.Any, str]) -> bool: def populate( - projects_to_filter: ProjectNames, - group: MaybeGroupNames = None + projects_to_filter: ProjectNames, group: MaybeGroupNames = None ) -> ProjectIndex: """ Populate the list of projects that belong to this experiment. 
@@ -616,13 +601,9 @@ def normalize_key(key: str) -> str: if group: groupkeys = set(group) - prjs = { - name: cls for name, cls in prjs.items() if cls.GROUP in groupkeys - } + prjs = {name: cls for name, cls in prjs.items() if cls.GROUP in groupkeys} - populated = { - x: prjs[x] for x in prjs if prjs[x].DOMAIN != "debug" or x in p2f - } + populated = {x: prjs[x] for x in prjs if prjs[x].DOMAIN != "debug" or x in p2f} return populated diff --git a/benchbuild/projects/__init__.py b/benchbuild/projects/__init__.py index 808aa95fb..ce3d2d0cc 100644 --- a/benchbuild/projects/__init__.py +++ b/benchbuild/projects/__init__.py @@ -7,6 +7,7 @@ *_PLUGINS_PROJECTS """ + import importlib import logging diff --git a/benchbuild/projects/apollo/rodinia.py b/benchbuild/projects/apollo/rodinia.py index c2c74cd79..cf4477a1a 100644 --- a/benchbuild/projects/apollo/rodinia.py +++ b/benchbuild/projects/apollo/rodinia.py @@ -9,32 +9,36 @@ @attr.s class RodiniaGroup(bb.Project): """Generic handling of Rodinia benchmarks.""" - DOMAIN = 'rodinia' - GROUP = 'rodinia' + + DOMAIN = "rodinia" + GROUP = "rodinia" SOURCE = [ - HTTP(remote={ - '3.1': 'http://www.cs.virginia.edu/' - '~kw5na/lava/Rodinia/Packages/Current/3.1/' - 'rodinia_3.1.tar.bz2' - }, - local='rodinia.tar.bz2') + HTTP( + remote={ + "3.1": "http://www.cs.virginia.edu/" + "~kw5na/lava/Rodinia/Packages/Current/3.1/" + "rodinia_3.1.tar.bz2" + }, + local="rodinia.tar.bz2", + ) ] CONFIG = {} config = attr.ib( - default=attr.Factory(lambda self: type(self).CONFIG, takes_self=True)) + default=attr.Factory(lambda self: type(self).CONFIG, takes_self=True) + ) def compile(self): - tar('xf', 'rodinia.tar.bz2') - rodinia_version = self.version_of('rodinia.tar.bz2') - unpack_dir = local.path(f'rodinia_{rodinia_version}') + tar("xf", "rodinia.tar.bz2") + rodinia_version = self.version_of("rodinia.tar.bz2") + unpack_dir = local.path(f"rodinia_{rodinia_version}") c_compiler = bb.compiler.cc(self) cxx_compiler = bb.compiler.cxx(self) - config_dir 
= self.config['dir'] - config_src = self.config['src'] - config_flags = self.config['flags'] + config_dir = self.config["dir"] + config_src = self.config["src"] + config_flags = self.config["flags"] with local.cwd(unpack_dir / config_dir): for outfile, srcfiles in config_src.items(): @@ -52,39 +56,33 @@ def select_compiler(c_compiler, _): return c_compiler def run_tests(self): - rodinia_version = self.version_of('rodinia.tar.bz2') - unpack_dir = local.path(f'rodinia_{rodinia_version}') - in_src_dir = unpack_dir / self.config['dir'] + rodinia_version = self.version_of("rodinia.tar.bz2") + unpack_dir = local.path(f"rodinia_{rodinia_version}") + in_src_dir = unpack_dir / self.config["dir"] - for outfile in self.config['src']: + for outfile in self.config["src"]: bb.wrap(in_src_dir / outfile, self) with local.cwd(in_src_dir): sh_ = bb.watch(sh) - sh_('./run') + sh_("./run") class Backprop(RodiniaGroup): - NAME = 'backprop' + NAME = "backprop" CONFIG = { "dir": "openmp/backprop", - "src": { - NAME: [ - "backprop_kernel.c", "imagenet.c", "facetrain.c", "backprop.c" - ] - }, - "flags": ["-fopenmp", "-lm"] + "src": {NAME: ["backprop_kernel.c", "imagenet.c", "facetrain.c", "backprop.c"]}, + "flags": ["-fopenmp", "-lm"], } class BFS(RodiniaGroup): - NAME = 'bfs' + NAME = "bfs" CONFIG = { "dir": "openmp/bfs", - "src": { - NAME: ["bfs.cpp"] - }, - "flags": ["-fopenmp", "-UOPEN"] + "src": {NAME: ["bfs.cpp"]}, + "flags": ["-fopenmp", "-UOPEN"], } @staticmethod @@ -93,21 +91,24 @@ def select_compiler(_, cc): class BPlusTree(RodiniaGroup): - NAME = 'b+tree' + NAME = "b+tree" CONFIG = { "dir": "openmp/b+tree", "src": { "b+tree.out": [ - "./main.c", "./kernel/kernel_cpu.c", "./kernel/kernel_cpu_2.c", - "./util/timer/timer.c", "./util/num/num.c" + "./main.c", + "./kernel/kernel_cpu.c", + "./kernel/kernel_cpu_2.c", + "./util/timer/timer.c", + "./util/num/num.c", ] }, - "flags": ["-fopenmp", "-lm"] + "flags": ["-fopenmp", "-lm"], } class CFD(RodiniaGroup): - NAME = 'cfd' + NAME = "cfd" 
CONFIG = {"dir": "openmp/cfd", "src": {"euler3d_cpu": ["euler3d_cpu.cpp"]}} @staticmethod @@ -116,24 +117,20 @@ def select_compiler(_, cc): class HeartWall(RodiniaGroup): - NAME = 'heartwall' + NAME = "heartwall" CONFIG = { "dir": "openmp/heartwall", - "src": { - NAME: ["./AVI/avimod.c", "./AVI/avilib.c", "./main.c"] - }, - "flags": ["-I./AVI", "-fopenmp", "-lm"] + "src": {NAME: ["./AVI/avimod.c", "./AVI/avilib.c", "./main.c"]}, + "flags": ["-I./AVI", "-fopenmp", "-lm"], } class Hotspot(RodiniaGroup): - NAME = 'hotspot' + NAME = "hotspot" CONFIG = { "dir": "openmp/hotspot", - "src": { - NAME: ["hotspot_openmp.cpp"] - }, - "flags": ["-fopenmp"] + "src": {NAME: ["hotspot_openmp.cpp"]}, + "flags": ["-fopenmp"], } @staticmethod @@ -142,141 +139,171 @@ def select_compiler(_, cc): class Hotspot3D(RodiniaGroup): - NAME = 'hotspot3D' + NAME = "hotspot3D" CONFIG = { "dir": "openmp/hotspot3D", - "src": { - "3D": ["./3D.c"] - }, - "flags": ["-fopenmp", "-lm"] + "src": {"3D": ["./3D.c"]}, + "flags": ["-fopenmp", "-lm"], } class KMeans(RodiniaGroup): - NAME = 'kmeans' + NAME = "kmeans" CONFIG = { "dir": "openmp/kmeans", "src": { "./kmeans_serial/kmeans": [ "./kmeans_serial/kmeans_clustering.c", - "./kmeans_serial/kmeans.c", "./kmeans_serial/getopt.c", - "./kmeans_serial/cluster.c" + "./kmeans_serial/kmeans.c", + "./kmeans_serial/getopt.c", + "./kmeans_serial/cluster.c", ], "./kmeans_openmp/kmeans": [ "./kmeans_openmp/kmeans_clustering.c", - "./kmeans_openmp/kmeans.c", "./kmeans_openmp/getopt.c", - "./kmeans_openmp/cluster.c" - ] + "./kmeans_openmp/kmeans.c", + "./kmeans_openmp/getopt.c", + "./kmeans_openmp/cluster.c", + ], }, - "flags": ["-lm", "-fopenmp"] + "flags": ["-lm", "-fopenmp"], } class LavaMD(RodiniaGroup): - NAME = 'lavaMD' + NAME = "lavaMD" CONFIG = { "dir": "openmp/lavaMD", "src": { NAME: [ - "./main.c", "./util/timer/timer.c", "./util/num/num.c", - "./kernel/kernel_cpu.c" + "./main.c", + "./util/timer/timer.c", + "./util/num/num.c", + "./kernel/kernel_cpu.c", ] }, 
- "flags": ["-lm", "-fopenmp"] + "flags": ["-lm", "-fopenmp"], } class Leukocyte(RodiniaGroup): - NAME = 'leukocyte' + NAME = "leukocyte" CONFIG = { - "dir": - "openmp/leukocyte", + "dir": "openmp/leukocyte", "src": { NAME: [ - "./meschach_lib/memstat.c", "./meschach_lib/meminfo.c", - "./meschach_lib/version.c", "./meschach_lib/ivecop.c", - "./meschach_lib/matlab.c", "./meschach_lib/machine.c", - "./meschach_lib/otherio.c", "./meschach_lib/init.c", - "./meschach_lib/submat.c", "./meschach_lib/pxop.c", - "./meschach_lib/matop.c", "./meschach_lib/vecop.c", - "./meschach_lib/memory.c", "./meschach_lib/matrixio.c", - "./meschach_lib/err.c", "./meschach_lib/copy.c", - "./meschach_lib/bdfactor.c", "./meschach_lib/mfunc.c", - "./meschach_lib/fft.c", "./meschach_lib/svd.c", - "./meschach_lib/schur.c", "./meschach_lib/symmeig.c", - "./meschach_lib/hessen.c", "./meschach_lib/norm.c", - "./meschach_lib/update.c", "./meschach_lib/givens.c", - "./meschach_lib/hsehldr.c", "./meschach_lib/solve.c", - "./meschach_lib/qrfactor.c", "./meschach_lib/chfactor.c", - "./meschach_lib/bkpfacto.c", "./meschach_lib/lufactor.c", - "./meschach_lib/iternsym.c", "./meschach_lib/itersym.c", - "./meschach_lib/iter0.c", "./meschach_lib/spswap.c", - "./meschach_lib/spbkp.c", "./meschach_lib/splufctr.c", - "./meschach_lib/spchfctr.c", "./meschach_lib/sparseio.c", - "./meschach_lib/sprow.c", "./meschach_lib/sparse.c", - "./meschach_lib/zfunc.c", "./meschach_lib/znorm.c", - "./meschach_lib/zmatop.c", "./meschach_lib/zvecop.c", - "./meschach_lib/zmemory.c", "./meschach_lib/zmatio.c", - "./meschach_lib/zcopy.c", "./meschach_lib/zmachine.c", - "./meschach_lib/zschur.c", "./meschach_lib/zhessen.c", - "./meschach_lib/zgivens.c", "./meschach_lib/zqrfctr.c", - "./meschach_lib/zhsehldr.c", "./meschach_lib/zmatlab.c", - "./meschach_lib/zsolve.c", "./meschach_lib/zlufctr.c", - "./OpenMP/detect_main.c", "./OpenMP/misc_math.c", - "./OpenMP/track_ellipse.c", "./OpenMP/find_ellipse.c", - "./OpenMP/avilib.c" + 
"./meschach_lib/memstat.c", + "./meschach_lib/meminfo.c", + "./meschach_lib/version.c", + "./meschach_lib/ivecop.c", + "./meschach_lib/matlab.c", + "./meschach_lib/machine.c", + "./meschach_lib/otherio.c", + "./meschach_lib/init.c", + "./meschach_lib/submat.c", + "./meschach_lib/pxop.c", + "./meschach_lib/matop.c", + "./meschach_lib/vecop.c", + "./meschach_lib/memory.c", + "./meschach_lib/matrixio.c", + "./meschach_lib/err.c", + "./meschach_lib/copy.c", + "./meschach_lib/bdfactor.c", + "./meschach_lib/mfunc.c", + "./meschach_lib/fft.c", + "./meschach_lib/svd.c", + "./meschach_lib/schur.c", + "./meschach_lib/symmeig.c", + "./meschach_lib/hessen.c", + "./meschach_lib/norm.c", + "./meschach_lib/update.c", + "./meschach_lib/givens.c", + "./meschach_lib/hsehldr.c", + "./meschach_lib/solve.c", + "./meschach_lib/qrfactor.c", + "./meschach_lib/chfactor.c", + "./meschach_lib/bkpfacto.c", + "./meschach_lib/lufactor.c", + "./meschach_lib/iternsym.c", + "./meschach_lib/itersym.c", + "./meschach_lib/iter0.c", + "./meschach_lib/spswap.c", + "./meschach_lib/spbkp.c", + "./meschach_lib/splufctr.c", + "./meschach_lib/spchfctr.c", + "./meschach_lib/sparseio.c", + "./meschach_lib/sprow.c", + "./meschach_lib/sparse.c", + "./meschach_lib/zfunc.c", + "./meschach_lib/znorm.c", + "./meschach_lib/zmatop.c", + "./meschach_lib/zvecop.c", + "./meschach_lib/zmemory.c", + "./meschach_lib/zmatio.c", + "./meschach_lib/zcopy.c", + "./meschach_lib/zmachine.c", + "./meschach_lib/zschur.c", + "./meschach_lib/zhessen.c", + "./meschach_lib/zgivens.c", + "./meschach_lib/zqrfctr.c", + "./meschach_lib/zhsehldr.c", + "./meschach_lib/zmatlab.c", + "./meschach_lib/zsolve.c", + "./meschach_lib/zlufctr.c", + "./OpenMP/detect_main.c", + "./OpenMP/misc_math.c", + "./OpenMP/track_ellipse.c", + "./OpenMP/find_ellipse.c", + "./OpenMP/avilib.c", ] }, "flags": [ - "-DSPARSE", "-DCOMPLEX", "-DREAL_FLT", "-DREAL_DBL", - "-I./meschach_lib", "-lm", "-lpthread", "-fopenmp" - ] + "-DSPARSE", + "-DCOMPLEX", + "-DREAL_FLT", 
+ "-DREAL_DBL", + "-I./meschach_lib", + "-lm", + "-lpthread", + "-fopenmp", + ], } class LUD(RodiniaGroup): - NAME = 'lud' + NAME = "lud" CONFIG = { "dir": "openmp/lud", "src": { - "./omp/lud_omp": [ - "./common/common.c", "./omp/lud_omp.c", "./omp/lud.c" - ] + "./omp/lud_omp": ["./common/common.c", "./omp/lud_omp.c", "./omp/lud.c"] }, - "flags": ["-I./common", "-lm", "-fopenmp"] + "flags": ["-I./common", "-lm", "-fopenmp"], } class Myocyte(RodiniaGroup): - NAME = 'myocyte' + NAME = "myocyte" CONFIG = { "dir": "openmp/myocyte", - "src": { - "./myocyte.out": ["main.c"] - }, - "flags": ["-lm", "-fopenmp"] + "src": {"./myocyte.out": ["main.c"]}, + "flags": ["-lm", "-fopenmp"], } class NN(RodiniaGroup): - NAME = 'nn' + NAME = "nn" CONFIG = { "dir": "openmp/nn", - "src": { - NAME: ["./nn_openmp.c"] - }, - "flags": ["-lm", "-fopenmp"] + "src": {NAME: ["./nn_openmp.c"]}, + "flags": ["-lm", "-fopenmp"], } class NW(RodiniaGroup): - NAME = 'nw' + NAME = "nw" CONFIG = { "dir": "openmp/nw", - "src": { - "needle": ["./needle.cpp"] - }, - "flags": ["-lm", "-fopenmp"] + "src": {"needle": ["./needle.cpp"]}, + "flags": ["-lm", "-fopenmp"], } @staticmethod @@ -285,24 +312,20 @@ def select_compiler(_, cc): class ParticleFilter(RodiniaGroup): - NAME = 'particlefilter' + NAME = "particlefilter" CONFIG = { "dir": "openmp/particlefilter", - "src": { - "particle_filter": ["./ex_particle_OPENMP_seq.c"] - }, - "flags": ["-lm", "-fopenmp"] + "src": {"particle_filter": ["./ex_particle_OPENMP_seq.c"]}, + "flags": ["-lm", "-fopenmp"], } class PathFinder(RodiniaGroup): - NAME = 'pathfinder' + NAME = "pathfinder" CONFIG = { "dir": "openmp/pathfinder", - "src": { - "pathfinder": ["./pathfinder.cpp"] - }, - "flags": ["-fopenmp"] + "src": {"pathfinder": ["./pathfinder.cpp"]}, + "flags": ["-fopenmp"], } @staticmethod @@ -311,24 +334,20 @@ def select_compiler(_, cc): class SRAD1(RodiniaGroup): - NAME = 'srad-1' + NAME = "srad-1" CONFIG = { "dir": "openmp/srad/srad_v1", - "src": { - "srad": 
["./main.c"] - }, - "flags": ["-I.", "-lm", "-fopenmp"] + "src": {"srad": ["./main.c"]}, + "flags": ["-I.", "-lm", "-fopenmp"], } class SRAD2(RodiniaGroup): - NAME = 'srad-2' + NAME = "srad-2" CONFIG = { "dir": "openmp/srad/srad_v2", - "src": { - "srad": ["./srad.cpp"] - }, - "flags": ["-lm", "-fopenmp"] + "src": {"srad": ["./srad.cpp"]}, + "flags": ["-lm", "-fopenmp"], } @staticmethod @@ -337,13 +356,11 @@ def select_compiler(_, cc): class StreamCluster(RodiniaGroup): - NAME = 'streamcluster' + NAME = "streamcluster" CONFIG = { "dir": "openmp/streamcluster", - "src": { - "./sc_omp": ["./streamcluster_omp.cpp"] - }, - "flags": ["-lpthread", "-fopenmp"] + "src": {"./sc_omp": ["./streamcluster_omp.cpp"]}, + "flags": ["-lpthread", "-fopenmp"], } @staticmethod diff --git a/benchbuild/projects/apollo/scimark.py b/benchbuild/projects/apollo/scimark.py index 29449f7e7..a7825afdd 100644 --- a/benchbuild/projects/apollo/scimark.py +++ b/benchbuild/projects/apollo/scimark.py @@ -10,22 +10,22 @@ class SciMark(bb.Project): """SciMark""" - NAME = 'scimark' - DOMAIN = 'scientific' - GROUP = 'apollo' + NAME = "scimark" + DOMAIN = "scientific" + GROUP = "apollo" SOURCE = [ HTTP( - remote={'2.1c': 'http://math.nist.gov/scimark2/scimark2_1c.zip'}, - local='scimark.zip' + remote={"2.1c": "http://math.nist.gov/scimark2/scimark2_1c.zip"}, + local="scimark.zip", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") WORKLOADS = {WorkloadSet(): [Command(SourceRoot(".") / "scimark2")]} def compile(self): - scimark_source = local.path(self.source_of('scimark.zip')) + scimark_source = local.path(self.source_of("scimark.zip")) clang = bb.compiler.cc(self) _clang = bb.watch(clang) unzip(local.cwd / scimark_source) diff --git a/benchbuild/projects/benchbuild/bots.py b/benchbuild/projects/benchbuild/bots.py index cedcf880e..1c5e0f8b2 100644 --- a/benchbuild/projects/benchbuild/bots.py +++ b/benchbuild/projects/benchbuild/bots.py 
@@ -24,13 +24,15 @@ class BOTSGroup(bb.Project): Strassen: Computes a matrix multiply with Strassen's method. """ - DOMAIN = 'bots' - GROUP = 'bots' + DOMAIN = "bots" + GROUP = "bots" SOURCE = [ - Git(remote='https://github.com/bsc-pm/bots', - local='bots.git', + Git( + remote="https://github.com/bsc-pm/bots", + local="bots.git", limit=5, - refspec='HEAD') + refspec="HEAD", + ) ] path_dict = { @@ -44,7 +46,7 @@ class BOTSGroup(bb.Project): "sort": "serial/sort", "sparselu": "serial/sparselu", "strassen": "serial/strassen", - "uts": "serial/uts" + "uts": "serial/uts", } input_dict = { @@ -52,23 +54,35 @@ class BOTSGroup(bb.Project): "floorplan": ["input.15", "input.20", "input.5"], "health": ["large.input", "medium.input", "small.input", "test.input"], "knapsack": [ - "knapsack-012.input", "knapsack-016.input", "knapsack-020.input", - "knapsack-024.input", "knapsack-032.input", "knapsack-036.input", - "knapsack-040.input", "knapsack-044.input", "knapsack-048.input", - "knapsack-064.input", "knapsack-096.input", "knapsack-128.input" + "knapsack-012.input", + "knapsack-016.input", + "knapsack-020.input", + "knapsack-024.input", + "knapsack-032.input", + "knapsack-036.input", + "knapsack-040.input", + "knapsack-044.input", + "knapsack-048.input", + "knapsack-064.input", + "knapsack-096.input", + "knapsack-128.input", ], "uts": [ - "huge.input", "large.input", "medium.input", "small.input", - "test.input", "tiny.input" - ] + "huge.input", + "large.input", + "medium.input", + "small.input", + "test.input", + "tiny.input", + ], } def compile(self): - bots_repo = local.path(self.source_of('bots.git')) + bots_repo = local.path(self.source_of("bots.git")) makefile_config = bots_repo / "config" / "make.config" clang = bb.compiler.cc(self) - with open(makefile_config, 'w') as config: + with open(makefile_config, "w") as config: lines = [ "LABEL=benchbuild", "ENABLE_OMPSS=", @@ -97,7 +111,7 @@ def compile(self): def run_tests(self): binary_name = 
"{name}.benchbuild.serial".format(name=self.name) - bots_repo = local.path(self.source_of('bots.git')) + bots_repo = local.path(self.source_of("bots.git")) binary_path = bots_repo / "bin" / binary_name exp = bb.wrap(binary_path, self) _exp = bb.watch(exp) @@ -111,44 +125,44 @@ def run_tests(self): class Alignment(BOTSGroup): - NAME = 'alignment' + NAME = "alignment" class FFT(BOTSGroup): - NAME = 'fft' + NAME = "fft" class Fib(BOTSGroup): - NAME = 'fib' + NAME = "fib" class FloorPlan(BOTSGroup): - NAME = 'floorplan' + NAME = "floorplan" class Health(BOTSGroup): - NAME = 'health' + NAME = "health" class Knapsack(BOTSGroup): - NAME = 'knapsack' + NAME = "knapsack" class NQueens(BOTSGroup): - NAME = 'nqueens' + NAME = "nqueens" class Sort(BOTSGroup): - NAME = 'sort' + NAME = "sort" class SparseLU(BOTSGroup): - NAME = 'sparselu' + NAME = "sparselu" class Strassen(BOTSGroup): - NAME = 'strassen' + NAME = "strassen" class UTS(BOTSGroup): - NAME = 'uts' + NAME = "uts" diff --git a/benchbuild/projects/benchbuild/bzip2.py b/benchbuild/projects/benchbuild/bzip2.py index 195471bcb..e669beb20 100644 --- a/benchbuild/projects/benchbuild/bzip2.py +++ b/benchbuild/projects/benchbuild/bzip2.py @@ -26,8 +26,7 @@ class Bzip2(bb.Project): ), ] - CONTAINER = ContainerImage().from_("benchbuild:alpine" - ).run("apk", "add", "make") + CONTAINER = ContainerImage().from_("benchbuild:alpine").run("apk", "add", "make") # yapf: disable WORKLOADS = { WorkloadSet("compression"): [ diff --git a/benchbuild/projects/benchbuild/ccrypt.py b/benchbuild/projects/benchbuild/ccrypt.py index 9d251400d..25f57a538 100644 --- a/benchbuild/projects/benchbuild/ccrypt.py +++ b/benchbuild/projects/benchbuild/ccrypt.py @@ -7,27 +7,26 @@ class Ccrypt(bb.Project): - """ ccrypt benchmark """ + """ccrypt benchmark""" - NAME = 'ccrypt' - DOMAIN = 'encryption' - GROUP = 'benchbuild' + NAME = "ccrypt" + DOMAIN = "encryption" + GROUP = "benchbuild" SOURCE = [ HTTP( remote={ - '1.10': - 
"http://ccrypt.sourceforge.net/download/ccrypt-1.10.tar.gz" + "1.10": "http://ccrypt.sourceforge.net/download/ccrypt-1.10.tar.gz" }, - local='ccrypt.tar.gz' + local="ccrypt.tar.gz", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - ccrypt_source = local.path(self.source_of('ccrypt.tar.gz')) - ccrypt_version = self.version_of('ccrypt.tar.gz') - tar('xfz', ccrypt_source) - unpack_dir = f'ccrypt-{ccrypt_version}' + ccrypt_source = local.path(self.source_of("ccrypt.tar.gz")) + ccrypt_version = self.version_of("ccrypt.tar.gz") + tar("xfz", ccrypt_source) + unpack_dir = f"ccrypt-{ccrypt_version}" clang = bb.compiler.cc(self) clang_cxx = bb.compiler.cxx(self) @@ -41,8 +40,8 @@ def compile(self): _make("check") def run_tests(self): - ccrypt_version = self.version_of('ccrypt.tar.gz') - unpack_dir = f'ccrypt-{ccrypt_version}' + ccrypt_version = self.version_of("ccrypt.tar.gz") + unpack_dir = f"ccrypt-{ccrypt_version}" with local.cwd(unpack_dir): bb.wrap(local.path("src") / self.name, self) bb.wrap(local.path("check") / "crypt3-check", self) diff --git a/benchbuild/projects/benchbuild/crafty.py b/benchbuild/projects/benchbuild/crafty.py index f8af912f1..435c951e4 100644 --- a/benchbuild/projects/benchbuild/crafty.py +++ b/benchbuild/projects/benchbuild/crafty.py @@ -7,40 +7,34 @@ class Crafty(bb.Project): - """ crafty benchmark """ + """crafty benchmark""" - NAME = 'crafty' - DOMAIN = 'scientific' - GROUP = 'benchbuild' + NAME = "crafty" + DOMAIN = "scientific" + GROUP = "benchbuild" SOURCE = [ HTTP( remote={ - '25.2': ( - 'http://www.craftychess.com/downloads/source/' - 'crafty-25.2.zip' - ) + "25.2": ("http://www.craftychess.com/downloads/source/crafty-25.2.zip") }, - local='crafty.zip' + local="crafty.zip", ), HTTP( - remote={ - '1.0': 'http://www.craftychess.com/downloads/book/book.bin' - }, - local='book.bin' + remote={"1.0": "http://www.craftychess.com/downloads/book/book.bin"}, + 
local="book.bin", ), HTTP( remote={ - '2016-11-crafty.tar.gz': - 'http://lairosiel.de/dist/2016-11-crafty.tar.gz' + "2016-11-crafty.tar.gz": "http://lairosiel.de/dist/2016-11-crafty.tar.gz" }, - local='inputs.tar.gz' - ) + local="inputs.tar.gz", + ), ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - crafty_source = local.path(self.source_of('crafty.zip')) - book_source = local.path(self.source_of('inputs.tar.gz')) + crafty_source = local.path(self.source_of("crafty.zip")) + book_source = local.path(self.source_of("inputs.tar.gz")) unpack_dir = "crafty.src" mkdir(unpack_dir) @@ -54,13 +48,15 @@ def compile(self): target_opts = ["-DCPUS=1", "-DSYZYGY", "-DTEST"] _make = bb.watch(make) _make( - "target=UNIX", "CC=" + str(clang), - "opt=" + " ".join(target_opts), "crafty-make" + "target=UNIX", + "CC=" + str(clang), + "opt=" + " ".join(target_opts), + "crafty-make", ) def run_tests(self): - unpack_dir = local.path('crafty.src') - test_source = local.path(self.source_of('inputs.tar.gz')) + unpack_dir = local.path("crafty.src") + test_source = local.path(self.source_of("inputs.tar.gz")) with local.cwd(unpack_dir): crafty = bb.wrap("./crafty", self) diff --git a/benchbuild/projects/benchbuild/crocopat.py b/benchbuild/projects/benchbuild/crocopat.py index d9bcc63e5..7d9b41f8a 100644 --- a/benchbuild/projects/benchbuild/crocopat.py +++ b/benchbuild/projects/benchbuild/crocopat.py @@ -7,48 +7,41 @@ class Crocopat(bb.Project): - """ crocopat benchmark """ + """crocopat benchmark""" - NAME = 'crocopat' - DOMAIN = 'verification' - GROUP = 'benchbuild' + NAME = "crocopat" + DOMAIN = "verification" + GROUP = "benchbuild" SOURCE = [ HTTP( - remote={ - '2.1.4': - 'http://crocopat.googlecode.com/files/crocopat-2.1.4.zip' - }, - local='crocopat.zip' + remote={"2.1.4": "http://crocopat.googlecode.com/files/crocopat-2.1.4.zip"}, + local="crocopat.zip", ), HTTP( - remote={ - '2014-10': 
'http://lairosiel.de/dist/2014-10-crocopat.tar.gz' - }, - local='inputs.tar.gz' - ) + remote={"2014-10": "http://lairosiel.de/dist/2014-10-crocopat.tar.gz"}, + local="inputs.tar.gz", + ), ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def run_tests(self): - crocopat = bb.wrap('crocopat', self) - test_source = self.source_of('inputs.tar.gz') - tar('xf', test_source) + crocopat = bb.wrap("crocopat", self) + test_source = self.source_of("inputs.tar.gz") + tar("xf", test_source) - test_dir = local.path('./crocopat/') + test_dir = local.path("./crocopat/") programs = test_dir / "programs" // "*.rml" projects = test_dir / "projects" // "*.rsf" for program in programs: for _project in projects: - _crocopat_project = bb.watch( - (cat[_project] | crocopat[program]) - ) + _crocopat_project = bb.watch((cat[_project] | crocopat[program])) _crocopat_project(retcode=None) def compile(self): - crocopat_source = local.path(self.source_of('crocopat.zip')) - crocopat_version = self.version_of('crocopat.zip') + crocopat_source = local.path(self.source_of("crocopat.zip")) + crocopat_version = self.version_of("crocopat.zip") unzip(crocopat_source) - unpack_dir = f'crocopat-{crocopat_version}' + unpack_dir = f"crocopat-{crocopat_version}" crocopat_dir = local.path(unpack_dir) / "src" self.cflags += ["-I.", "-ansi"] diff --git a/benchbuild/projects/benchbuild/ffmpeg.py b/benchbuild/projects/benchbuild/ffmpeg.py index 1f8207a25..ecc32c719 100644 --- a/benchbuild/projects/benchbuild/ffmpeg.py +++ b/benchbuild/projects/benchbuild/ffmpeg.py @@ -9,24 +9,25 @@ class LibAV(bb.Project): - """ LibAV benchmark """ - NAME = 'ffmpeg' - DOMAIN = 'multimedia' - GROUP = 'benchbuild' + """LibAV benchmark""" + + NAME = "ffmpeg" + DOMAIN = "multimedia" + GROUP = "benchbuild" SOURCE = [ HTTP( - remote={'3.1.3': 'http://ffmpeg.org/releases/ffmpeg-3.1.3.tar.bz2'}, - local='ffmpeg.tar.bz2' + remote={"3.1.3": 
"http://ffmpeg.org/releases/ffmpeg-3.1.3.tar.bz2"}, + local="ffmpeg.tar.bz2", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") fate_dir = "fate-samples" fate_uri = "rsync://fate-suite.libav.org/fate-suite/" def run_tests(self): - ffmpeg_version = self.version_of('ffmpeg.tar.bz2') - unpack_dir = local.path(f'ffmpeg-{ffmpeg_version}') + ffmpeg_version = self.version_of("ffmpeg.tar.bz2") + unpack_dir = local.path(f"ffmpeg-{ffmpeg_version}") with local.cwd(unpack_dir): bb.wrap(self.name, self) @@ -34,10 +35,10 @@ def run_tests(self): _make("V=1", "-i", "fate") def compile(self): - ffmpeg_source = local.path(self.source_of('ffmpeg.tar.bz2')) - ffmpeg_version = self.version_of('ffmpeg.tar.bz2') - tar('xfj', ffmpeg_source) - unpack_dir = local.path(f'ffmpeg-{ffmpeg_version}') + ffmpeg_source = local.path(self.source_of("ffmpeg.tar.bz2")) + ffmpeg_version = self.version_of("ffmpeg.tar.bz2") + tar("xfj", ffmpeg_source) + unpack_dir = local.path(f"ffmpeg-{ffmpeg_version}") clang = bb.compiler.cc(self) with local.cwd(unpack_dir): @@ -47,9 +48,10 @@ def compile(self): _make = bb.watch(make) _configure( - "--disable-shared", "--cc=" + str(clang), + "--disable-shared", + "--cc=" + str(clang), "--extra-ldflags=" + " ".join(self.ldflags), - "--samples=" + self.fate_dir + "--samples=" + self.fate_dir, ) _make("clean") _make("-j{0}".format(str(get_number_of_jobs(CFG))), "all") diff --git a/benchbuild/projects/benchbuild/gzip.py b/benchbuild/projects/benchbuild/gzip.py index 16d92c2ab..82b222ed9 100644 --- a/benchbuild/projects/benchbuild/gzip.py +++ b/benchbuild/projects/benchbuild/gzip.py @@ -9,26 +9,26 @@ class Gzip(bb.Project): - """ Gzip """ + """Gzip""" - NAME = 'gzip' - DOMAIN = 'compression' - GROUP = 'benchbuild' + NAME = "gzip" + DOMAIN = "compression" + GROUP = "benchbuild" SOURCE = [ HTTP( - remote={'1.6': 'http://ftpmirror.gnu.org/gzip/gzip-1.6.tar.xz'}, - local='gzip.tar.xz' + remote={"1.6": 
"http://ftpmirror.gnu.org/gzip/gzip-1.6.tar.xz"}, + local="gzip.tar.xz", ), HTTP( - remote={'1.0': 'http://lairosiel.de/dist/compression.tar.gz'}, - local='compression.tar.gz' - ) + remote={"1.0": "http://lairosiel.de/dist/compression.tar.gz"}, + local="compression.tar.gz", + ), ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compression_test(self): - gzip_version = self.version_of('gzip.tar.xz') - unpack_dir = local.path(f'gzip-{gzip_version}.tar.xz') + gzip_version = self.version_of("gzip.tar.xz") + unpack_dir = local.path(f"gzip-{gzip_version}.tar.xz") _gzip = bb.watch(bb.wrap(unpack_dir / "gzip", self)) # Compress @@ -46,13 +46,13 @@ def compression_test(self): _gzip("-f", "-k", "--decompress", "compression/liberty.jpg.gz") def compile_project(self): - gzip_source = local.path(self.source_of('gzip.tar.xz')) - compression_source = local.path(self.source_of('compression.tar.gz')) + gzip_source = local.path(self.source_of("gzip.tar.xz")) + compression_source = local.path(self.source_of("compression.tar.gz")) - tar('xfJ', gzip_source) - tar('xf', compression_source) + tar("xfJ", gzip_source) + tar("xf", compression_source) - gzip_version = self.version_of('gzip.tar.xz') + gzip_version = self.version_of("gzip.tar.xz") unpack_dir = "gzip-{0}.tar.xz".format(gzip_version) clang = bb.compiler.cc(self) @@ -60,8 +60,9 @@ def compile_project(self): _configure = bb.watch(local["./configure"]) with local.env(CC=str(clang)): _configure( - "--disable-dependency-tracking", "--disable-silent-rules", - "--with-gnu-ld" + "--disable-dependency-tracking", + "--disable-silent-rules", + "--with-gnu-ld", ) _make = bb.watch(make) _make("-j" + str(get_number_of_jobs(CFG)), "clean", "all") diff --git a/benchbuild/projects/benchbuild/js.py b/benchbuild/projects/benchbuild/js.py index a6faf008b..2bf71f09e 100644 --- a/benchbuild/projects/benchbuild/js.py +++ b/benchbuild/projects/benchbuild/js.py @@ -15,21 +15,21 @@ 
class SpiderMonkey(bb.Project): SpiderMonkey requires a legacy version of autoconf: autoconf-2.13 """ - NAME = 'js' - DOMAIN = 'compilation' - GROUP = 'benchbuild' + NAME = "js" + DOMAIN = "compilation" + GROUP = "benchbuild" SOURCE = [ Git( - remote='https://github.com/mozilla/gecko-dev.git', - local='gecko-dev.git', + remote="https://github.com/mozilla/gecko-dev.git", + local="gecko-dev.git", limit=5, - refspec='HEAD' + refspec="HEAD", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - gecko_repo = local.path(self.source_of('gecko-dev.git')) + gecko_repo = local.path(self.source_of("gecko-dev.git")) js_dir = gecko_repo / "js" / "src" clang = bb.compiler.cc(self) @@ -40,7 +40,7 @@ def compile(self): DIST=self.builddir, MOZJS_MAJOR_VERSION=0, MOZJS_MINOR_VERSION=0, - MOZJS_PATCH_VERSION=0 + MOZJS_PATCH_VERSION=0, ): make_src_pkg() @@ -55,7 +55,7 @@ def compile(self): with local.env(CC=str(clang), CXX=str(clang_cxx)): configure = local["../configure"] _configure = bb.watch(configure) - _configure('--without-system-zlib') + _configure("--without-system-zlib") mozjs_obj_dir = mozjs_src_dir / "obj" with local.cwd(mozjs_obj_dir): diff --git a/benchbuild/projects/benchbuild/lammps.py b/benchbuild/projects/benchbuild/lammps.py index a573e3d0b..e461e258c 100644 --- a/benchbuild/projects/benchbuild/lammps.py +++ b/benchbuild/projects/benchbuild/lammps.py @@ -7,24 +7,24 @@ class Lammps(bb.Project): - """ LAMMPS benchmark """ + """LAMMPS benchmark""" - NAME = 'lammps' - DOMAIN = 'scientific' - GROUP = 'benchbuild' + NAME = "lammps" + DOMAIN = "scientific" + GROUP = "benchbuild" SOURCE = [ Git( - remote='https://github.com/lammps/lammps', - local='lammps.git', + remote="https://github.com/lammps/lammps", + local="lammps.git", limit=5, - refspec='HEAD' + refspec="HEAD", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") 
def run_tests(self): - lammps_repo = local.path(self.source_of('lammps.git')) - src = lammps_repo / 'src' + lammps_repo = local.path(self.source_of("lammps.git")) + src = lammps_repo / "src" examples = lammps_repo / "examples" lmp_serial = bb.wrap(src / "lmp_serial", self) @@ -37,14 +37,11 @@ def run_tests(self): _lmp_serial(retcode=None) def compile(self): - lammps_repo = local.path(self.source_of('lammps.git')) - src = lammps_repo / 'src' + lammps_repo = local.path(self.source_of("lammps.git")) + src = lammps_repo / "src" self.ldflags += ["-lgomp"] clang_cxx = bb.compiler.cxx(self) with local.cwd(src): _make = bb.watch(make) - _make( - "CC=" + str(clang_cxx), "LINK=" + str(clang_cxx), "clean", - "serial" - ) + _make("CC=" + str(clang_cxx), "LINK=" + str(clang_cxx), "clean", "serial") diff --git a/benchbuild/projects/benchbuild/lapack.py b/benchbuild/projects/benchbuild/lapack.py index c4b675648..007ea5ecc 100644 --- a/benchbuild/projects/benchbuild/lapack.py +++ b/benchbuild/projects/benchbuild/lapack.py @@ -11,21 +11,21 @@ class OpenBlas(bb.Project): - NAME = 'openblas' - DOMAIN = 'scientific' - GROUP = 'benchbuild' + NAME = "openblas" + DOMAIN = "scientific" + GROUP = "benchbuild" SOURCE = [ Git( - remote='https://github.com/xianyi/OpenBLAS', - local='OpenBLAS', + remote="https://github.com/xianyi/OpenBLAS", + local="OpenBLAS", limit=5, - refspec='HEAD' + refspec="HEAD", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - openblas_repo = local.path(self.source_of('OpenBLAS')) + openblas_repo = local.path(self.source_of("OpenBLAS")) clang = bb.compiler.cc(self) with local.cwd(openblas_repo): _make = bb.watch(make) @@ -33,24 +33,24 @@ def compile(self): def run_tests(self): log = logging.getLogger(__name__) - log.warning('Not implemented') + log.warning("Not implemented") class Lapack(bb.Project): - NAME = 'lapack' - DOMAIN = 'scientific' - GROUP = 'benchbuild' + NAME = 
"lapack" + DOMAIN = "scientific" + GROUP = "benchbuild" SOURCE = [ HTTP( - remote={'3.2.1': 'http://www.netlib.org/clapack/clapack.tgz'}, - local='clapack.tgz' + remote={"3.2.1": "http://www.netlib.org/clapack/clapack.tgz"}, + local="clapack.tgz", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - clapack_source = local.path(self.source_of('clapack.tgz')) - clapack_version = self.version_of('clapack.tgz') + clapack_source = local.path(self.source_of("clapack.tgz")) + clapack_version = self.version_of("clapack.tgz") tar("xfz", clapack_source) unpack_dir = "CLAPACK-{0}".format(clapack_version) @@ -58,23 +58,30 @@ def compile(self): clang = bb.compiler.cc(self) clang_cxx = bb.compiler.cxx(self) with local.cwd(unpack_dir): - with open("make.inc", 'w') as makefile: + with open("make.inc", "w") as makefile: content = [ - "SHELL = /bin/sh\n", "PLAT = _LINUX\n", - "CC = " + str(clang) + "\n", "CXX = " + - str(clang_cxx) + "\n", "CFLAGS = -I$(TOPDIR)/INCLUDE\n", - "LOADER = " + str(clang) + "\n", "LOADOPTS = \n", + "SHELL = /bin/sh\n", + "PLAT = _LINUX\n", + "CC = " + str(clang) + "\n", + "CXX = " + str(clang_cxx) + "\n", + "CFLAGS = -I$(TOPDIR)/INCLUDE\n", + "LOADER = " + str(clang) + "\n", + "LOADOPTS = \n", "NOOPT = -O0 -I$(TOPDIR)/INCLUDE\n", - "DRVCFLAGS = $(CFLAGS)\n", "F2CCFLAGS = $(CFLAGS)\n", - "TIMER = INT_CPU_TIME\n", "ARCH = ar\n", - "ARCHFLAGS = cr\n", "RANLIB = ranlib\n", - "BLASLIB = ../../blas$(PLAT).a\n", "XBLASLIB = \n", + "DRVCFLAGS = $(CFLAGS)\n", + "F2CCFLAGS = $(CFLAGS)\n", + "TIMER = INT_CPU_TIME\n", + "ARCH = ar\n", + "ARCHFLAGS = cr\n", + "RANLIB = ranlib\n", + "BLASLIB = ../../blas$(PLAT).a\n", + "XBLASLIB = \n", "LAPACKLIB = lapack$(PLAT).a\n", "F2CLIB = ../../F2CLIBS/libf2c.a\n", "TMGLIB = tmglib$(PLAT).a\n", "EIGSRCLIB = eigsrc$(PLAT).a\n", "LINSRCLIB = linsrc$(PLAT).a\n", - "F2CLIB = ../../F2CLIBS/libf2c.a\n" + "F2CLIB = ../../F2CLIBS/libf2c.a\n", ] 
makefile.writelines(content) @@ -85,7 +92,7 @@ def compile(self): _make("-j", get_number_of_jobs(CFG), "-f", "Makeblat3") def run_tests(self): - clapack_version = self.version_of('clapack.tgz') + clapack_version = self.version_of("clapack.tgz") unpack_dir = local.path("CLAPACK-{0}".format(clapack_version)) with local.cwd(unpack_dir / "BLAS"): xblat2s = bb.wrap("xblat2s", self) diff --git a/benchbuild/projects/benchbuild/leveldb.py b/benchbuild/projects/benchbuild/leveldb.py index 4559ae6eb..236d51ed7 100644 --- a/benchbuild/projects/benchbuild/leveldb.py +++ b/benchbuild/projects/benchbuild/leveldb.py @@ -9,21 +9,21 @@ class LevelDB(bb.Project): - NAME = 'leveldb' - DOMAIN = 'database' - GROUP = 'benchbuild' + NAME = "leveldb" + DOMAIN = "database" + GROUP = "benchbuild" SOURCE = [ Git( - remote='https://github.com/google/leveldb', - local='leveldb.src', + remote="https://github.com/google/leveldb", + local="leveldb.src", limit=5, - refspec='HEAD' + refspec="HEAD", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - leveldb_repo = local.path(self.source_of('leveldb.src')) + leveldb_repo = local.path(self.source_of("leveldb.src")) clang = bb.compiler.cc(self) clang_cxx = bb.compiler.cxx(self) @@ -41,12 +41,13 @@ def run_tests(self): Args: experiment: The experiment's run function. """ - leveldb_repo = local.path(self.source_of('leveldb.src')) + leveldb_repo = local.path(self.source_of("leveldb.src")) leveldb = bb.wrap(leveldb_repo / "out-static" / "db_bench", self) _leveldb = bb.watch(leveldb) with local.env( - LD_LIBRARY_PATH="{}:{}". 
- format(leveldb_repo / "out-shared", getenv("LD_LIBRARY_PATH", "")) + LD_LIBRARY_PATH="{}:{}".format( + leveldb_repo / "out-shared", getenv("LD_LIBRARY_PATH", "") + ) ): _leveldb() diff --git a/benchbuild/projects/benchbuild/linpack.py b/benchbuild/projects/benchbuild/linpack.py index d0ec1403b..646ad0853 100644 --- a/benchbuild/projects/benchbuild/linpack.py +++ b/benchbuild/projects/benchbuild/linpack.py @@ -11,18 +11,18 @@ class Linpack(bb.Project): - """ Linpack (C-Version) """ + """Linpack (C-Version)""" - NAME = 'linpack' - DOMAIN = 'scientific' - GROUP = 'benchbuild' + NAME = "linpack" + DOMAIN = "scientific" + GROUP = "benchbuild" SOURCE = [ HTTP( - remote={'5_88': 'http://www.netlib.org/benchmark/linpackc.new'}, - local='linpack.c' + remote={"5_88": "http://www.netlib.org/benchmark/linpackc.new"}, + local="linpack.c", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") WORKLOADS = {WorkloadSet(): [Command(SourceRoot(".") / "linpack")]} @@ -33,4 +33,4 @@ def compile(self) -> None: self.ldflags += ["-lm"] clang = bb.compiler.cc(self) _clang = bb.watch(clang) - _clang("-o", 'linpack', "linpack.c") + _clang("-o", "linpack", "linpack.c") diff --git a/benchbuild/projects/benchbuild/lulesh.py b/benchbuild/projects/benchbuild/lulesh.py index 7535623e8..c00a3fdf3 100644 --- a/benchbuild/projects/benchbuild/lulesh.py +++ b/benchbuild/projects/benchbuild/lulesh.py @@ -7,20 +7,20 @@ class Lulesh(bb.Project): - """ LULESH, Serial """ + """LULESH, Serial""" - NAME = 'lulesh' - DOMAIN = 'scientific' - GROUP = 'benchbuild' + NAME = "lulesh" + DOMAIN = "scientific" + GROUP = "benchbuild" SOURCE = [ Git( - remote='https://github.com/LLNL/LULESH/', - local='lulesh.git', + remote="https://github.com/LLNL/LULESH/", + local="lulesh.git", limit=5, - refspec='HEAD' + refspec="HEAD", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") WORKLOADS = 
{ WorkloadSet(): [ Command(SourceRoot("lulesh.git") / "lulesh", "-i", 1), @@ -37,19 +37,19 @@ class Lulesh(bb.Project): Command(SourceRoot("lulesh.git") / "lulesh", "-i", 12), Command(SourceRoot("lulesh.git") / "lulesh", "-i", 13), Command(SourceRoot("lulesh.git") / "lulesh", "-i", 14), - Command(SourceRoot("lulesh.git") / "lulesh", "-i", 15) + Command(SourceRoot("lulesh.git") / "lulesh", "-i", 15), ] } def compile(self): - lulesh_repo = local.path(self.source_of('lulesh.git')) + lulesh_repo = local.path(self.source_of("lulesh.git")) self.cflags += ["-DUSE_MPI=0"] cxx_files = local.cwd / lulesh_repo // "*.cc" clang = bb.compiler.cxx(self) with local.cwd(lulesh_repo): for src_file in cxx_files: - clang("-c", "-o", src_file + '.o', src_file) + clang("-c", "-o", src_file + ".o", src_file) obj_files = local.cwd / lulesh_repo // "*.cc.o" with local.cwd(lulesh_repo): @@ -57,20 +57,20 @@ def compile(self): class LuleshOMP(bb.Project): - """ LULESH, OpenMP """ + """LULESH, OpenMP""" - NAME = 'lulesh-omp' - DOMAIN = 'scientific' - GROUP = 'benchbuild' + NAME = "lulesh-omp" + DOMAIN = "scientific" + GROUP = "benchbuild" SOURCE = [ Git( - remote='https://github.com/LLNL/LULESH/', - local='lulesh.git', + remote="https://github.com/LLNL/LULESH/", + local="lulesh.git", limit=5, - refspec='HEAD' + refspec="HEAD", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") WORKLOADS = { WorkloadSet(): [ Command(SourceRoot("lulesh.git") / "lulesh", "-i", 1), @@ -87,19 +87,19 @@ class LuleshOMP(bb.Project): Command(SourceRoot("lulesh.git") / "lulesh", "-i", 12), Command(SourceRoot("lulesh.git") / "lulesh", "-i", 13), Command(SourceRoot("lulesh.git") / "lulesh", "-i", 14), - Command(SourceRoot("lulesh.git") / "lulesh", "-i", 15) + Command(SourceRoot("lulesh.git") / "lulesh", "-i", 15), ] } def compile(self): - lulesh_repo = local.path(self.source_of('lulesh.git')) - self.cflags = ['-DUSE_MPI=0', '-fopenmp'] + lulesh_repo = 
local.path(self.source_of("lulesh.git")) + self.cflags = ["-DUSE_MPI=0", "-fopenmp"] cxx_files = local.cwd / lulesh_repo // "*.cc" clang = bb.compiler.cxx(self) with local.cwd(lulesh_repo): for src_file in cxx_files: - clang("-c", "-o", src_file + '.o', src_file) + clang("-c", "-o", src_file + ".o", src_file) obj_files = local.cwd / lulesh_repo // "*.cc.o" with local.cwd(lulesh_repo): diff --git a/benchbuild/projects/benchbuild/mcrypt.py b/benchbuild/projects/benchbuild/mcrypt.py index f5ab2f211..ce9c55663 100644 --- a/benchbuild/projects/benchbuild/mcrypt.py +++ b/benchbuild/projects/benchbuild/mcrypt.py @@ -10,41 +10,41 @@ class MCrypt(bb.Project): - """ MCrypt benchmark """ + """MCrypt benchmark""" - NAME = 'mcrypt' - DOMAIN = 'encryption' - GROUP = 'benchbuild' + NAME = "mcrypt" + DOMAIN = "encryption" + GROUP = "benchbuild" SOURCE = [ HTTP( remote={ - '2.6.8': ( - 'http://sourceforge.net/projects/mcrypt/files/MCrypt/' - '2.6.8/mcrypt-2.6.8.tar.gz' + "2.6.8": ( + "http://sourceforge.net/projects/mcrypt/files/MCrypt/" + "2.6.8/mcrypt-2.6.8.tar.gz" ) }, - local='mcrypt.tar.gz' + local="mcrypt.tar.gz", ), HTTP( remote={ - '2.5.8': ( - 'http://sourceforge.net/projects/mcrypt/files/Libmcrypt/' - '2.5.8/libmcrypt-2.5.8.tar.gz' + "2.5.8": ( + "http://sourceforge.net/projects/mcrypt/files/Libmcrypt/" + "2.5.8/libmcrypt-2.5.8.tar.gz" ) }, - local='libmcrypt.tar.gz' + local="libmcrypt.tar.gz", ), HTTP( remote={ - '0.9.9.9': ( - 'http://sourceforge.net/projects/mhash/files/mhash/' - '0.9.9.9/mhash-0.9.9.9.tar.gz' + "0.9.9.9": ( + "http://sourceforge.net/projects/mhash/files/mhash/" + "0.9.9.9/mhash-0.9.9.9.tar.gz" ) }, - local='mhash.tar.gz' - ) + local="mhash.tar.gz", + ), ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") libmcrypt_dir = "libmcrypt-2.5.8" libmcrypt_file = libmcrypt_dir + ".tar.gz" @@ -53,13 +53,13 @@ class MCrypt(bb.Project): mhash_file = mhash_dir + ".tar.gz" def compile(self): - 
mcrypt_source = local.path(self.source_of('mcrypt.tar.gz')) - libmcrypt_source = local.path(self.source_of('libmcrypt.tar.gz')) - mhash_source = local.path(self.source_of('mhash.tar.gz')) + mcrypt_source = local.path(self.source_of("mcrypt.tar.gz")) + libmcrypt_source = local.path(self.source_of("libmcrypt.tar.gz")) + mhash_source = local.path(self.source_of("mhash.tar.gz")) - tar('xfz', mcrypt_source) - tar('xfz', libmcrypt_source) - tar('xfz', mhash_source) + tar("xfz", mcrypt_source) + tar("xfz", libmcrypt_source) + tar("xfz", mhash_source) builddir = local.path(self.builddir) mcrypt_dir = builddir / "mcrypt-2.6.8" @@ -96,17 +96,19 @@ def compile(self): mod_env = dict( CC=_cc, CXX=_cxx, - LD_LIBRARY_PATH=path. - list_to_path([str(lib_dir)] + env.get("LD_LIBRARY_PATH", [])), + LD_LIBRARY_PATH=path.list_to_path( + [str(lib_dir)] + env.get("LD_LIBRARY_PATH", []) + ), LDFLAGS="-L" + str(lib_dir), - CFLAGS="-I" + str(inc_dir) + CFLAGS="-I" + str(inc_dir), ) env.update(mod_env) with local.env(**env): _configure( - "--disable-dependency-tracking", "--disable-shared", + "--disable-dependency-tracking", + "--disable-shared", "--with-libmcrypt=" + builddir, - "--with-libmhash=" + builddir + "--with-libmhash=" + builddir, ) _make("-j", get_number_of_jobs(CFG)) diff --git a/benchbuild/projects/benchbuild/minisat.py b/benchbuild/projects/benchbuild/minisat.py index 42c516caf..8c301fa4d 100644 --- a/benchbuild/projects/benchbuild/minisat.py +++ b/benchbuild/projects/benchbuild/minisat.py @@ -7,37 +7,36 @@ class Minisat(bb.Project): - """ minisat benchmark """ + """minisat benchmark""" - NAME = 'minisat' - DOMAIN = 'verification' - GROUP = 'benchbuild' + NAME = "minisat" + DOMAIN = "verification" + GROUP = "benchbuild" SOURCE = [ Git( - remote='https://github.com/niklasso/minisat', - local='minisat.git', + remote="https://github.com/niklasso/minisat", + local="minisat.git", limit=5, - refspec='HEAD' + refspec="HEAD", ), HTTP( remote={ - '2016-11-minisat.tar.gz': - 
'http://lairosiel.de/dist/2016-11-minisat.tar.gz' + "2016-11-minisat.tar.gz": "http://lairosiel.de/dist/2016-11-minisat.tar.gz" }, - local='inputs.tar.gz' - ) + local="inputs.tar.gz", + ), ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def run_tests(self): - minisat_repo = local.path(self.source_of('minisat.git')) - minisat_build = minisat_repo / 'build' / 'dynamic' - minisat_lib = minisat_build / 'lib' - minisat_bin = minisat_build / 'bin' + minisat_repo = local.path(self.source_of("minisat.git")) + minisat_build = minisat_repo / "build" / "dynamic" + minisat_lib = minisat_build / "lib" + minisat_bin = minisat_build / "bin" - test_source = local.path(self.source_of('inputs.tar.gz')) - test_dir = local.path('./minisat/') - tar('xf', test_source) + test_source = local.path(self.source_of("inputs.tar.gz")) + test_dir = local.path("./minisat/") + tar("xf", test_source) testfiles = test_dir // "*.cnf.gz" @@ -49,7 +48,7 @@ def run_tests(self): _minisat() def compile(self): - minisat_repo = local.path(self.source_of('minisat.git')) + minisat_repo = local.path(self.source_of("minisat.git")) with local.cwd(minisat_repo): _make = bb.watch(make) _make("config") @@ -57,7 +56,4 @@ def compile(self): clang = bb.compiler.cc(self) clang_cxx = bb.compiler.cxx(self) - _make( - "CC=" + str(clang), "CXX=" + str(clang_cxx), "clean", "lsh", - "sh" - ) + _make("CC=" + str(clang), "CXX=" + str(clang_cxx), "clean", "lsh", "sh") diff --git a/benchbuild/projects/benchbuild/openssl.py b/benchbuild/projects/benchbuild/openssl.py index 4495929d0..274d018a1 100644 --- a/benchbuild/projects/benchbuild/openssl.py +++ b/benchbuild/projects/benchbuild/openssl.py @@ -7,44 +7,79 @@ class LibreSSL(bb.Project): - """ OpenSSL """ + """OpenSSL""" - NAME = 'libressl' - DOMAIN = 'encryption' - GROUP = 'benchbuild' + NAME = "libressl" + DOMAIN = "encryption" + GROUP = "benchbuild" BINARIES = [ - "aeadtest", "aes_wrap", "asn1test", 
"base64test", "bftest", "bntest", - "bytestringtest", "casttest", "chachatest", "cipherstest", "cts128test", - "destest", "dhtest", "dsatest", "ecdhtest", "ecdsatest", "ectest", - "enginetest", "evptest", "exptest", "gcm128test", "gost2814789t", - "hmactest", "ideatest", "igetest", "md4test", "md5test", "mdc2test", - "mont", "pbkdf2", "pkcs7test", "poly1305test", "pq_test", "randtest", - "rc2test", "rc4test", "rmdtest", "sha1test", "sha256test", "sha512test", - "shatest", "ssltest", "timingsafe", "utf8test" + "aeadtest", + "aes_wrap", + "asn1test", + "base64test", + "bftest", + "bntest", + "bytestringtest", + "casttest", + "chachatest", + "cipherstest", + "cts128test", + "destest", + "dhtest", + "dsatest", + "ecdhtest", + "ecdsatest", + "ectest", + "enginetest", + "evptest", + "exptest", + "gcm128test", + "gost2814789t", + "hmactest", + "ideatest", + "igetest", + "md4test", + "md5test", + "mdc2test", + "mont", + "pbkdf2", + "pkcs7test", + "poly1305test", + "pq_test", + "randtest", + "rc2test", + "rc4test", + "rmdtest", + "sha1test", + "sha256test", + "sha512test", + "shatest", + "ssltest", + "timingsafe", + "utf8test", ] SOURCE = [ HTTP( remote={ - '2.1.6.': ( - 'http://ftp.openbsd.org/pub/OpenBSD/LibreSSL/' - 'libressl-2.1.6.tar.gz' + "2.1.6.": ( + "http://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl-2.1.6.tar.gz" ) }, - local='libressl.tar.gz' + local="libressl.tar.gz", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - libressl_source = local.path(self.source_of('libressl.tar.gz')) - libressl_version = self.version_of('libressl.tar.gz') + libressl_source = local.path(self.source_of("libressl.tar.gz")) + libressl_version = self.version_of("libressl.tar.gz") self.cflags += ["-fPIC"] clang = bb.compiler.cc(self) tar("xfz", libressl_source) - unpack_dir = local.path(f'libressl-{libressl_version}') + unpack_dir = local.path(f"libressl-{libressl_version}") configure = 
local[unpack_dir / "configure"] _configure = bb.watch(configure) _make = bb.watch(make) @@ -52,8 +87,11 @@ def compile(self): with local.cwd(unpack_dir): with local.env(CC=str(clang)): _configure( - "--disable-asm", "--disable-shared", "--enable-static", - "--disable-dependency-tracking", "--with-pic=yes" + "--disable-asm", + "--disable-shared", + "--enable-static", + "--disable-dependency-tracking", + "--with-pic=yes", ) _make("-j8") @@ -62,8 +100,8 @@ def compile(self): _make_tests(LibreSSL.BINARIES) def run_tests(self): - libressl_version = self.version_of('libressl.tar.gz') - unpack_dir = local.path(f'libressl-{libressl_version}') + libressl_version = self.version_of("libressl.tar.gz") + unpack_dir = local.path(f"libressl-{libressl_version}") with local.cwd(unpack_dir / "tests"): for binary in LibreSSL.BINARIES: bb.wrap(local.cwd / binary, self) diff --git a/benchbuild/projects/benchbuild/povray.py b/benchbuild/projects/benchbuild/povray.py index a8ff6f5c0..ec18aff23 100644 --- a/benchbuild/projects/benchbuild/povray.py +++ b/benchbuild/projects/benchbuild/povray.py @@ -7,52 +7,48 @@ class Povray(bb.Project): - """ povray benchmark """ + """povray benchmark""" - NAME = 'povray' - DOMAIN = 'multimedia' - GROUP = 'benchbuild' + NAME = "povray" + DOMAIN = "multimedia" + GROUP = "benchbuild" SOURCE = [ - Git(remote='https://github.com/POV-Ray/povray', local='povray.git'), + Git(remote="https://github.com/POV-Ray/povray", local="povray.git"), HTTP( remote={ - '1.59.0': - 'http://sourceforge.net/projects/boost/files/boost/1.59.0/' - 'boost_1_59_0.tar.bz2' + "1.59.0": "http://sourceforge.net/projects/boost/files/boost/1.59.0/" + "boost_1_59_0.tar.bz2" }, - local='boost.tar.bz2' + local="boost.tar.bz2", ), HTTP( - remote={ - '2016-05-povray': - 'http://lairosiel.de/dist/2016-05-povray.tar.gz' - }, - local='inputs.tar.gz' - ) + remote={"2016-05-povray": "http://lairosiel.de/dist/2016-05-povray.tar.gz"}, + local="inputs.tar.gz", + ), ] - CONTAINER = 
ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") boost_src_dir = "boost_1_59_0" boost_src_file = boost_src_dir + ".tar.bz2" - boost_src_uri = \ - "http://sourceforge.net/projects/boost/files/boost/1.59.0/" + \ - boost_src_file + boost_src_uri = ( + "http://sourceforge.net/projects/boost/files/boost/1.59.0/" + boost_src_file + ) def compile(self): - povray_repo = local.path(self.source_of('povray.git')) - boost_source = local.path(self.source_of('boost.tar.bz2')) - inputs_source = local.path(self.source_of('inputs.tar.gz')) + povray_repo = local.path(self.source_of("povray.git")) + boost_source = local.path(self.source_of("boost.tar.bz2")) + inputs_source = local.path(self.source_of("inputs.tar.gz")) - tar('xf', boost_source) - tar('xf', inputs_source) + tar("xf", boost_source) + tar("xf", inputs_source) - inputs_dir = local.path('./povray/') + inputs_dir = local.path("./povray/") - cp("-ar", inputs_dir / "cfg", '.') - cp("-ar", inputs_dir / "etc", '.') - cp("-ar", inputs_dir / "scenes", '.') - cp("-ar", inputs_dir / "share", '.') - cp("-ar", inputs_dir / "test", '.') + cp("-ar", inputs_dir / "cfg", ".") + cp("-ar", inputs_dir / "etc", ".") + cp("-ar", inputs_dir / "scenes", ".") + cp("-ar", inputs_dir / "share", ".") + cp("-ar", inputs_dir / "test", ".") clang = bb.compiler.cc(self) clang_cxx = bb.compiler.cxx(self) @@ -62,14 +58,16 @@ def compile(self): mkdir(boost_prefix) bootstrap = local["./bootstrap.sh"] _bootstrap = bb.watch(bootstrap) - _bootstrap( - "--with-toolset=clang", "--prefix=\"{0}\"".format(boost_prefix) - ) + _bootstrap("--with-toolset=clang", '--prefix="{0}"'.format(boost_prefix)) _b2 = bb.watch(local["./b2"]) _b2( - "--ignore-site-config", "variant=release", "link=static", - "threading=multi", "optimization=speed", "install" + "--ignore-site-config", + "variant=release", + "link=static", + "threading=multi", + "optimization=speed", + "install", ) with local.cwd(povray_repo): @@ -79,17 +77,15 @@ 
def compile(self): configure = local["./configure"] _configure = bb.watch(configure) with local.env( - COMPILED_BY="BB ", - CC=str(clang), - CXX=str(clang_cxx) + COMPILED_BY="BB ", CC=str(clang), CXX=str(clang_cxx) ): _configure("--with-boost=" + boost_prefix) _make = bb.watch(make) _make("all") def run_tests(self): - povray_repo = local.path(self.source_of('povray.git')) - povray_binary = povray_repo / 'unix' / self.name + povray_repo = local.path(self.source_of("povray.git")) + povray_binary = povray_repo / "unix" / self.name tmpdir = local.path("tmp") tmpdir.mkdir() @@ -101,15 +97,18 @@ def run_tests(self): pov_files = find(scene_dir, "-name", "*.pov").splitlines() for pov_f in pov_files: with local.env( - POVRAY=povray_binary, - INSTALL_DIR='.', - OUTPUT_DIR=tmpdir, - POVINI=povini + POVRAY=povray_binary, INSTALL_DIR=".", OUTPUT_DIR=tmpdir, POVINI=povini ): - options = (((( - head["-n", "50", "\"" + pov_f + "\""] | - grep["-E", "'^//[ ]+[-+]{1}[^ -]'"] - ) | head["-n", "1"]) | sed["s?^//[ ]*??"]) & FG) + options = ( + ( + ( + head["-n", "50", '"' + pov_f + '"'] + | grep["-E", "'^//[ ]+[-+]{1}[^ -]'"] + ) + | head["-n", "1"] + ) + | sed["s?^//[ ]*??"] + ) & FG _povray( "+L" + scene_dir, "+L" + tmpdir, @@ -117,5 +116,5 @@ def run_tests(self): "-o" + tmpdir, options, "-p", - retcode=None + retcode=None, ) diff --git a/benchbuild/projects/benchbuild/python.py b/benchbuild/projects/benchbuild/python.py index 97528ac58..cfd847773 100644 --- a/benchbuild/projects/benchbuild/python.py +++ b/benchbuild/projects/benchbuild/python.py @@ -7,30 +7,27 @@ class Python(bb.Project): - """ python benchmarks """ + """python benchmarks""" - NAME = 'python' - DOMAIN = 'compilation' - GROUP = 'benchbuild' + NAME = "python" + DOMAIN = "compilation" + GROUP = "benchbuild" SOURCE = [ HTTP( remote={ - '3.4.3': ( - 'https://www.python.org/ftp/python/3.4.3/' - 'Python-3.4.3.tar.xz' - ) + "3.4.3": ("https://www.python.org/ftp/python/3.4.3/Python-3.4.3.tar.xz") }, - local='python.tar.xz' + 
local="python.tar.xz", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - python_source = local.path(self.source_of('python.tar.xz')) - python_version = self.version_of('python.tar.xz') + python_source = local.path(self.source_of("python.tar.xz")) + python_version = self.version_of("python.tar.xz") tar("xfJ", python_source) - unpack_dir = local.path(f'Python-{python_version}') + unpack_dir = local.path(f"Python-{python_version}") clang = bb.compiler.cc(self) clang_cxx = bb.compiler.cxx(self) @@ -45,8 +42,8 @@ def compile(self): _make() def run_tests(self): - python_version = self.version_of('python.tar.xz') - unpack_dir = local.path(f'Python-{python_version}') + python_version = self.version_of("python.tar.xz") + unpack_dir = local.path(f"Python-{python_version}") bb.wrap(unpack_dir / "python", self) with local.cwd(unpack_dir): diff --git a/benchbuild/projects/benchbuild/rasdaman.py b/benchbuild/projects/benchbuild/rasdaman.py index 826d548c1..1c28d173e 100644 --- a/benchbuild/projects/benchbuild/rasdaman.py +++ b/benchbuild/projects/benchbuild/rasdaman.py @@ -11,33 +11,33 @@ class Rasdaman(bb.Project): - """ Rasdaman """ + """Rasdaman""" - NAME = 'Rasdaman' - DOMAIN = 'database' - GROUP = 'benchbuild' + NAME = "Rasdaman" + DOMAIN = "database" + GROUP = "benchbuild" SOURCE = [ Git( - remote='git://rasdaman.org/rasdaman.git', - local='rasdaman.git', + remote="git://rasdaman.org/rasdaman.git", + local="rasdaman.git", limit=5, - refspec='HEAD' + refspec="HEAD", ), Git( - remote='https://github.com/OSGeo/gdal', - local='gdal.git', + remote="https://github.com/OSGeo/gdal", + local="gdal.git", limit=5, - refspec='HEAD' - ) + refspec="HEAD", + ), ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") gdal_dir = "gdal" gdal_uri = "https://github.com/OSGeo/gdal" def compile(self): - rasdaman_repo = 
local.path(self.source_of('rasdaman.git')) - gdal_repo = local.path(self.source_of('gdal.git')) + rasdaman_repo = local.path(self.source_of("rasdaman.git")) + gdal_repo = local.path(self.source_of("gdal.git")) clang = bb.compiler.cc(self) clang_cxx = bb.compiler.cxx(self) @@ -48,8 +48,11 @@ def compile(self): with local.env(CC=str(clang), CXX=str(clang_cxx)): _configure( - "--with-pic", "--enable-static", "--with-gnu-ld", - "--without-ld-shared", "--without-libtool" + "--with-pic", + "--enable-static", + "--with-gnu-ld", + "--without-ld-shared", + "--without-libtool", ) _make = bb.watch(make) _make("-j", get_number_of_jobs(CFG)) @@ -61,13 +64,16 @@ def compile(self): with local.env(CC=str(clang), CXX=str(clang_cxx)): _configure( - "--without-debug-symbols", "--with-static-libs", - "--disable-java", "--with-pic", "--disable-debug", - "--without-docs" + "--without-debug-symbols", + "--with-static-libs", + "--disable-java", + "--with-pic", + "--disable-debug", + "--without-docs", ) _make = bb.watch(make) _make("clean", "all", "-j", get_number_of_jobs(CFG)) def run_tests(self): log = logging.getLogger(__name__) - log.warning('Not implemented') + log.warning("Not implemented") diff --git a/benchbuild/projects/benchbuild/ruby.py b/benchbuild/projects/benchbuild/ruby.py index dacc954db..4e8bcb8b4 100644 --- a/benchbuild/projects/benchbuild/ruby.py +++ b/benchbuild/projects/benchbuild/ruby.py @@ -9,34 +9,30 @@ class Ruby(bb.Project): - NAME = 'ruby' - DOMAIN = 'compilation' - GROUP = 'benchbuild' + NAME = "ruby" + DOMAIN = "compilation" + GROUP = "benchbuild" SOURCE = [ HTTP( remote={ - '2.2.2': ( - 'http://cache.ruby-lang.org/pub/ruby/2.2.2/' - 'ruby-2.2.2.tar.gz' - ) + "2.2.2": ("http://cache.ruby-lang.org/pub/ruby/2.2.2/ruby-2.2.2.tar.gz") }, - local='ruby.tar.gz' + local="ruby.tar.gz", ), HTTP( remote={ - '2016-11-ruby-inputs.tar.gz': - 'http://lairosiel.de/dist/2016-11-ruby-inputs.tar.gz' + "2016-11-ruby-inputs.tar.gz": 
"http://lairosiel.de/dist/2016-11-ruby-inputs.tar.gz" }, - local='inputs.tar.gz' - ) + local="inputs.tar.gz", + ), ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - ruby_source = local.path(self.source_of('ruby.tar.gz')) - ruby_version = self.version_of('ruby.tar.gz') + ruby_source = local.path(self.source_of("ruby.tar.gz")) + ruby_version = self.version_of("ruby.tar.gz") tar("xfz", ruby_source) - unpack_dir = local.path(f'ruby-{ruby_version}') + unpack_dir = local.path(f"ruby-{ruby_version}") clang = bb.compiler.cc(self) clang_cxx = bb.compiler.cxx(self) @@ -49,16 +45,22 @@ def compile(self): _make("-j", get_number_of_jobs(CFG)) def run_tests(self): - ruby_version = self.version_of('ruby.tar.gz') - unpack_dir = local.path(f'ruby-{ruby_version}') + ruby_version = self.version_of("ruby.tar.gz") + unpack_dir = local.path(f"ruby-{ruby_version}") ruby_n = bb.wrap(unpack_dir / "ruby", self) - test_dir = local.path('./ruby/') + test_dir = local.path("./ruby/") with local.env(RUBYOPT=""): _ = bb.watch(ruby) ruby( test_dir / "benchmark" / "run.rb", - "--ruby=\"" + str(ruby_n) + "\"", - "--opts=\"-I" + test_dir / "lib" + " -I" + test_dir / "." + - " -I" + test_dir / ".ext" / "common" + "\"", "-r" + '--ruby="' + str(ruby_n) + '"', + '--opts="-I' + + test_dir / "lib" + + " -I" + + test_dir / "." 
+ + " -I" + + test_dir / ".ext" / "common" + + '"', + "-r", ) diff --git a/benchbuild/projects/benchbuild/sdcc.py b/benchbuild/projects/benchbuild/sdcc.py index 51bd6d455..281af12e2 100644 --- a/benchbuild/projects/benchbuild/sdcc.py +++ b/benchbuild/projects/benchbuild/sdcc.py @@ -8,11 +8,11 @@ class SDCC(bb.Project): - NAME = 'sdcc' - DOMAIN = 'compilation' - GROUP = 'benchbuild' - SRC_FILE = 'sdcc' - CONTAINER = ContainerImage().from_('benchbuild:alpine') + NAME = "sdcc" + DOMAIN = "compilation" + GROUP = "benchbuild" + SRC_FILE = "sdcc" + CONTAINER = ContainerImage().from_("benchbuild:alpine") src_uri = "svn://svn.code.sf.net/p/sdcc/code/trunk/" + SRC_FILE @@ -27,14 +27,13 @@ def compile(self): _configure = bb.watch(configure) with local.env(CC=str(clang), CXX=str(clang_cxx)): _configure( - "--without-ccache", "--disable-pic14-port", - "--disable-pic16-port" + "--without-ccache", "--disable-pic14-port", "--disable-pic16-port" ) _make = bb.watch(make) _make("-j", get_number_of_jobs(CFG)) def run_tests(self): - sdcc = bb.wrap('sdcc', self) + sdcc = bb.wrap("sdcc", self) _sdcc = bb.watch(sdcc) _sdcc() diff --git a/benchbuild/projects/benchbuild/sevenz.py b/benchbuild/projects/benchbuild/sevenz.py index e0dc6d655..4979fd9fb 100644 --- a/benchbuild/projects/benchbuild/sevenz.py +++ b/benchbuild/projects/benchbuild/sevenz.py @@ -7,32 +7,31 @@ class SevenZip(bb.Project): - """ 7Zip """ + """7Zip""" - NAME = '7z' - DOMAIN = 'compression' - GROUP = 'benchbuild' + NAME = "7z" + DOMAIN = "compression" + GROUP = "benchbuild" SOURCE = [ HTTP( remote={ - '16.02': - 'http://downloads.sourceforge.net/' - 'project/p7zip/p7zip/16.02/p7zip_16.02_src_all.tar.bz2' + "16.02": "http://downloads.sourceforge.net/" + "project/p7zip/p7zip/16.02/p7zip_16.02_src_all.tar.bz2" }, - local='p7zip.tar.bz2' + local="p7zip.tar.bz2", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - sevenzip_source = 
local.path(self.source_of('p7zip.tar.bz2')) - sevenzip_version = self.version_of('p7zip.tar.bz2') - unpack_dir = local.path(f'p7zip_{sevenzip_version}') - tar('xfj', sevenzip_source) + sevenzip_source = local.path(self.source_of("p7zip.tar.bz2")) + sevenzip_version = self.version_of("p7zip.tar.bz2") + unpack_dir = local.path(f"p7zip_{sevenzip_version}") + tar("xfj", sevenzip_source) cp( unpack_dir / "makefile.linux_clang_amd64_asm", - unpack_dir / "makefile.machine" + unpack_dir / "makefile.machine", ) clang = bb.compiler.cc(self) @@ -43,8 +42,8 @@ def compile(self): _make("CC=" + str(clang), "CXX=" + str(clang_cxx), "clean", "all") def run_tests(self): - sevenzip_version = self.version_of('p7zip.tar.bz2') - unpack_dir = local.path(f'p7zip_{sevenzip_version}') + sevenzip_version = self.version_of("p7zip.tar.bz2") + unpack_dir = local.path(f"p7zip_{sevenzip_version}") _7z = bb.wrap(unpack_dir / "bin" / "7za", self) _7z = bb.watch(_7z) _7z("b", "-mmt1") diff --git a/benchbuild/projects/benchbuild/sqlite3.py b/benchbuild/projects/benchbuild/sqlite3.py index bb242b3d6..1d283d5c3 100644 --- a/benchbuild/projects/benchbuild/sqlite3.py +++ b/benchbuild/projects/benchbuild/sqlite3.py @@ -7,33 +7,32 @@ class SQLite3(bb.Project): - """ SQLite3 """ + """SQLite3""" - NAME = 'sqlite3' - DOMAIN = 'database' - GROUP = 'benchbuild' + NAME = "sqlite3" + DOMAIN = "database" + GROUP = "benchbuild" SOURCE = [ HTTP( remote={ - '3080900': - 'http://www.sqlite.org/2015/sqlite-amalgamation-3080900.zip' + "3080900": "http://www.sqlite.org/2015/sqlite-amalgamation-3080900.zip" }, - local='sqlite.zip' + local="sqlite.zip", ), Git( - remote='https://github.com/google/leveldb', - local='leveldb.src', - refspec='HEAD', - limit=5 - ) + remote="https://github.com/google/leveldb", + local="leveldb.src", + refspec="HEAD", + limit=5, + ), ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - sqlite_source = 
local.path(self.source_of('sqlite.zip')) - sqlite_version = self.version_of('sqlite.zip') + sqlite_source = local.path(self.source_of("sqlite.zip")) + sqlite_version = self.version_of("sqlite.zip") unzip(sqlite_source) - unpack_dir = local.path(f'sqlite-amalgamation-{sqlite_version}') + unpack_dir = local.path(f"sqlite-amalgamation-{sqlite_version}") clang = bb.compiler.cc(self) _clang = bb.watch(clang) @@ -45,10 +44,10 @@ def compile(self): self.build_leveldb() def build_leveldb(self): - sqlite_version = self.version_of('sqlite.zip') + sqlite_version = self.version_of("sqlite.zip") - sqlite_dir = local.path(f'sqlite-amalgamation-{sqlite_version}') - leveldb_repo = local.path(self.source_of('leveldb.src')) + sqlite_dir = local.path(f"sqlite-amalgamation-{sqlite_version}") + leveldb_repo = local.path(self.source_of("leveldb.src")) # We need to place sqlite3 in front of all other flags. self.ldflags += ["-L{0}".format(sqlite_dir)] @@ -62,11 +61,9 @@ def build_leveldb(self): _make("clean", "out-static/db_bench_sqlite3") def run_tests(self): - leveldb_repo = local.path(self.source_of('leveldb.src')) + leveldb_repo = local.path(self.source_of("leveldb.src")) with local.cwd(leveldb_repo): with local.env(LD_LIBRARY_PATH=leveldb_repo): - sqlite = bb.wrap( - leveldb_repo / 'out-static' / 'db_bench_sqlite3', self - ) + sqlite = bb.wrap(leveldb_repo / "out-static" / "db_bench_sqlite3", self) _sqlite = bb.watch(sqlite) _sqlite() diff --git a/benchbuild/projects/benchbuild/tcc.py b/benchbuild/projects/benchbuild/tcc.py index fe0870ed5..b155887fc 100644 --- a/benchbuild/projects/benchbuild/tcc.py +++ b/benchbuild/projects/benchbuild/tcc.py @@ -9,29 +9,29 @@ class TCC(bb.Project): - VERSION = '0.9.26' - NAME = 'tcc' - DOMAIN = 'compilation' - GROUP = 'benchbuild' + VERSION = "0.9.26" + NAME = "tcc" + DOMAIN = "compilation" + GROUP = "benchbuild" SOURCE = [ HTTP( remote={ - '0.9.26': ( - 'http://download-mirror.savannah.gnu.org/releases/' - 'tinycc/tcc-0.9.26.tar.bz2' + "0.9.26": 
( + "http://download-mirror.savannah.gnu.org/releases/" + "tinycc/tcc-0.9.26.tar.bz2" ) }, - local='tcc.tar.bz2' + local="tcc.tar.bz2", ) ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - tcc_source = local.path(self.source_of('tcc.tar.bz2')) - tcc_version = self.version_of('tcc.tar.bz2') + tcc_source = local.path(self.source_of("tcc.tar.bz2")) + tcc_version = self.version_of("tcc.tar.bz2") tar("xf", tcc_source) - unpack_dir = local.path(f'tcc-{tcc_version}.tar.bz2') + unpack_dir = local.path(f"tcc-{tcc_version}.tar.bz2") clang = bb.compiler.cc(self) @@ -46,8 +46,8 @@ def compile(self): _make() def run_tests(self): - tcc_version = self.version_of('tcc.tar.bz2') - unpack_dir = local.path(f'tcc-{tcc_version}.tar.bz2') + tcc_version = self.version_of("tcc.tar.bz2") + unpack_dir = local.path(f"tcc-{tcc_version}.tar.bz2") with local.cwd(unpack_dir): with local.cwd("build"): bb.wrap("tcc", self) diff --git a/benchbuild/projects/benchbuild/x264.py b/benchbuild/projects/benchbuild/x264.py index 2a90c7a84..90592f304 100644 --- a/benchbuild/projects/benchbuild/x264.py +++ b/benchbuild/projects/benchbuild/x264.py @@ -10,30 +10,30 @@ class X264(bb.Project): - """ x264 """ + """x264""" NAME = "x264" DOMAIN = "multimedia" - GROUP = 'benchbuild' + GROUP = "benchbuild" SOURCE = [ Git( - remote='https://code.videolan.org/videolan/x264.git', - local='x264.git', - refspec='HEAD', - limit=5 + remote="https://code.videolan.org/videolan/x264.git", + local="x264.git", + refspec="HEAD", + limit=5, ), HTTP( - remote={'tbbt-small': 'http://lairosiel.de/dist/tbbt-small.y4m'}, - local='tbbt-small.y4m' + remote={"tbbt-small": "http://lairosiel.de/dist/tbbt-small.y4m"}, + local="tbbt-small.y4m", ), HTTP( - remote={'sintel': 'http://lairosiel.de/dist/Sintel.2010.720p.raw'}, - local='sintel.raw' + remote={"sintel": "http://lairosiel.de/dist/Sintel.2010.720p.raw"}, + local="sintel.raw", ), ] CONFIG = 
{"tbbt-small": [], "sintel": ["--input-res", "1280x720"]} - CONTAINER = declarative.ContainerImage().from_('benchbuild:alpine') + CONTAINER = declarative.ContainerImage().from_("benchbuild:alpine") # yapf: disable WORKLOADS = { @@ -77,7 +77,7 @@ class X264(bb.Project): # yapf: enable def compile(self): - x264_repo = local.path(self.source_of('x264.git')) + x264_repo = local.path(self.source_of("x264.git")) clang = bb.compiler.cc(self) with local.cwd(x264_repo): @@ -85,9 +85,7 @@ def compile(self): _configure = bb.watch(configure) with local.env(CC=str(clang)): - _configure( - "--disable-thread", "--disable-opencl", "--enable-pic" - ) + _configure("--disable-thread", "--disable-opencl", "--enable-pic") _make = bb.watch(make) _make("clean", "all", "-j", get_number_of_jobs(CFG)) diff --git a/benchbuild/projects/benchbuild/xz.py b/benchbuild/projects/benchbuild/xz.py index 6d8fc41f4..94c80602f 100644 --- a/benchbuild/projects/benchbuild/xz.py +++ b/benchbuild/projects/benchbuild/xz.py @@ -1,59 +1,63 @@ from plumbum import local import benchbuild as bb -from benchbuild import CFG from benchbuild.environments.domain.declarative import ContainerImage from benchbuild.source import HTTP from benchbuild.utils.cmd import make, tar class XZ(bb.Project): - """ XZ """ - VERSION = '5.2.1' - NAME = 'xz' - DOMAIN = 'compression' - GROUP = 'benchbuild' + """XZ""" + + VERSION = "5.2.1" + NAME = "xz" + DOMAIN = "compression" + GROUP = "benchbuild" SOURCE = [ HTTP( - remote={'5.2.1': 'http://tukaani.org/xz/xz-5.2.1.tar.gz'}, - local='xz.tar.gz' + remote={"5.2.1": "http://tukaani.org/xz/xz-5.2.1.tar.gz"}, local="xz.tar.gz" ), HTTP( - remote={'1.0': 'http://lairosiel.de/dist/compression.tar.gz'}, - local='compression.tar.gz' - ) + remote={"1.0": "http://lairosiel.de/dist/compression.tar.gz"}, + local="compression.tar.gz", + ), ] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - xz_source = 
local.path(self.source_of('xz.tar.gz')) - xz_version = self.version_of('xz.tar.gz') - compression_source = local.path(self.source_of('compression.tar.gz')) + xz_source = local.path(self.source_of("xz.tar.gz")) + xz_version = self.version_of("xz.tar.gz") + compression_source = local.path(self.source_of("compression.tar.gz")) - tar('xf', xz_source) - tar('xf', compression_source) + tar("xf", xz_source) + tar("xf", compression_source) - unpack_dir = local.path(f'xz-{xz_version}') + unpack_dir = local.path(f"xz-{xz_version}") clang = bb.compiler.cc(self) with local.cwd(unpack_dir): configure = local["./configure"] _configure = bb.watch(configure) with local.env(CC=str(clang)): _configure( - "--enable-threads=no", "--with-gnu-ld=yes", - "--disable-shared", "--disable-dependency-tracking", - "--disable-xzdec", "--disable-lzmadec", - "--disable-lzmainfo", "--disable-lzma-links", - "--disable-scripts", "--disable-doc" + "--enable-threads=no", + "--with-gnu-ld=yes", + "--disable-shared", + "--disable-dependency-tracking", + "--disable-xzdec", + "--disable-lzmadec", + "--disable-lzmainfo", + "--disable-lzma-links", + "--disable-scripts", + "--disable-doc", ) _make = bb.watch(make) _make("CC=" + str(clang), "clean", "all") def run_tests(self): - xz_version = self.version_of('xz.tar.gz') - unpack_dir = local.path(f'xz-{xz_version}') + xz_version = self.version_of("xz.tar.gz") + unpack_dir = local.path(f"xz-{xz_version}") xz = bb.wrap(unpack_dir / "src" / "xz" / "xz", self) _xz = bb.watch(xz) diff --git a/benchbuild/projects/gentoo/__init__.py b/benchbuild/projects/gentoo/__init__.py index cca5463b4..17baaed0f 100644 --- a/benchbuild/projects/gentoo/__init__.py +++ b/benchbuild/projects/gentoo/__init__.py @@ -5,13 +5,26 @@ Portage_Gen based projects will be generated automatically as soon as we can find an index generated by portage info. """ + import logging import os from benchbuild.settings import CFG -from . 
import (autoportage, bzip2, crafty, eix, gentoo, gzip, info, lammps, - postgresql, sevenz, x264, xz) +from . import ( + autoportage, + bzip2, + crafty, + eix, + gentoo, + gzip, + info, + lammps, + postgresql, + sevenz, + x264, + xz, +) LOG = logging.getLogger(__name__) @@ -21,14 +34,15 @@ def __initialize_dynamic_projects__(autotest_path): LOG.debug("Loading AutoPortage projects from %s", autotest_path) if os.path.exists(autotest_path): - with open(autotest_path, 'r') as ebuilds: + with open(autotest_path, "r") as ebuilds: for line in ebuilds: - ebuild_data = line.strip('\n') - ebuild_data = ebuild_data.split('/') + ebuild_data = line.strip("\n") + ebuild_data = ebuild_data.split("/") domain = ebuild_data[0] name = ebuild_data[1] - PortageFactory("Auto{0}{1}".format(domain, name), - domain + "_" + name, domain) + PortageFactory( + "Auto{0}{1}".format(domain, name), domain + "_" + name, domain + ) -__initialize_dynamic_projects__(str(CFG['gentoo']['autotest_loc'])) +__initialize_dynamic_projects__(str(CFG["gentoo"]["autotest_loc"])) diff --git a/benchbuild/projects/gentoo/autoportage.py b/benchbuild/projects/gentoo/autoportage.py index 4aee55d92..f3c272200 100644 --- a/benchbuild/projects/gentoo/autoportage.py +++ b/benchbuild/projects/gentoo/autoportage.py @@ -13,16 +13,13 @@ class AutoPortage(GentooGroup): def compile(self): emerge_in_chroot = uchroot.uchroot()["/usr/bin/emerge"] - prog = self.DOMAIN + "/" + str(self.NAME)[len(self.DOMAIN) + 1:] + prog = self.DOMAIN + "/" + str(self.NAME)[len(self.DOMAIN) + 1 :] with local.env(CONFIG_PROTECT="-*"): emerge_in_chroot( - "--autounmask-only=y", - "--autounmask-write=y", - prog, - retcode=None + "--autounmask-only=y", "--autounmask-write=y", prog, retcode=None ) uchroot.uretry(emerge_in_chroot[prog]) def run_tests(self): log = logging.getLogger(__name__) - log.warning('Not implemented') + log.warning("Not implemented") diff --git a/benchbuild/projects/gentoo/bzip2.py b/benchbuild/projects/gentoo/bzip2.py index 
ba2067244..28c656a0e 100644 --- a/benchbuild/projects/gentoo/bzip2.py +++ b/benchbuild/projects/gentoo/bzip2.py @@ -1,6 +1,7 @@ """ bzip2 experiment within gentoo chroot. """ + from plumbum import local import benchbuild as bb @@ -10,16 +11,15 @@ class BZip2(GentooGroup): """ - app-arch/bzip2 + app-arch/bzip2 """ + NAME = "bzip2" DOMAIN = "app-arch" test_url = "http://lairosiel.de/dist/" test_archive = "compression.tar.gz" - testfiles = [ - "text.html", "chicken.jpg", "control", "input.source", "liberty.jpg" - ] + testfiles = ["text.html", "chicken.jpg", "control", "input.source", "liberty.jpg"] def compile(self): super().compile() @@ -30,7 +30,7 @@ def compile(self): tar("fxz", test_archive) def run_tests(self): - bzip2 = bb.wrap(local.path('/bin/bzip2'), self) + bzip2 = bb.wrap(local.path("/bin/bzip2"), self) bzip2 = bb.watch(bzip2) # Compress diff --git a/benchbuild/projects/gentoo/crafty.py b/benchbuild/projects/gentoo/crafty.py index 3e451d0c6..22f7aea11 100644 --- a/benchbuild/projects/gentoo/crafty.py +++ b/benchbuild/projects/gentoo/crafty.py @@ -1,6 +1,7 @@ """ crafty experiment within gentoo chroot. 
""" + from plumbum import local import benchbuild as bb @@ -10,8 +11,9 @@ class Crafty(GentooGroup): """ - games-board/crafty + games-board/crafty """ + NAME = "crafty" DOMAIN = "games-board" @@ -26,8 +28,8 @@ def run_tests(self): crafty_path = local.path("/usr/bin/crafty") crafty = bb.wrap(crafty_path, self) - with open("test1.sh", 'w') as test1: - lines = ''' + with open("test1.sh", "w") as test1: + lines = """ st=10 ponder=off display nomoves @@ -91,12 +93,12 @@ def run_tests(self): mt=0 quit EOF -''' +""" test1.write(lines) - with open("test2.sh", 'w') as test2: - lines = ''' + with open("test2.sh", "w") as test2: + lines = """ st=10 ponder=off mt=2 @@ -104,12 +106,12 @@ def run_tests(self): move mt=0 quit -''' +""" test2.write(lines) - crafty_test1 = bb.watch((cat['test1.sh'] | crafty)) + crafty_test1 = bb.watch((cat["test1.sh"] | crafty)) crafty_test1() - crafty_test2 = bb.watch((cat['test2.sh'] | crafty)) + crafty_test2 = bb.watch((cat["test2.sh"] | crafty)) crafty_test2() diff --git a/benchbuild/projects/gentoo/eix.py b/benchbuild/projects/gentoo/eix.py index ed8da3afb..a0563729f 100644 --- a/benchbuild/projects/gentoo/eix.py +++ b/benchbuild/projects/gentoo/eix.py @@ -1,6 +1,7 @@ """ eix experiment within gentoo chroot """ + from plumbum import local import benchbuild as bb @@ -10,12 +11,12 @@ class Eix(GentooGroup): """Represents the package eix from the portage tree.""" - NAME = 'eix' - DOMAIN = 'app-portage' + NAME = "eix" + DOMAIN = "app-portage" def run_tests(self): """Runs runtime tests for eix""" - eix = bb.wrap(local.path('/usr/bin/eix'), self) + eix = bb.wrap(local.path("/usr/bin/eix"), self) _eix = bb.watch(eix) _eix("clang") diff --git a/benchbuild/projects/gentoo/gentoo.py b/benchbuild/projects/gentoo/gentoo.py index 68683fa85..e38f863e8 100644 --- a/benchbuild/projects/gentoo/gentoo.py +++ b/benchbuild/projects/gentoo/gentoo.py @@ -11,6 +11,7 @@ the gentoo image in benchbuild's source directory. 
""" + import logging import typing as tp @@ -32,9 +33,9 @@ class GentooGroup(bb.Project): """Gentoo ProjectGroup is the base class for every portage build.""" - GROUP = 'gentoo' + GROUP = "gentoo" SRC_FILE = None - CONTAINER = declarative.ContainerImage().from_('benchbuild:alpine') + CONTAINER = declarative.ContainerImage().from_("benchbuild:alpine") emerge_env: tp.Dict[str, tp.Any] = attr.ib( default=attr.Factory(dict), repr=False, eq=False, order=False @@ -52,34 +53,30 @@ def redirect(self) -> None: uchroot.mkfile_uchroot("/.benchbuild-container") benchbuild = find_benchbuild() _benchbuild = run.watch(benchbuild) - with local.env(BB_VERBOSITY=str(CFG['verbosity'])): + with local.env(BB_VERBOSITY=str(CFG["verbosity"])): project_id = "{0}/{1}".format(self.name, self.group) _benchbuild("run", "-E", self.experiment.name, project_id) def compile(self) -> None: - package_atom = "{domain}/{name}".format( - domain=self.domain, name=self.name - ) + package_atom = "{domain}/{name}".format(domain=self.domain, name=self.name) - LOG.debug('Installing dependencies.') - emerge(package_atom, '--onlydeps', env=self.emerge_env) + LOG.debug("Installing dependencies.") + emerge(package_atom, "--onlydeps", env=self.emerge_env) c_compiler = local.path(str(compiler.cc(self))) cxx_compiler = local.path(str(compiler.cxx(self))) - setup_compilers('/etc/portage/make.conf') - ln("-sf", str(c_compiler), local.path('/') / c_compiler.basename) - ln('-sf', str(cxx_compiler), local.path('/') / cxx_compiler.basename) + setup_compilers("/etc/portage/make.conf") + ln("-sf", str(c_compiler), local.path("/") / c_compiler.basename) + ln("-sf", str(cxx_compiler), local.path("/") / cxx_compiler.basename) - LOG.debug('Installing %s.', package_atom) + LOG.debug("Installing %s.", package_atom) emerge(package_atom, env=self.emerge_env) def configure_benchbuild(self, cfg: Configuration) -> None: config_file = local.path("/.benchbuild.yml") - paths, libs = \ - uchroot.env( - uchroot.mounts( - "mnt", - 
cfg["container"]["mounts"].value)) + paths, libs = uchroot.env( + uchroot.mounts("mnt", cfg["container"]["mounts"].value) + ) uchroot_cfg = cfg env = uchroot_cfg["env"].value @@ -87,10 +84,8 @@ def configure_benchbuild(self, cfg: Configuration) -> None: env["LD_LIBRARY_PATH"] = libs uchroot_cfg["env"] = env - uchroot_cfg['plugins']['projects'] = [str(self.__module__)] - uchroot_cfg['plugins']['experiments'] = [ - str(self.experiment.__module__) - ] + uchroot_cfg["plugins"]["projects"] = [str(self.__module__)] + uchroot_cfg["plugins"]["experiments"] = [str(self.experiment.__module__)] uchroot_cfg["config_file"] = str(config_file) uchroot_cfg["unionfs"]["enable"] = False uchroot_cfg["build_dir"] = "/benchbuild/build" @@ -126,21 +121,21 @@ def configure_portage() -> None: def write_sandbox_d(_path: str) -> None: - uchroot.mkfile_uchroot(local.path('/') / _path) - with open(_path, 'a') as sandbox_conf: - lines = ''' + uchroot.mkfile_uchroot(local.path("/") / _path) + with open(_path, "a") as sandbox_conf: + lines = """ SANDBOX_WRITE="/clang.stderr:/clang++.stderr:/clang.stdout:/clang++.stdout" -''' +""" sandbox_conf.write(lines) def setup_compilers(_path: str) -> None: LOG.debug("Arming compiler symlinks.") - with open(_path, 'a') as makeconf: - lines = ''' + with open(_path, "a") as makeconf: + lines = """ CC="/clang" CXX="/clang++" -''' +""" makeconf.write(lines) @@ -155,9 +150,9 @@ def write_makeconfig(_path: str) -> None: ftp_proxy = str(CFG["gentoo"]["ftp_proxy"]) rsync_proxy = str(CFG["gentoo"]["rsync_proxy"]) - uchroot.mkfile_uchroot(local.path('/') / _path) - with open(_path, 'w') as makeconf: - lines = ''' + uchroot.mkfile_uchroot(local.path("/") / _path) + with open(_path, "w") as makeconf: + lines = """ PORTAGE_USERNAME=root PORTAGE_GROUPNAME=root CFLAGS="-O2 -pipe" @@ -168,7 +163,7 @@ def write_makeconfig(_path: str) -> None: PORTDIR="/usr/portage" DISTDIR="/mnt/distfiles" PKGDIR="${PORTDIR}/packages" -''' +""" makeconf.write(lines) @@ -210,11 +205,11 @@ 
def write_bashrc(_path: str) -> None: paths = paths + p_paths libs = libs + p_libs - with open(_path, 'w') as bashrc: - lines = ''' + with open(_path, "w") as bashrc: + lines = """ export PATH="{0}:${{PATH}}" export LD_LIBRARY_PATH="{1}:${{LD_LIBRARY_PATH}}" -'''.format(path.list_to_path(paths), path.list_to_path(libs)) +""".format(path.list_to_path(paths), path.list_to_path(libs)) bashrc.write(lines) @@ -229,8 +224,8 @@ def write_layout(_path: str) -> None: uchroot.mkdir_uchroot("/etc/portage/metadata") uchroot.mkfile_uchroot("/etc/portage/metadata/layout.conf") - with open(_path, 'w') as layoutconf: - lines = '''masters = gentoo''' + with open(_path, "w") as layoutconf: + lines = """masters = gentoo""" layoutconf.write(lines) @@ -245,7 +240,7 @@ def write_wgetrc(_path: str) -> None: ftp_proxy = str(CFG["gentoo"]["ftp_proxy"]) uchroot.mkfile_uchroot("/etc/wgetrc") - with open(_path, 'w') as wgetrc: + with open(_path, "w") as wgetrc: if http_proxy is not None: http_s = "http_proxy = {0}".format(http_proxy) https_s = "https_proxy = {0}".format(http_proxy) @@ -261,14 +256,14 @@ def write_wgetrc(_path: str) -> None: def setup_virtualenv(_path: str = "/benchbuild") -> None: LOG.debug("Setting up Benchbuild virtualenv...") env = uchroot.uchroot()["/usr/bin/env"] - env = env['-i', '--'] + env = env["-i", "--"] venv = env["/usr/bin/virtualenv"] venv = venv("-p", "/usr/bin/python3", _path) def find_benchbuild() -> tp.Optional[BoundCommand]: try: - uchrt = uchroot.clean_env(uchroot.uchroot(), ['HOME']) + uchrt = uchroot.clean_env(uchroot.uchroot(), ["HOME"]) benchbuild_loc = uchrt("which", "benchbuild").strip() benchbuild = uchrt[benchbuild_loc] return benchbuild @@ -327,10 +322,9 @@ def setup_benchbuild() -> None: def __upgrade_from_pip(venv_dir: local.path) -> None: LOG.debug("Upgrading from pip") - uchrt_cmd = uchroot.clean_env(uchroot.uchroot(), ['HOME']) + uchrt_cmd = uchroot.clean_env(uchroot.uchroot(), ["HOME"]) uchroot.uretry( - uchrt_cmd[venv_dir / "bin" / "pip3", 
"install", "--upgrade", - "benchbuild"] + uchrt_cmd[venv_dir / "bin" / "pip3", "install", "--upgrade", "benchbuild"] ) @@ -344,7 +338,7 @@ def __mount_source(src_dir: str) -> None: def __upgrade_from_source(venv_dir: local.path, with_deps: bool = True) -> None: LOG.debug("Upgrading from source") - uchrt_cmd = uchroot.clean_env(uchroot.uchroot(), ['HOME']) + uchrt_cmd = uchroot.clean_env(uchroot.uchroot(), ["HOME"]) opts = ["--upgrade"] if not with_deps: opts.append("--no-deps") diff --git a/benchbuild/projects/gentoo/gzip.py b/benchbuild/projects/gentoo/gzip.py index 1da8ef540..39ae6a82d 100644 --- a/benchbuild/projects/gentoo/gzip.py +++ b/benchbuild/projects/gentoo/gzip.py @@ -1,6 +1,7 @@ """ gzip experiment within gentoo chroot. """ + from plumbum import local import benchbuild as bb @@ -10,16 +11,15 @@ class GZip(GentooGroup): """ - app-arch/gzip + app-arch/gzip """ + NAME = "gzip" DOMAIN = "app-arch" test_url = "http://lairosiel.de/dist/" test_archive = "compression.tar.gz" - testfiles = [ - "text.html", "chicken.jpg", "control", "input.source", "liberty.jpg" - ] + testfiles = ["text.html", "chicken.jpg", "control", "input.source", "liberty.jpg"] def compile(self): super().compile() @@ -30,7 +30,7 @@ def compile(self): tar("fxz", test_archive) def run_tests(self): - gzip = bb.wrap(local.path('/bin/gzip'), self) + gzip = bb.wrap(local.path("/bin/gzip"), self) gzip = bb.watch(gzip) # Compress diff --git a/benchbuild/projects/gentoo/info.py b/benchbuild/projects/gentoo/info.py index 73bad2019..dc666cef4 100644 --- a/benchbuild/projects/gentoo/info.py +++ b/benchbuild/projects/gentoo/info.py @@ -2,6 +2,7 @@ Get package infos, e.g., specific ebuilds for given languages, from gentoo chroot. 
""" + import re from plumbum import local @@ -37,16 +38,16 @@ def compile(self): for language in languages: output = qgrep_in_chroot("-l", get_string_for_language(language)) - for line in output.split('\n'): + for line in output.split("\n"): if "ebuild" in line: - parts = line.split('.ebuild')[0].split('/') - package_atom = '{0}/{1}'.format(parts[0], parts[1]) + parts = line.split(".ebuild")[0].split("/") + package_atom = "{0}/{1}".format(parts[0], parts[1]) ebuilds.add(package_atom) for use in use_flags: output = equery_in_chroot("-q", "hasuse", "-p", use) ebuilds_use = set() - for line in output.split('\n'): + for line in output.split("\n"): ebuilds_use.add(re.sub(r"(.*)-[0-9]+.*$", r"\1", line)) ebuilds = ebuilds.intersection(ebuilds_use) @@ -64,6 +65,6 @@ def get_string_for_language(language_name): language_name = language_name.lower().lstrip() if language_name == "c": return "tc-getCC" - if language_name in ('c++', 'cxx'): + if language_name in ("c++", "cxx"): return "tc-getCXX" return language_name diff --git a/benchbuild/projects/gentoo/lammps.py b/benchbuild/projects/gentoo/lammps.py index 9fb2fe005..484fa21c5 100644 --- a/benchbuild/projects/gentoo/lammps.py +++ b/benchbuild/projects/gentoo/lammps.py @@ -1,6 +1,7 @@ """ LAMMPS (sci-physics/lammps) project within gentoo chroot. 
""" + from plumbum import local from benchbuild.projects.gentoo.gentoo import GentooGroup @@ -10,8 +11,9 @@ class Lammps(GentooGroup): """ - sci-physics/lammps + sci-physics/lammps """ + NAME = "lammps" DOMAIN = "sci-physics" @@ -30,7 +32,7 @@ def compile(self): def run_tests(self): builddir = self.builddir - lammps = wrapping.wrap(local.path('/usr/bin/lmp'), self) + lammps = wrapping.wrap(local.path("/usr/bin/lmp"), self) lammps_dir = builddir / "lammps" with local.cwd(lammps_dir): diff --git a/benchbuild/projects/gentoo/portage_gen.py b/benchbuild/projects/gentoo/portage_gen.py index 8380f89ff..7a0e34562 100644 --- a/benchbuild/projects/gentoo/portage_gen.py +++ b/benchbuild/projects/gentoo/portage_gen.py @@ -1,6 +1,7 @@ """ Generic experiment to test portage packages within gentoo chroot. """ + import logging from plumbum import ProcessExecutionError, local @@ -37,23 +38,23 @@ def __repr__(self): def __str__(self): try: domain, _, name = self.name.partition("_") - package = domain + '/' + name + package = domain + "/" + name _container = self.container() _uchroot = uchroot.no_args() _uchroot = _uchroot["-E", "-A", "-C", "-w", "/", "-r"] _uchroot = _uchroot[_container.local] with local.env(CONFIG_PROTECT="-*"): - fake_emerge = _uchroot["emerge", "--autounmask-only=y", - "--autounmask-write=y", "--nodeps"] + fake_emerge = _uchroot[ + "emerge", "--autounmask-only=y", "--autounmask-write=y", "--nodeps" + ] _fake_emerge = run.watch(fake_emerge) _fake_emerge(package) - emerge_in_chroot = \ - _uchroot["emerge", "-p", "--nodeps", package] + emerge_in_chroot = _uchroot["emerge", "-p", "--nodeps", package] _, stdout, _ = emerge_in_chroot.run() - for line in stdout.split('\n'): + for line in stdout.split("\n"): if package in line: _, _, package_name = line.partition("/") _, name, version = package_name.partition(name) @@ -105,13 +106,15 @@ def run_not_supported(_, *args, **kwargs): LOG.warning("Runtime testing not supported on auto-generated projects.") newclass = type( - 
name, (BaseClass,), { + name, + (BaseClass,), + { "NAME": NAME, "DOMAIN": DOMAIN, "SOURCE": [nosource()], "GROUP": "auto-gentoo", "run": run_not_supported, - "__module__": "__main__" - } + "__module__": "__main__", + }, ) return newclass diff --git a/benchbuild/projects/gentoo/postgresql.py b/benchbuild/projects/gentoo/postgresql.py index ea94f198e..fc6123fa9 100644 --- a/benchbuild/projects/gentoo/postgresql.py +++ b/benchbuild/projects/gentoo/postgresql.py @@ -1,6 +1,7 @@ """ postgresql experiment within gentoo chroot. """ + from time import sleep from plumbum import local @@ -15,6 +16,7 @@ class Postgresql(GentooGroup): """ dev-db/postgresql """ + NAME = "postgresql" DOMAIN = "dev-db/postgresql" @@ -33,9 +35,9 @@ def run_tests(self): postgres = wrapping.wrap(pg_path, self) def pg_su(command): - return su['-c', command, '-g', 'postgres', 'postgres'] + return su["-c", command, "-g", "postgres", "postgres"] - dropdb = pg_su('/usr/bin/dropdb') + dropdb = pg_su("/usr/bin/dropdb") _dropdb = run.watch(dropdb) createdb = pg_su("/usr/bin/createdb") @@ -54,22 +56,22 @@ def pg_su(command): _initdb() with pg_server.bgrun() as postgres: - #We get the PID of the running 'pg_server, which is actually - #the PID of the uchroot binary. This is not the PID we - #want to send a SIGTERM to. - - #We need to enumerate all children of 'postgres' recursively - #and select the one PID that is named 'postgres.bin' and has - #not a process with the same name as parent. - #This should be robust enough, as long as postgres doesn't - #switch process names after forking. + # We get the PID of the running 'pg_server, which is actually + # the PID of the uchroot binary. This is not the PID we + # want to send a SIGTERM to. + + # We need to enumerate all children of 'postgres' recursively + # and select the one PID that is named 'postgres.bin' and has + # not a process with the same name as parent. 
+ # This should be robust enough, as long as postgres doesn't + # switch process names after forking. sleep(3) postgres_root = Process(pid=postgres.pid) real_postgres = [ c.pid for c in postgres_root.children(True) - if c.name() == 'postgres.bin' and - c.parent().name() != 'postgres.bin' + if c.name() == "postgres.bin" + and c.parent().name() != "postgres.bin" ] try: _createdb() diff --git a/benchbuild/projects/gentoo/sevenz.py b/benchbuild/projects/gentoo/sevenz.py index aa6560960..1f187e6b6 100644 --- a/benchbuild/projects/gentoo/sevenz.py +++ b/benchbuild/projects/gentoo/sevenz.py @@ -1,6 +1,7 @@ """ p7zip experiment within gentoo chroot. """ + from plumbum import local from benchbuild.projects.gentoo.gentoo import GentooGroup @@ -9,12 +10,13 @@ class SevenZip(GentooGroup): """ - app-arch/p7zip + app-arch/p7zip """ + NAME = "p7zip" DOMAIN = "app-arch" def run_tests(self): - _7z = wrapping.wrap(local.path('/usr/bin/7z'), self) + _7z = wrapping.wrap(local.path("/usr/bin/7z"), self) _7z = run.watch(_7z) _7z("b", "-mmt1") diff --git a/benchbuild/projects/gentoo/x264.py b/benchbuild/projects/gentoo/x264.py index 5b3b1be86..7d9ba31b0 100644 --- a/benchbuild/projects/gentoo/x264.py +++ b/benchbuild/projects/gentoo/x264.py @@ -1,6 +1,7 @@ """ media-video/x264-encoder within gentoo chroot. 
""" + from plumbum import local from benchbuild.projects.gentoo.gentoo import GentooGroup @@ -9,15 +10,16 @@ class X264(GentooGroup): """ - media-video/x264-encoder + media-video/x264-encoder """ + NAME = "x264" DOMAIN = "media-libs" test_url = "http://lairosiel.de/dist/" inputfiles = { "tbbt-small.y4m": [], - "Sintel.2010.720p.raw": ["--input-res", "1280x720"] + "Sintel.2010.720p.raw": ["--input-res", "1280x720"], } def compile(self) -> None: @@ -27,38 +29,40 @@ def compile(self) -> None: download.Wget(self.test_url + testfile, testfile) def run_tests(self) -> None: - x264 = wrapping.wrap(local.path('/usr/bin/x264'), self) + x264 = wrapping.wrap(local.path("/usr/bin/x264"), self) _x264 = run.watch(x264) tests = [ ( - '--crf 30 -b1 -m1 -r1 --me dia --no-cabac --direct temporal ' - '--ssim --no-weightb' - ), - ( - '--crf 16 -b2 -m3 -r3 --me hex --no-8x8dct --direct spatial ' - '--no-dct-decimate -t0 --slice-max-mbs 50' + "--crf 30 -b1 -m1 -r1 --me dia --no-cabac --direct temporal " + "--ssim --no-weightb" ), ( - '--crf 26 -b4 -m5 -r2 --me hex --cqm jvt --nr 100 --psnr ' - '--no-mixed-refs --b-adapt 2 --slice-max-size 1500' + "--crf 16 -b2 -m3 -r3 --me hex --no-8x8dct --direct spatial " + "--no-dct-decimate -t0 --slice-max-mbs 50" ), ( - '--crf 18 -b3 -m9 -r5 --me umh -t1 -A all --b-pyramid normal ' - '--direct auto --no-fast-pskip --no-mbtree' + "--crf 26 -b4 -m5 -r2 --me hex --cqm jvt --nr 100 --psnr " + "--no-mixed-refs --b-adapt 2 --slice-max-size 1500" ), ( - '--crf 22 -b3 -m7 -r4 --me esa -t2 -A all --psy-rd 1.0:1.0 ' - '--slices 4' + "--crf 18 -b3 -m9 -r5 --me umh -t1 -A all --b-pyramid normal " + "--direct auto --no-fast-pskip --no-mbtree" ), - ('--frames 50 --crf 24 -b3 -m10 -r3 --me tesa -t2'), - ('--frames 50 -q0 -m9 -r2 --me hex -Aall'), - ('--frames 50 -q0 -m2 -r1 --me hex --no-cabac'), + ("--crf 22 -b3 -m7 -r4 --me esa -t2 -A all --psy-rd 1.0:1.0 --slices 4"), + ("--frames 50 --crf 24 -b3 -m10 -r3 --me tesa -t2"), + ("--frames 50 -q0 -m9 -r2 --me hex 
-Aall"), + ("--frames 50 -q0 -m2 -r1 --me hex --no-cabac"), ] for ifile in self.inputfiles: for _, test in enumerate(tests): _x264( - ifile, self.inputfiles[ifile], "--threads", "1", "-o", - "/dev/null", test.split(" ") + ifile, + self.inputfiles[ifile], + "--threads", + "1", + "-o", + "/dev/null", + test.split(" "), ) diff --git a/benchbuild/projects/gentoo/xz.py b/benchbuild/projects/gentoo/xz.py index 1af037113..dc54a9260 100644 --- a/benchbuild/projects/gentoo/xz.py +++ b/benchbuild/projects/gentoo/xz.py @@ -1,6 +1,7 @@ """ xz experiment within gentoo chroot. """ + from plumbum import local from benchbuild.projects.gentoo.gentoo import GentooGroup @@ -10,16 +11,15 @@ class XZ(GentooGroup): """ - app-arch/xz + app-arch/xz """ + NAME = "xz" DOMAIN = "app-arch" test_url = "http://lairosiel.de/dist/" test_archive = "compression.tar.gz" - testfiles = [ - "text.html", "chicken.jpg", "control", "input.source", "liberty.jpg" - ] + testfiles = ["text.html", "chicken.jpg", "control", "input.source", "liberty.jpg"] def compile(self): super().compile() diff --git a/benchbuild/projects/lnt/lnt.py b/benchbuild/projects/lnt/lnt.py index c22453228..363eb0953 100644 --- a/benchbuild/projects/lnt/lnt.py +++ b/benchbuild/projects/lnt/lnt.py @@ -1,4 +1,5 @@ """LNT based measurements.""" + import logging from plumbum import FG, local @@ -14,22 +15,21 @@ class LNTGroup(bb.Project): """LNT ProjectGroup for running the lnt test suite.""" - DOMAIN = 'lnt' - GROUP = 'lnt' + DOMAIN = "lnt" + GROUP = "lnt" NAME_FILTERS = [ - r'(?P.+)\.simple', - r'(?P.+)-(dbl|flt)', + r"(?P.+)\.simple", + r"(?P.+)-(dbl|flt)", ] SUBDIR = None SOURCE = [ - Git(remote='http://llvm.org/git/lnt', - local='lnt.git', - refspec='HEAD', - limit=5), - Git(remote='http://llvm.org/git/test-suite', - local='test-suite', - refspec='HEAD', - limit=5) + Git(remote="http://llvm.org/git/lnt", local="lnt.git", refspec="HEAD", limit=5), + Git( + remote="http://llvm.org/git/test-suite", + local="test-suite", + refspec="HEAD", 
+ limit=5, + ), ] # Will be set by configure. @@ -40,8 +40,8 @@ class LNTGroup(bb.Project): binary = None def compile(self): - lnt_repo = local.path(self.source_of('lnt.git')) - test_suite_source = local.path(self.source_of('test-suite')) + lnt_repo = local.path(self.source_of("lnt.git")) + test_suite_source = local.path(self.source_of("test-suite")) venv_path = local.cwd / "local" virtualenv(venv_path, "--python=python2") @@ -49,8 +49,7 @@ def compile(self): pip = local[pip_path] with local.cwd(lnt_repo): - pip("install", "--no-cache-dir", "--disable-pip-version-check", - "-e", ".") + pip("install", "--no-cache-dir", "--disable-pip-version-check", "-e", ".") self.sandbox_dir = local.cwd / "run" if self.sandbox_dir.exists(): @@ -62,11 +61,23 @@ def compile(self): self.clang_cxx = bb.compiler.cxx(self, detect_project=True) _runtest = bb.watch(self.lnt) - _runtest("runtest", "test-suite", "-v", "-j1", "--sandbox", - self.sandbox_dir, "--benchmarking-only", - "--only-compile", "--cc", str(self.clang), "--cxx", - str(self.clang_cxx), "--test-suite", test_suite_source, - "--only-test=" + self.SUBDIR) + _runtest( + "runtest", + "test-suite", + "-v", + "-j1", + "--sandbox", + self.sandbox_dir, + "--benchmarking-only", + "--only-compile", + "--cc", + str(self.clang), + "--cxx", + str(self.clang_cxx), + "--test-suite", + test_suite_source, + "--only-test=" + self.SUBDIR, + ) @staticmethod def after_run_tests(sandbox_dir): @@ -76,70 +87,87 @@ def after_run_tests(sandbox_dir): (cat[log] & FG) # pylint: disable=pointless-statement def run_tests(self): - test_suite_source = local.path(self.source_of('test-suite')) - binary = bb.wrapping.wrap_dynamic(self, - "lnt_runner", - name_filters=LNTGroup.NAME_FILTERS) + test_suite_source = local.path(self.source_of("test-suite")) + binary = bb.wrapping.wrap_dynamic( + self, "lnt_runner", name_filters=LNTGroup.NAME_FILTERS + ) _runtest = bb.watch(self.lnt) - _runtest("runtest", "nt", "-v", "-j1", "--sandbox", self.sandbox_dir, - 
"--benchmarking-only", "--cc", str(self.clang), "--cxx", - str(self.clang_cxx), "--test-suite", test_suite_source, - "--test-style", "simple", "--test-externals", self.builddir, - "--make-param=RUNUNDER=" + str(binary), - "--only-test=" + self.SUBDIR) + _runtest( + "runtest", + "nt", + "-v", + "-j1", + "--sandbox", + self.sandbox_dir, + "--benchmarking-only", + "--cc", + str(self.clang), + "--cxx", + str(self.clang_cxx), + "--test-suite", + test_suite_source, + "--test-style", + "simple", + "--test-externals", + self.builddir, + "--make-param=RUNUNDER=" + str(binary), + "--only-test=" + self.SUBDIR, + ) LNTGroup.after_run_tests(self.sandbox_dir) class SingleSourceBenchmarks(LNTGroup): - NAME = 'SingleSourceBenchmarks' - DOMAIN = 'LNT (SSB)' + NAME = "SingleSourceBenchmarks" + DOMAIN = "LNT (SSB)" SUBDIR = "SingleSource/Benchmarks" class MultiSourceBenchmarks(LNTGroup): - NAME = 'MultiSourceBenchmarks' - DOMAIN = 'LNT (MSB)' + NAME = "MultiSourceBenchmarks" + DOMAIN = "LNT (MSB)" SUBDIR = "MultiSource/Benchmarks" class MultiSourceApplications(LNTGroup): - NAME = 'MultiSourceApplications' - DOMAIN = 'LNT (MSA)' + NAME = "MultiSourceApplications" + DOMAIN = "LNT (MSA)" SUBDIR = "MultiSource/Applications" class SPEC2006(LNTGroup): - NAME = 'SPEC2006' - DOMAIN = 'LNT (Ext)' + NAME = "SPEC2006" + DOMAIN = "LNT (Ext)" SUBDIR = "External/SPEC" def compile(self): - if bb.download.CopyNoFail('speccpu2006'): + if bb.download.CopyNoFail("speccpu2006"): super(SPEC2006, self).compile() else: - print('======================================================') - print(('SPECCPU2006 not found in %s. This project will fail.', - CFG['tmp_dir'])) - print('======================================================') + print("======================================================") + print( + ("SPECCPU2006 not found in %s. 
This project will fail.", CFG["tmp_dir"]) + ) + print("======================================================") class Povray(LNTGroup): - NAME = 'Povray' - DOMAIN = 'LNT (Ext)' + NAME = "Povray" + DOMAIN = "LNT (Ext)" SUBDIR = "External/Povray" SOURCE = [ - Git(remote='http://llvm.org/git/lnt', - local='lnt.git', - refspec='HEAD', - limit=5), - Git(remote='http://llvm.org/git/test-suite', - local='test-suite', - refspec='HEAD', - limit=5), - Git(remote='https://github.com/POV-Ray/povray', - local='povray.git', - refspec='HEAD', - limit=5) + Git(remote="http://llvm.org/git/lnt", local="lnt.git", refspec="HEAD", limit=5), + Git( + remote="http://llvm.org/git/test-suite", + local="test-suite", + refspec="HEAD", + limit=5, + ), + Git( + remote="https://github.com/POV-Ray/povray", + local="povray.git", + refspec="HEAD", + limit=5, + ), ] diff --git a/benchbuild/projects/polybench/polybench-mod.py b/benchbuild/projects/polybench/polybench-mod.py index 82975ac8e..c74acdf69 100644 --- a/benchbuild/projects/polybench/polybench-mod.py +++ b/benchbuild/projects/polybench/polybench-mod.py @@ -7,18 +7,20 @@ class PolybenchModGroup(PolyBenchGroup): - DOMAIN = 'polybench' - GROUP = 'polybench-mod' - DOMAIN = 'polybench' + DOMAIN = "polybench" + GROUP = "polybench-mod" + DOMAIN = "polybench" SOURCE = [ - Git(remote='https://github.com/simbuerg/polybench-c-4.2-1.git', - local='polybench.git', + Git( + remote="https://github.com/simbuerg/polybench-c-4.2-1.git", + local="polybench.git", limit=5, - refspec='HEAD') + refspec="HEAD", + ) ] def compile(self): - polybench_repo = local.path(self.source_of('polybench.git')) + polybench_repo = local.path(self.source_of("polybench.git")) polybench_opts = CFG["projects"]["polybench"] verify = bool(polybench_opts["verify"]) @@ -32,138 +34,158 @@ def compile(self): utils_dir = src_dir / "utilities" polybench_opts = [ - "-D" + str(workload), "-DPOLYBENCH_USE_C99_PROTO", - "-DPOLYBENCH_USE_RESTRICT" + "-D" + str(workload), + 
"-DPOLYBENCH_USE_C99_PROTO", + "-DPOLYBENCH_USE_RESTRICT", ] if verify: - polybench_opts = self.compile_verify([ - "-I", utils_dir, "-I", src_sub, utils_dir / "polybench.c", - kernel_file, src_file, "-lm" - ], polybench_opts) + polybench_opts = self.compile_verify( + [ + "-I", + utils_dir, + "-I", + src_sub, + utils_dir / "polybench.c", + kernel_file, + src_file, + "-lm", + ], + polybench_opts, + ) clang = bb.compiler.cc(self) _clang = bb.watch(clang) - _clang("-I", utils_dir, "-I", src_sub, polybench_opts, - utils_dir / "polybench.c", kernel_file, src_file, "-lm", "-o", - self.name) + _clang( + "-I", + utils_dir, + "-I", + src_sub, + polybench_opts, + utils_dir / "polybench.c", + kernel_file, + src_file, + "-lm", + "-o", + self.name, + ) class Correlation(PolybenchModGroup): - NAME = 'correlation' + NAME = "correlation" class Covariance(PolybenchModGroup): - NAME = 'covariance' + NAME = "covariance" class TwoMM(PolybenchModGroup): - NAME = '2mm' + NAME = "2mm" class ThreeMM(PolybenchModGroup): - NAME = '3mm' + NAME = "3mm" class Atax(PolybenchModGroup): - NAME = 'atax' + NAME = "atax" class BicG(PolybenchModGroup): - NAME = 'bicg' + NAME = "bicg" class Doitgen(PolybenchModGroup): - NAME = 'doitgen' + NAME = "doitgen" class Mvt(PolybenchModGroup): - NAME = 'mvt' + NAME = "mvt" class Gemm(PolybenchModGroup): - NAME = 'gemm' + NAME = "gemm" class Gemver(PolybenchModGroup): - NAME = 'gemver' + NAME = "gemver" class Gesummv(PolybenchModGroup): - NAME = 'gesummv' + NAME = "gesummv" class Symm(PolybenchModGroup): - NAME = 'symm' + NAME = "symm" class Syr2k(PolybenchModGroup): - NAME = 'syr2k' + NAME = "syr2k" class Syrk(PolybenchModGroup): - NAME = 'syrk' + NAME = "syrk" class Trmm(PolybenchModGroup): - NAME = 'trmm' + NAME = "trmm" class Cholesky(PolybenchModGroup): - NAME = 'cholesky' + NAME = "cholesky" class Durbin(PolybenchModGroup): - NAME = 'durbin' + NAME = "durbin" class Gramschmidt(PolybenchModGroup): - NAME = 'gramschmidt' + NAME = "gramschmidt" class 
Lu(PolybenchModGroup): - NAME = 'lu' + NAME = "lu" class LuDCMP(PolybenchModGroup): - NAME = 'ludcmp' + NAME = "ludcmp" class Trisolv(PolybenchModGroup): - NAME = 'trisolv' + NAME = "trisolv" class Deriche(PolybenchModGroup): - NAME = 'deriche' + NAME = "deriche" class FloydWarshall(PolybenchModGroup): - NAME = 'floyd-warshall' + NAME = "floyd-warshall" class Nussinov(PolybenchModGroup): - NAME = 'nussinov' + NAME = "nussinov" class Adi(PolybenchModGroup): - NAME = 'adi' + NAME = "adi" class FDTD2D(PolybenchModGroup): - NAME = 'fdtd-2d' + NAME = "fdtd-2d" class Jacobi1D(PolybenchModGroup): - NAME = 'jacobi-1d' + NAME = "jacobi-1d" class Jacobi2Dimper(PolybenchModGroup): - NAME = 'jacobi-2d' + NAME = "jacobi-2d" class Seidel2D(PolybenchModGroup): - NAME = 'seidel-2d' + NAME = "seidel-2d" class Heat3D(PolybenchModGroup): - NAME = 'heat-3d' + NAME = "heat-3d" diff --git a/benchbuild/projects/polybench/polybench.py b/benchbuild/projects/polybench/polybench.py index ff3460145..611ee7511 100644 --- a/benchbuild/projects/polybench/polybench.py +++ b/benchbuild/projects/polybench/polybench.py @@ -9,16 +9,16 @@ from benchbuild.utils.cmd import diff, tar LOG = logging.getLogger(__name__) -CFG['projects'] = { +CFG["projects"] = { "polybench": { "verify": { "default": True, - "desc": "Verify results with POLYBENCH_DUMP_ARRAYS." + "desc": "Verify results with POLYBENCH_DUMP_ARRAYS.", }, "workload": { "default": "EXTRALARGE_DATASET", - "desc": "Control the dataset variable for polybench." 
- } + "desc": "Control the dataset variable for polybench.", + }, } } @@ -43,8 +43,8 @@ def get_dump_arrays_output(data: tp.List[str]) -> tp.List[str]: class PolyBenchGroup(bb.Project): - DOMAIN = 'polybench' - GROUP = 'polybench' + DOMAIN = "polybench" + GROUP = "polybench" path_dict = { "correlation": "datamining", "covariance": "datamining", @@ -81,12 +81,12 @@ class PolyBenchGroup(bb.Project): SOURCE = [ HTTP( remote={ - '4.2': ( - 'http://downloads.sourceforge.net/project/' - 'polybench/polybench-c-4.2.tar.gz' + "4.2": ( + "http://downloads.sourceforge.net/project/" + "polybench/polybench-c-4.2.tar.gz" ) }, - local='polybench.tar.gz' + local="polybench.tar.gz", ) ] @@ -110,47 +110,61 @@ def compile_verify(self, compiler_args, polybench_opts): return polybench_opts def compile(self): - polybench_source = local.path(self.source_of('polybench.tar.gz')) - polybench_version = self.version_of('polybench.tar.gz') + polybench_source = local.path(self.source_of("polybench.tar.gz")) + polybench_version = self.version_of("polybench.tar.gz") polybench_opts = CFG["projects"]["polybench"] verify = bool(polybench_opts["verify"]) workload = str(polybench_opts["workload"]) - tar('xfz', polybench_source) + tar("xfz", polybench_source) - src_dir = local.path(f'./polybench-c-{polybench_version}') + src_dir = local.path(f"./polybench-c-{polybench_version}") src_sub = src_dir / self.path_dict[self.name] / self.name src_file = src_sub / (self.name + ".c") utils_dir = src_dir / "utilities" polybench_opts = [ - "-DPOLYBENCH_USE_C99_PROTO", "-D" + str(workload), - "-DPOLYBENCH_USE_RESTRICT" + "-DPOLYBENCH_USE_C99_PROTO", + "-D" + str(workload), + "-DPOLYBENCH_USE_RESTRICT", ] if verify: - polybench_opts = self.compile_verify([ - "-I", utils_dir, "-I", src_sub, utils_dir / "polybench.c", - src_file, "-lm" - ], polybench_opts) + polybench_opts = self.compile_verify( + [ + "-I", + utils_dir, + "-I", + src_sub, + utils_dir / "polybench.c", + src_file, + "-lm", + ], + polybench_opts, + ) 
clang = bb.compiler.cc(self) _clang = bb.watch(clang) _clang( - "-I", utils_dir, "-I", src_sub, polybench_opts, - utils_dir / "polybench.c", src_file, "-lm", "-o", self.name + "-I", + utils_dir, + "-I", + src_sub, + polybench_opts, + utils_dir / "polybench.c", + src_file, + "-lm", + "-o", + self.name, ) def run_tests(self): - def filter_stderr(stderr_raw, stderr_filtered): """Extract dump_arrays_output from stderr.""" - with open(stderr_raw, 'r') as stderr: - with open(stderr_filtered, 'w') as stderr_filt: - stderr_filt.writelines( - get_dump_arrays_output(stderr.readlines()) - ) + with open(stderr_raw, "r") as stderr: + with open(stderr_filtered, "w") as stderr_filt: + stderr_filt.writelines(get_dump_arrays_output(stderr.readlines())) polybench_opts = CFG["projects"]["polybench"] verify = bool(polybench_opts["verify"]) @@ -181,120 +195,120 @@ def filter_stderr(stderr_raw, stderr_filtered): class Correlation(PolyBenchGroup): - NAME = 'correlation' + NAME = "correlation" class Covariance(PolyBenchGroup): - NAME = 'covariance' + NAME = "covariance" class TwoMM(PolyBenchGroup): - NAME = '2mm' + NAME = "2mm" class ThreeMM(PolyBenchGroup): - NAME = '3mm' + NAME = "3mm" class Atax(PolyBenchGroup): - NAME = 'atax' + NAME = "atax" class BicG(PolyBenchGroup): - NAME = 'bicg' + NAME = "bicg" class Doitgen(PolyBenchGroup): - NAME = 'doitgen' + NAME = "doitgen" class Mvt(PolyBenchGroup): - NAME = 'mvt' + NAME = "mvt" class Gemm(PolyBenchGroup): - NAME = 'gemm' + NAME = "gemm" class Gemver(PolyBenchGroup): - NAME = 'gemver' + NAME = "gemver" class Gesummv(PolyBenchGroup): - NAME = 'gesummv' + NAME = "gesummv" class Symm(PolyBenchGroup): - NAME = 'symm' + NAME = "symm" class Syr2k(PolyBenchGroup): - NAME = 'syr2k' + NAME = "syr2k" class Syrk(PolyBenchGroup): - NAME = 'syrk' + NAME = "syrk" class Trmm(PolyBenchGroup): - NAME = 'trmm' + NAME = "trmm" class Cholesky(PolyBenchGroup): - NAME = 'cholesky' + NAME = "cholesky" class Durbin(PolyBenchGroup): - NAME = 'durbin' + NAME = 
"durbin" class Gramschmidt(PolyBenchGroup): - NAME = 'gramschmidt' + NAME = "gramschmidt" class Lu(PolyBenchGroup): - NAME = 'lu' + NAME = "lu" class LuDCMP(PolyBenchGroup): - NAME = 'ludcmp' + NAME = "ludcmp" class Trisolv(PolyBenchGroup): - NAME = 'trisolv' + NAME = "trisolv" class Deriche(PolyBenchGroup): - NAME = 'deriche' + NAME = "deriche" class FloydWarshall(PolyBenchGroup): - NAME = 'floyd-warshall' + NAME = "floyd-warshall" class Nussinov(PolyBenchGroup): - NAME = 'nussinov' + NAME = "nussinov" class Adi(PolyBenchGroup): - NAME = 'adi' + NAME = "adi" class FDTD2D(PolyBenchGroup): - NAME = 'fdtd-2d' + NAME = "fdtd-2d" class Jacobi1D(PolyBenchGroup): - NAME = 'jacobi-1d' + NAME = "jacobi-1d" class Jacobi2Dimper(PolyBenchGroup): - NAME = 'jacobi-2d' + NAME = "jacobi-2d" class Seidel2D(PolyBenchGroup): - NAME = 'seidel-2d' + NAME = "seidel-2d" class Heat3D(PolyBenchGroup): - NAME = 'heat-3d' + NAME = "heat-3d" diff --git a/benchbuild/projects/test/test.py b/benchbuild/projects/test/test.py index 02cba2b39..349fddf5a 100644 --- a/benchbuild/projects/test/test.py +++ b/benchbuild/projects/test/test.py @@ -9,7 +9,6 @@ class TestSource(FetchableSource): - _versions: tp.Tuple[Variant, ...] 
def __init__(self, *versions: str): @@ -20,26 +19,23 @@ def __init__(self, *versions: str): versions_w_default = ("default",) + versions - self._versions = tuple( - Variant(self, version) for version in versions_w_default - ) + self._versions = tuple(Variant(self, version) for version in versions_w_default) @property def default(self) -> Variant: return self._versions[0] def version(self, target_dir: str, version: str) -> pb.LocalPath: - return 'None' + return "None" def versions(self) -> tp.List[Variant]: return list(self._versions) def fetch(self) -> pb.LocalPath: - return 'None' + return "None" class CAWTestSource(FetchableSource): - _versions: tp.Dict[Variant, tp.List[Variant]] def __init__( @@ -51,12 +47,9 @@ def __init__( self._local = "test.caw.local" self._remote = "test.caw.remote" - versions_w_default = \ - (("default", "caw_default"),) + versions + versions_w_default = (("default", "caw_default"),) + versions - self._versions = { - Variant(self, lhs): [] for lhs, _ in versions_w_default - } + self._versions = {Variant(self, lhs): [] for lhs, _ in versions_w_default} for lhs, rhs in versions_w_default: self._versions[Variant(self, lhs)].append(Variant(self, rhs)) @@ -75,13 +68,13 @@ def version(self, target_dir: str, version: str) -> pb.LocalPath: """ Do not return anything useful for a test here. """ - return 'None' + return "None" def fetch(self) -> pb.LocalPath: """ Do not return anything useful for a test here. 
""" - return 'None' + return "None" def versions(self) -> tp.List[Variant]: raise ValueError("Context-Aware sources must not use versions()!") @@ -97,15 +90,16 @@ def versions_with_context(self, ctx: Revision) -> tp.Sequence[Variant]: class TestProject(project.Project): """Test project that does nothing.""" + NAME = "test" DOMAIN = "test" GROUP = "test" SOURCE = [TestSource("1", "2")] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - with open('test.cpp', 'w', encoding="utf-8") as test_source: + with open("test.cpp", "w", encoding="utf-8") as test_source: lines = """ #include int main(int argc, char **argv) { @@ -118,10 +112,10 @@ def compile(self): clang = bb.compiler.cxx(self) _clang = bb.watch(clang) - _clang('test.cpp', "-o", 'test.cpp.out') + _clang("test.cpp", "-o", "test.cpp.out") def run_tests(self): - exp = bb.wrap('test.cpp.out', self) + exp = bb.wrap("test.cpp.out", self) _exp = bb.watch(exp) _exp() @@ -134,10 +128,10 @@ class TestProjectRuntimeFail(project.Project): GROUP = "test" SRC_FILE = "test.cpp" SOURCE = [nosource()] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - with open('test.cpp', 'w') as test_source: + with open("test.cpp", "w") as test_source: lines = """ #include int main(int argc, char **argv) { @@ -150,10 +144,10 @@ def compile(self): clang = bb.compiler.cxx(self) _clang = bb.watch(clang) - _clang('test.cpp', "-o", 'test.cpp.out') + _clang("test.cpp", "-o", "test.cpp.out") def run_tests(self): - exp = bb.wrap('test.cpp.ou', self) + exp = bb.wrap("test.cpp.ou", self) _exp = bb.watch(exp) _exp() @@ -164,15 +158,12 @@ class CAWTestProject(project.Project): NAME = "test-caw" DOMAIN = "test" GROUP = "test" - SOURCE = [ - TestSource("1", "2", "3"), - CAWTestSource(("1", "caw_0"), ("3", "caw_0")) - ] + SOURCE = [TestSource("1", "2", "3"), CAWTestSource(("1", "caw_0"), 
("3", "caw_0"))] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def compile(self): - with open('test.cpp', 'w', encoding="utf-8") as test_source: + with open("test.cpp", "w", encoding="utf-8") as test_source: lines = """ #include int main(int argc, char **argv) { @@ -185,9 +176,9 @@ def compile(self): clang = bb.compiler.cxx(self) _clang = bb.watch(clang) - _clang('test.cpp', "-o", 'test.cpp.out') + _clang("test.cpp", "-o", "test.cpp.out") def run_tests(self): - exp = bb.wrap('test.cpp.out', self) + exp = bb.wrap("test.cpp.out", self) _exp = bb.watch(exp) _exp() diff --git a/benchbuild/settings.py b/benchbuild/settings.py index 97cf3ad89..488268cf4 100644 --- a/benchbuild/settings.py +++ b/benchbuild/settings.py @@ -4,6 +4,7 @@ All settings are stored in a simple dictionary. Each setting should be modifiable via environment variable. """ + import os import uuid from datetime import datetime @@ -17,286 +18,252 @@ "version": { "desc": "Version Number", "default": s.__version__, - "export": False + "export": False, }, "verbosity": { "desc": "The verbosity level of the logger. Range: 0-4", - "default": 0 - }, - "debug": { - "desc": "Should debug logging be enabled?", - "default": False + "default": 0, }, + "debug": {"desc": "Should debug logging be enabled?", "default": False}, "config_file": { "desc": "Config file path of benchbuild. Not guaranteed to exist.", "default": None, }, "build_dir": { - "desc": - "build directory of benchbuild. All intermediate projects will " - "be placed here", - "default": s.ConfigPath(os.path.join(os.getcwd(), "results")) + "desc": "build directory of benchbuild. All intermediate projects will " + "be placed here", + "default": s.ConfigPath(os.path.join(os.getcwd(), "results")), }, "tmp_dir": { "desc": "Temporary dir. 
This will be used for caching downloads.", - "default": s.ConfigPath(os.path.join(os.getcwd(), "tmp")) - }, - "force_tty": { - "desc": "Assume an active TTY.", - "default": False + "default": s.ConfigPath(os.path.join(os.getcwd(), "tmp")), }, + "force_tty": {"desc": "Assume an active TTY.", "default": False}, "force_watch_unbuffered": { "desc": "Force watched commands to output unbuffered.", - "default": False + "default": False, }, "jobs": { "desc": "Number of jobs that can be used for building and running.", - "default": str(s.available_cpu_count()) + "default": str(s.available_cpu_count()), }, "parallel_processes": { "desc": "Proccesses use to work on execution plans.", - "default": 1 + "default": 1, }, "experiments": { - "default": { - "empty": uuid.uuid4() - }, - "desc": "Dictionary of all experiments we want a defined uuid for." + "default": {"empty": uuid.uuid4()}, + "desc": "Dictionary of all experiments we want a defined uuid for.", }, "clean": { "default": True, "desc": "Clean temporary objects, after completion.", }, - "experiment_description": { - "default": str(datetime.now()), - "export": False - }, + "experiment_description": {"default": str(datetime.now()), "export": False}, "regression_prefix": { "default": os.path.join("/", "tmp", "benchbuild-regressions") }, "source_dir": { - "default": - None, - "desc": - "Path to a benchbuild source directory. For developers only." - }, - "benchbuild_ebuild": { - "default": "" + "default": None, + "desc": "Path to a benchbuild source directory. For developers only.", }, + "benchbuild_ebuild": {"default": ""}, "cleanup_paths": { "default": [], "desc": ( - 'List of existing paths that benchbuild should delete ' - 'in addition to the default cleanup steps.' + "List of existing paths that benchbuild should delete " + "in addition to the default cleanup steps." 
), - "export": False + "export": False, }, "sequence": { - "desc": - "The name of the sequence that should be used for " - "preoptimization.", - "default": "no_preperation" - } - } + "desc": "The name of the sequence that should be used for preoptimization.", + "default": "no_preperation", + }, + }, ) -CFG['bootstrap'] = { - 'packages': { - 'default': [ - "mkdir", "git", "tar", "mv", "rm", "bash", "rmdir", "time", "chmod", - "cp", "ln", "make", "unzip", "cat", "patch", "find", "echo", "grep", - "sed", "sh", "autoreconf", "ruby", "curl", "tail", "kill", - "virtualenv", "timeout" +CFG["bootstrap"] = { + "packages": { + "default": [ + "mkdir", + "git", + "tar", + "mv", + "rm", + "bash", + "rmdir", + "time", + "chmod", + "cp", + "ln", + "make", + "unzip", + "cat", + "patch", + "find", + "echo", + "grep", + "sed", + "sh", + "autoreconf", + "ruby", + "curl", + "tail", + "kill", + "virtualenv", + "timeout", ], - 'desc': - 'List of packages that we require to be installed on the system.' + "desc": "List of packages that we require to be installed on the system.", + }, + "install": { + "default": True, + "desc": "Should we try to install packages automatically?", }, - 'install': { - 'default': True, - 'desc': 'Should we try to install packages automatically?' - } } CFG["compiler"] = { - "c": { - "desc": "The C compiler we should use.", - "default": "clang" - }, - "cxx": { - "desc": "The C++ compiler we should use.", - "default": "clang++" - } + "c": {"desc": "The C compiler we should use.", "default": "clang"}, + "cxx": {"desc": "The C++ compiler we should use.", "default": "clang++"}, } CFG["unionfs"] = { "enable": { "default": False, - "desc": "Wrap all project operations in a unionfs filesystem." 
+ "desc": "Wrap all project operations in a unionfs filesystem.", }, - "rw": { - "default": 'rw', - "desc": 'Name of the image directory' - } + "rw": {"default": "rw", "desc": "Name of the image directory"}, } CFG["env"] = { "default": {}, - "desc": "The environment benchbuild's commands should operate in." + "desc": "The environment benchbuild's commands should operate in.", } -CFG['db'] = { - "enabled": { - "desc": "Whether the database is enabled.", - "default": False - }, - "connect_string": { - "desc": "sqlalchemy connect string", - "default": "sqlite://" - }, +CFG["db"] = { + "enabled": {"desc": "Whether the database is enabled.", "default": False}, + "connect_string": {"desc": "sqlalchemy connect string", "default": "sqlite://"}, "rollback": { "desc": "Rollback all operations after benchbuild completes.", - "default": False + "default": False, }, "create_functions": { "default": False, - "desc": "Should we recreate our SQL functions from scratch?" - } + "desc": "Should we recreate our SQL functions from scratch?", + }, } -CFG['gentoo'] = { +CFG["gentoo"] = { "autotest_lang": { "default": [], - "desc": "Language filter for ebuilds, like C or C++." + "desc": "Language filter for ebuilds, like C or C++.", }, "autotest_use": { "default": [], "desc": ( - 'USE filter for ebuilds. Filters packages without the given ' - 'use flags.' - ) + "USE filter for ebuilds. Filters packages without the given use flags." + ), }, "autotest_loc": { "default": "/tmp/gentoo-autotest", - "desc": "Location for the list of auto generated ebuilds." + "desc": "Location for the list of auto generated ebuilds.", }, - "http_proxy": { - "default": None, - "desc": "HTTP Proxy to use for downloads." - }, - "rsync_proxy": { - "default": None, - "desc": "RSYNC Proxy to use for downloads." - }, - "ftp_proxy": { - "default": None, - "desc": "FTP Proxy to use for downloads." 
- } + "http_proxy": {"default": None, "desc": "HTTP Proxy to use for downloads."}, + "rsync_proxy": {"default": None, "desc": "RSYNC Proxy to use for downloads."}, + "ftp_proxy": {"default": None, "desc": "FTP Proxy to use for downloads."}, } CFG["slurm"] = { - "account": { - "desc": "The SLURM account to use by default.", - "default": "cl" - }, + "account": {"desc": "The SLURM account to use by default.", "default": "cl"}, "partition": { "desc": "The SLURM partition to use by default.", - "default": "chimaira" + "default": "chimaira", }, "template": { "desc": "Template used to generate a SLURM script.", - "default": "misc/slurm.sh.inc" + "default": "misc/slurm.sh.inc", }, "script": { - "desc": - "Name of the script that can be passed to SLURM. Used by external " - "tools.", - "default": "slurm.sh" + "desc": "Name of the script that can be passed to SLURM. Used by external " + "tools.", + "default": "slurm.sh", }, "cpus_per_task": { "desc": ( - 'Number of CPUs that should be requested from SLURM.' - ' Used by external tools.' + "Number of CPUs that should be requested from SLURM." + " Used by external tools." ), - "default": 10 + "default": 10, }, "node_dir": { - "desc": - "Node directory, when executing on a cluster node. This is not " - "used by benchbuild directly, but by external scripts.", - "default": os.path.join(os.getcwd(), "results") + "desc": "Node directory, when executing on a cluster node. This is not " + "used by benchbuild directly, but by external scripts.", + "default": os.path.join(os.getcwd(), "results"), }, "timelimit": { "desc": "The timelimit we want to give to a job", - "default": "12:00:00" + "default": "12:00:00", }, "exclusive": { "desc": "Shall we reserve a node exclusively, or share it with others?", - "default": True + "default": True, }, "multithread": { "desc": "Hint SLURM to disable multithreading. False adds --hint=nomultithread.", - "default": False + "default": False, }, "turbo": { "desc": "Enable Intel Turbo Boost via SLURM. 
False adds --pstate-turbo=off.", - "default": False - }, - "logs": { - "desc": "Location the SLURM logs will be stored", - "default": "slurm.log" - }, - "nice": { - "desc": "Add a niceness value on our priority", - "default": 0 + "default": False, }, + "logs": {"desc": "Location the SLURM logs will be stored", "default": "slurm.log"}, + "nice": {"desc": "Add a niceness value on our priority", "default": 0}, "nice_clean": { "desc": "Add a niceness value on our cleanup job priority", - "default": 2500 + "default": 2500, }, "max_running": { "desc": "Limit the number of concurrent running array jobs", - "default": 0 + "default": 0, }, "node_image": { "desc": "Path to the archive we want on each cluster node.", - "default": os.path.join(os.path.curdir, "llvm.tar.gz") + "default": os.path.join(os.path.curdir, "llvm.tar.gz"), }, "extra_log": { "desc": "Extra log file to be managed by SLURM", - "default": "/tmp/.slurm" + "default": "/tmp/.slurm", }, "container_root": { "default": None, - "desc": "Permanent storage for container images" + "desc": "Permanent storage for container images", }, - "container_runroot": { - "default": None, - "desc": "Runtime storage for containers" - } + "container_runroot": {"default": None, "desc": "Runtime storage for containers"}, } CFG["uchroot"] = { "repo": { "default": "https://github.com/PolyJIT/erlent.git/", - "desc": "GIT Repo URL for erlent." + "desc": "GIT Repo URL for erlent.", } } CFG["plugins"] = { "autoload": { "default": True, - "desc": "Should automatic load of plugins be enabled?" + "desc": "Should automatic load of plugins be enabled?", }, "experiments": { "default": [ "benchbuild.experiments.raw", "benchbuild.experiments.empty", ], - "desc": "The experiment plugins we know about." 
+ "desc": "The experiment plugins we know about.", }, "projects": { "default": [ - "benchbuild.projects.gentoo", "benchbuild.projects.lnt.lnt", + "benchbuild.projects.gentoo", + "benchbuild.projects.lnt.lnt", "benchbuild.projects.polybench.polybench", "benchbuild.projects.polybench.polybench-mod", "benchbuild.projects.benchbuild.bots", @@ -327,65 +294,57 @@ "benchbuild.projects.benchbuild.xz", "benchbuild.projects.apollo.scimark", "benchbuild.projects.apollo.rodinia", - "benchbuild.projects.test.test" + "benchbuild.projects.test.test", ], - "desc": "The project plugins we know about." - } + "desc": "The project plugins we know about.", + }, } CFG["container"] = { "interactive": { "default": False, - "desc": "Drop into an interactive shell for all container runs" + "desc": "Drop into an interactive shell for all container runs", }, "keep": { "default": False, - "desc": "Keep failed image builds at their last known good state." + "desc": "Keep failed image builds at their last known good state.", }, "keep_suffix": { "default": "failed", - "desc": "Suffix to add to failed image builds, if we keep them." - }, - "replace": { - "default": False, - "desc": "Replace existing container images." + "desc": "Suffix to add to failed image builds, if we keep them.", }, + "replace": {"default": False, "desc": "Replace existing container images."}, "export": { - "default": - s.ConfigPath(os.path.join(os.getcwd(), "containers", "export")), - "desc": - "Export path for container images." + "default": s.ConfigPath(os.path.join(os.getcwd(), "containers", "export")), + "desc": "Export path for container images.", }, "import": { - "default": - s.ConfigPath(os.path.join(os.getcwd(), "containers", "export")), - "desc": - "Import path for container images." 
+ "default": s.ConfigPath(os.path.join(os.getcwd(), "containers", "export")), + "desc": "Import path for container images.", }, "from_source": { "default": False, - "desc": "Install benchbuild from source or from pip (default)" + "desc": "Install benchbuild from source or from pip (default)", }, "root": { "default": s.ConfigPath(os.path.join(os.getcwd(), "containers", "lib")), - "desc": "Permanent storage for container images" + "desc": "Permanent storage for container images", }, "runroot": { "default": s.ConfigPath(os.path.join(os.getcwd(), "containers", "run")), - "desc": "Runtime storage for containers" + "desc": "Runtime storage for containers", }, "runtime": { "default": "/usr/bin/crun", - "desc": "Default container runtime used by podman" + "desc": "Default container runtime used by podman", }, "source": { "default": s.ConfigPath(os.getcwd()), - "desc": "Path to benchbuild's source directory" + "desc": "Path to benchbuild's source directory", }, "storage_driver": { "default": None, - "desc": "Storage driver for containers." - "If 'null' use podman's default." + "desc": "Storage driver for containers.If 'null' use podman's default.", }, "storage_opts": { "default": [], @@ -404,138 +363,87 @@ "default": "container-out.tar.bz2", "desc": "Output container file." }, + "input": {"default": "container.tar.bz2", "desc": "Input container file/folder."}, + "output": {"default": "container-out.tar.bz2", "desc": "Output container file."}, "mounts": { "default": [], - "desc": "List of paths that will be mounted inside the container." + "desc": "List of paths that will be mounted inside the container.", }, "prefixes": { "default": [], - "desc": - "List of paths that will be treated as an " - "existing prefix inside a container." + "desc": "List of paths that will be treated as an " + "existing prefix inside a container.", }, "shell": { "default": "/bin/bash", - "desc": "Command string that should be used as shell command." 
+ "desc": "Command string that should be used as shell command.", }, "known": { "default": [], - "desc": - "List of known containers. Format: " - "[{ 'path': ," - " 'hash': }]" - }, - "images": { - "default": { - "gentoo": "gentoo.tar.bz2", - "ubuntu": "ubuntu.tar.bz2" - } + "desc": "List of known containers. Format: " + "[{ 'path': ," + " 'hash': }]", }, + "images": {"default": {"gentoo": "gentoo.tar.bz2", "ubuntu": "ubuntu.tar.bz2"}}, "prefered": { "default": [], - "desc": - "List of containers of which the project can chose from." - "Format:" - "[{ 'path': }]" - } + "desc": "List of containers of which the project can chose from." + "Format:" + "[{ 'path': }]", + }, } # This is needed to generate a valid benchbuild configuration schema. -CFG['container']['strategy'] = { - "dummy": { - 'default': None, - 'desc': 'dummy value' - } -} +CFG["container"]["strategy"] = {"dummy": {"default": None, "desc": "dummy value"}} -CFG['container']['strategy']['polyjit'] = { - "sync": { - "default": True, - "desc": "Update portage tree?" - }, - "upgrade": { - "default": True, - "desc": "Upgrade all packages?" 
- }, +CFG["container"]["strategy"]["polyjit"] = { + "sync": {"default": True, "desc": "Update portage tree?"}, + "upgrade": {"default": True, "desc": "Upgrade all packages?"}, "packages": { - "default": [{ - "name": "sys-devel/gcc:5.4.0", - "env": { - "ACCEPT_KEYWORDS": "~amd64", - "USE": "-filecaps" - } - }, { - "name": "dev-db/postgresql:9.5", - "env": {} - }, { - "name": "dev-python/pip", - "env": {} - }, { - "name": "net-misc/curl", - "env": {} - }, { - "name": "sys-apps/likwid", - "env": { - "USE": "-filecaps", - "ACCEPT_KEYWORDS": "~amd64" - } - }, { - "name": "dev-libs/libpfm", - "env": { - "USE": "static-libs", - "ACCEPT_KEYWORDS": "~amd64" - } - }, { - "name": "sys-process/time", - "env": {} - }, { - "name": "=dev-util/boost-build-1.58.0", - "env": { - "ACCEPT_KEYWORDS": "~amd64" - } - }, { - "name": "=dev-libs/boost-1.62-r1", - "env": { - "ACCEPT_KEYWORDS": "~amd64" - } - }, { - "name": "dev-libs/libpqxx", - "env": {} - }, { - "name": "dev-lang/python-3.5.3", - "env": { - "ACCEPT_KEYWORDS": "~amd64" - } - }, { - "name": "dev-python/dill", - "env": { - "ACCEPT_KEYWORDS": "~amd64" - } - }], - "desc": "A list of gentoo package atoms that should be merged." 
- } + "default": [ + { + "name": "sys-devel/gcc:5.4.0", + "env": {"ACCEPT_KEYWORDS": "~amd64", "USE": "-filecaps"}, + }, + {"name": "dev-db/postgresql:9.5", "env": {}}, + {"name": "dev-python/pip", "env": {}}, + {"name": "net-misc/curl", "env": {}}, + { + "name": "sys-apps/likwid", + "env": {"USE": "-filecaps", "ACCEPT_KEYWORDS": "~amd64"}, + }, + { + "name": "dev-libs/libpfm", + "env": {"USE": "static-libs", "ACCEPT_KEYWORDS": "~amd64"}, + }, + {"name": "sys-process/time", "env": {}}, + { + "name": "=dev-util/boost-build-1.58.0", + "env": {"ACCEPT_KEYWORDS": "~amd64"}, + }, + {"name": "=dev-libs/boost-1.62-r1", "env": {"ACCEPT_KEYWORDS": "~amd64"}}, + {"name": "dev-libs/libpqxx", "env": {}}, + {"name": "dev-lang/python-3.5.3", "env": {"ACCEPT_KEYWORDS": "~amd64"}}, + {"name": "dev-python/dill", "env": {"ACCEPT_KEYWORDS": "~amd64"}}, + ], + "desc": "A list of gentoo package atoms that should be merged.", + }, } CFG["versions"] = { "full": { "default": False, - "desc": "Ignore default sampling and provide full version exploration." 
+ "desc": "Ignore default sampling and provide full version exploration.", } } CFG["coverage"] = { "collect": { "desc": "Should benchuild collect coverage inside wrapped binaries.", - "default": False - }, - "config": { - "desc": "Where is the coverage config?", - "default": ".coveragerc" + "default": False, }, - "path": { - "desc": "Where should the coverage files be placed?", - "default": None - } + "config": {"desc": "Where is the coverage config?", "default": ".coveragerc"}, + "path": {"desc": "Where should the coverage files be placed?", "default": None}, } s.setup_config(CFG) diff --git a/benchbuild/signals.py b/benchbuild/signals.py index c8968b5db..23c8bfc9b 100644 --- a/benchbuild/signals.py +++ b/benchbuild/signals.py @@ -14,9 +14,7 @@ class CleanupOnSignal: def stored_procedures(self): return self.__stored_procedures - def register( - self, callback: tp.Callable, *args: tp.Any, **kwargs: tp.Any - ) -> None: + def register(self, callback: tp.Callable, *args: tp.Any, **kwargs: tp.Any) -> None: new_func = functools.partial(callback, *args, **kwargs) self.__stored_procedures[callback] = new_func diff --git a/benchbuild/source/__init__.py b/benchbuild/source/__init__.py index c0a8a1fc4..f2d23a2b8 100644 --- a/benchbuild/source/__init__.py +++ b/benchbuild/source/__init__.py @@ -2,6 +2,7 @@ """ Declarative API for downloading sources required by benchbuild. """ + from .base import FetchableSource as FetchableSource from .base import Expandable as Expandable from .base import ContextAwareSource as ContextAwareSource diff --git a/benchbuild/source/base.py b/benchbuild/source/base.py index 7a851a8d2..bfcaf7d9d 100644 --- a/benchbuild/source/base.py +++ b/benchbuild/source/base.py @@ -1,9 +1,9 @@ """ Provide a base interface for downloadable sources. """ + import abc import itertools -import sys import typing as tp from typing import Protocol @@ -37,13 +37,13 @@ class Variant: same way as a program variant like a specific configuraiton. 
""" - owner: 'FetchableSource' = attr.ib(eq=False, repr=False) + owner: "FetchableSource" = attr.ib(eq=False, repr=False) version: str = attr.ib() def name(self) -> str: return self.owner.local - def source(self) -> 'FetchableSource': + def source(self) -> "FetchableSource": return self.owner def __str__(self) -> str: @@ -70,8 +70,7 @@ class Revision: variants: tp.Sequence[Variant] def __init__( - self, project_cls: tp.Type["Project"], _primary: Variant, - *variants: Variant + self, project_cls: tp.Type["Project"], _primary: Variant, *variants: Variant ) -> None: self.project_cls = project_cls self.variants = [_primary] + list(variants) @@ -80,7 +79,6 @@ def extend(self, *variants: Variant) -> None: self.variants = list(self.variants) + list(variants) def __update_variant(self, variant: Variant) -> None: - def __replace(elem: Variant): if elem.name() == variant.name(): return variant @@ -122,7 +120,7 @@ def has_variant(self, name: str) -> bool: """ return any(variant.owner.key == name for variant in self.variants) - def source_by_name(self, name: str) -> 'FetchableSource': + def source_by_name(self, name: str) -> "FetchableSource": """ Return the source object that matches the key. 
@@ -179,7 +177,6 @@ def to_str(*variants: Variant) -> str: class Fetchable(Protocol): - @property def key(self) -> str: """ @@ -211,7 +208,6 @@ def fetch(self) -> pb.LocalPath: class Expandable(Protocol): - @property def is_expandable(self) -> bool: """ @@ -231,7 +227,6 @@ def versions(self) -> tp.Sequence[Variant]: class ContextAwareSource(Protocol): - def is_context_free(self) -> bool: """ Return, if this source needs context to evaluate it's own @@ -266,7 +261,6 @@ def versions_with_context(self, ctx: Revision) -> tp.Sequence[Variant]: class Versioned(Protocol): - @property def default(self) -> Variant: """ @@ -390,27 +384,26 @@ def fetch(self) -> pb.LocalPath: raise NotImplementedError() -Sources = tp.List['FetchableSource'] +Sources = tp.List["FetchableSource"] class NoSource(FetchableSource): - @property def default(self) -> Variant: - return Variant(owner=self, version='None') + return Variant(owner=self, version="None") def version(self, target_dir: str, version: str) -> pb.LocalPath: - return 'None' + return "None" def versions(self) -> tp.List[Variant]: - return [Variant(owner=self, version='None')] + return [Variant(owner=self, version="None")] def fetch(self) -> pb.LocalPath: - return 'None' + return "None" def nosource() -> NoSource: - return NoSource('NoSource', 'NoSource') + return NoSource("NoSource", "NoSource") def target_prefix() -> str: @@ -420,10 +413,10 @@ def target_prefix() -> str: Returns: str: the prefix where we download everything to. """ - return str(CFG['tmp_dir']) + return str(CFG["tmp_dir"]) -SourceT = tp.TypeVar('SourceT') +SourceT = tp.TypeVar("SourceT") def primary(*sources: SourceT) -> SourceT: @@ -469,7 +462,6 @@ class BaseSource(Expandable, Versioned, ContextAwareSource, Protocol): class EnumeratorFn(Protocol): - def __call__(self, *source: Expandable) -> NestedVariants: """ Return an enumeration of all variants for each source. 
@@ -484,10 +476,11 @@ def _default_enumerator(*sources: Expandable) -> NestedVariants: class ContextEnumeratorFn(Protocol): - def __call__( - self, project_cls: tp.Type["Project"], context: Revision, - *sources: ContextAwareSource + self, + project_cls: tp.Type["Project"], + context: Revision, + *sources: ContextAwareSource, ) -> tp.Sequence[Revision]: """ Enumerate all revisions that are valid under the given context. @@ -495,8 +488,7 @@ def __call__( def _default_caw_enumerator( - project_cls: tp.Type["Project"], context: Revision, - *sources: ContextAwareSource + project_cls: tp.Type["Project"], context: Revision, *sources: ContextAwareSource ) -> tp.Sequence[Revision]: """ Transform given variant into a list of variants to check. @@ -522,7 +514,7 @@ def _default_caw_enumerator( def enumerate_revisions( project_cls: tp.Type["Project"], context_free_enumerator: EnumeratorFn = _default_enumerator, - context_aware_enumerator: ContextEnumeratorFn = _default_caw_enumerator + context_aware_enumerator: ContextEnumeratorFn = _default_caw_enumerator, ) -> tp.Sequence[Revision]: """ Enumerates the given sources. 
@@ -533,25 +525,20 @@ def enumerate_revisions( """ sources = project_cls.SOURCE - context_free_sources = [ - source for source in sources if source.is_context_free() - ] + context_free_sources = [source for source in sources if source.is_context_free()] context_aware_sources = [ source for source in sources if not source.is_context_free() ] revisions = context_free_enumerator(*context_free_sources) - project_revisions = [ - Revision(project_cls, *variants) for variants in revisions - ] + project_revisions = [Revision(project_cls, *variants) for variants in revisions] if len(context_aware_sources) > 0: revs = list( itertools.chain( *( - context_aware_enumerator( - project_cls, rev, *context_aware_sources - ) for rev in project_revisions + context_aware_enumerator(project_cls, rev, *context_aware_sources) + for rev in project_revisions ) ) ) @@ -602,12 +589,16 @@ def revision_from_str( for source in sources: if source.is_expandable: - found.extend([ - variant for variant in source.explore() for rev in revs - if variant.version == rev.value - ]) + found.extend( + [ + variant + for variant in source.explore() + for rev in revs + if variant.version == rev.value + ] + ) if len(found) == 0: - raise ValueError(f'Revisions {revs} not found in any available source.') + raise ValueError(f"Revisions {revs} not found in any available source.") return Revision(project_cls, primary(*found), *secondaries(*found)) diff --git a/benchbuild/source/git.py b/benchbuild/source/git.py index 5b967d0d0..91c4ffe6a 100644 --- a/benchbuild/source/git.py +++ b/benchbuild/source/git.py @@ -32,7 +32,7 @@ def __init__( local: str, clone: bool = True, limit: tp.Optional[int] = 10, - refspec: str = 'HEAD', + refspec: str = "HEAD", shallow: bool = True, submodule_set_urls: tp.Optional[tp.Dict[str, str]] = None, version_filter: tp.Callable[[str], bool] = lambda version: True @@ -98,7 +98,7 @@ def fetch(self) -> pb.LocalPath: return cache_path - def version(self, target_dir: str, version: str = 'HEAD') 
-> pb.LocalPath: + def version(self, target_dir: str, version: str = "HEAD") -> pb.LocalPath: """ Create a new git worktree pointing to the requested version. @@ -114,7 +114,7 @@ def version(self, target_dir: str, version: str = 'HEAD') -> pb.LocalPath: """ src_loc = self.fetch() active_loc = pb.local.path(target_dir) / self.local - tgt_subdir = f'{self.local}-{version}' + tgt_subdir = f"{self.local}-{version}" tgt_loc = pb.local.path(target_dir) / tgt_subdir clone = git['clone'] @@ -125,17 +125,18 @@ def version(self, target_dir: str, version: str = 'HEAD') -> pb.LocalPath: checkout = git['checkout', '-f'] with pb.local.cwd(src_loc): - is_shallow = rev_parse('--is-shallow-repository').strip() - if is_shallow == 'true': - pull('--unshallow') + is_shallow = rev_parse("--is-shallow-repository").strip() + if is_shallow == "true": + pull("--unshallow") if Path(tgt_loc).exists(): LOG.info( - 'Found target location %s. Going to skip creation and ' - 'repository cloning.', str(tgt_loc) + "Found target location %s. 
Going to skip creation and " + "repository cloning.", + str(tgt_loc), ) else: - mkdir('-p', tgt_loc) + mkdir("-p", tgt_loc) with pb.local.cwd(tgt_loc): clone('--dissociate', '--reference', src_loc, self.remote, '.') checkout('--detach', version) @@ -151,20 +152,19 @@ def version(self, target_dir: str, version: str = 'HEAD') -> pb.LocalPath: def versions(self) -> tp.List[base.Variant]: cache_path = self.fetch() - git_rev_list = git['rev-list', '--abbrev-commit', '--abbrev=10'] + git_rev_list = git["rev-list", "--abbrev-commit", "--abbrev=10"] rev_list: tp.List[str] = [] with pb.local.cwd(cache_path): - rev_list = list(git_rev_list(self.refspec).strip().split('\n')) + rev_list = list(git_rev_list(self.refspec).strip().split("\n")) rev_list = list(filter(self.version_filter, rev_list)) - rev_list = rev_list[:self.limit] if self.limit else rev_list + rev_list = rev_list[: self.limit] if self.limit else rev_list revs = [base.Variant(version=rev, owner=self) for rev in rev_list] return revs class GitSubmodule(Git): - @property def is_expandable(self) -> bool: """Submodules will not participate in version expansion.""" @@ -185,7 +185,7 @@ def maybe_shallow(cmd: BoundCommand, enable: bool) -> BoundCommand: Any: A new git clone command, with shallow clone enabled, if selected. """ if enable: - return cmd['--depth', '1'] + return cmd["--depth", "1"] return cmd @@ -193,6 +193,6 @@ def clone_needed(repository: VarRemotes, repo_loc: str) -> bool: from benchbuild.utils.download import __clone_needed__ if not isinstance(repository, str): - raise TypeError('\'remote\' needs to be a git repo string') + raise TypeError("'remote' needs to be a git repo string") return __clone_needed__(repository, repo_loc) diff --git a/benchbuild/source/http.py b/benchbuild/source/http.py index 2d7e5b44e..be90eca6a 100644 --- a/benchbuild/source/http.py +++ b/benchbuild/source/http.py @@ -1,12 +1,13 @@ """ Declare a http source. 
""" + import typing as tp import plumbum as pb from benchbuild.source import base -from benchbuild.utils.cmd import cp, ln, wget, tar, mkdir, mv +from benchbuild.utils.cmd import cp, ln, wget, tar, mkdir VarRemotes = tp.Union[str, tp.Dict[str, str]] Remotes = tp.Dict[str, str] @@ -63,8 +64,8 @@ def version(self, target_dir: str, version: str) -> pb.LocalPath: target_path = pb.local.path(target_dir) / target_name active_loc = pb.local.path(target_dir) / self.local - cp('-ar', cache_path, target_path) - ln('-sf', target_name, active_loc) + cp("-ar", cache_path, target_path) + ln("-sf", target_name, active_loc) return target_path @@ -113,7 +114,7 @@ def version(self, target_dir: str, version: str) -> pb.LocalPath: mkdir(target_path) tar("-x", "--no-same-owner", "-C", target_path, "-f", archive_path) - ln('-sf', target_path, active_loc) + ln("-sf", target_path, active_loc) return target_path @@ -141,7 +142,7 @@ def fetch_version(self, version: str) -> pb.LocalPath: target_name = versioned_target_name(self.local, version) cache_path = pb.local.path(prefix) / target_name - mkdir('-p', cache_path) + mkdir("-p", cache_path) for file in self._files: download_single_version( @@ -153,7 +154,7 @@ def fetch_version(self, version: str) -> pb.LocalPath: def normalize_remotes(remote: VarRemotes) -> Remotes: if isinstance(remote, str): - raise TypeError('\'remote\' needs to be a mapping type') + raise TypeError("'remote' needs to be a mapping type") # FIXME: What the hell? 
_remotes: Remotes = {} @@ -177,10 +178,12 @@ def download_single_version( wget(url, '--no-check-certificate', '-O', target_path) from benchbuild.utils.download import update_hash + update_hash(target_path) return target_path def download_required(target_path: str) -> bool: from benchbuild.utils.download import source_required + return source_required(target_path) diff --git a/benchbuild/source/versions.py b/benchbuild/source/versions.py index 21e4b8742..fb9bd3500 100644 --- a/benchbuild/source/versions.py +++ b/benchbuild/source/versions.py @@ -68,14 +68,12 @@ def __init__(self, child: base.FetchableSource, filter_version: str): self.filter_version = filter_version def versions(self) -> tp.List[base.Variant]: - return [ - v for v in self.child.versions() if str(v) == self.filter_version - ] + return [v for v in self.child.versions() if str(v) == self.filter_version] - def versions_with_context(self, - ctx: base.Revision) -> tp.Sequence[base.Variant]: + def versions_with_context(self, ctx: base.Revision) -> tp.Sequence[base.Variant]: return [ - v for v in self.child.versions_with_context(ctx) + v + for v in self.child.versions_with_context(ctx) if str(v) == self.filter_version ] diff --git a/benchbuild/statistics.py b/benchbuild/statistics.py index 0fa069a3d..902c2d767 100644 --- a/benchbuild/statistics.py +++ b/benchbuild/statistics.py @@ -1,6 +1,7 @@ """ Handle all statsitic related classes and methods. 
""" + import logging from benchbuild.extensions import Extension @@ -42,7 +43,7 @@ def t_test(self, *results, significance=0.95): del result # Unused temporarily t_statistic = 0 p_value = 0 - #t_statistic, p_value = scipy.stats.ttest_1samp(result, TRUE_MU) + # t_statistic, p_value = scipy.stats.ttest_1samp(result, TRUE_MU) LOG.debug("t-statistic = %f, pvalue = %f", t_statistic, p_value) return p_value >= 1 - significance @@ -63,28 +64,26 @@ def __call__(self, *args, timeout=TIMEOUT, **kwargs): session = Session() while iterator < timeout: - #get an run_info object after executing the run with its extensions + # get an run_info object after executing the run with its extensions ri_object = self.call_next(*args, **kwargs) - #check if the experiment defines the result function - if hasattr(self.experiment, 'res_func'): + # check if the experiment defines the result function + if hasattr(self.experiment, "res_func"): results = self.experiment.res_func(ri_object) if self.t_test(results): LOG.info("The run was significant.") break - #check if this was the last iteration + # check if this was the last iteration if iterator == (timeout - 1): - LOG.warning( - "No significant run happened before the timeout!" - ) + LOG.warning("No significant run happened before the timeout!") iterator += 1 # no need to repeat the run without a result function else: break - #Commit the database session containing all runs + # Commit the database session containing all runs session.commit() LOG.info("Overall one command was executed %s times.", iterator) diff --git a/benchbuild/utils/__init__.py b/benchbuild/utils/__init__.py index 80188c091..f1bc4c171 100644 --- a/benchbuild/utils/__init__.py +++ b/benchbuild/utils/__init__.py @@ -3,6 +3,7 @@ to plumbum. The built modules are only active during a run of an experiment and get deleted afterwards. """ + import logging import os import sys @@ -23,6 +24,7 @@ class ErrorCommand(LocalCommand): without the entire study to crash. 
The experiment will fail anyway, but without the entire programm crashing. """ + EXE = __name__ + ".error_cmd" def run(self, *args, **kwargs): @@ -81,7 +83,7 @@ def __getattr__(self, command: str) -> pb.commands.ConcreteCommand: alias_cmd = alias_cmd.with_env( PATH=list_to_path(path), LD_LIBRARY_PATH=list_to_path(libs_path), - HOME=home + HOME=home, ) return alias_cmd except AttributeError: diff --git a/benchbuild/utils/actions.py b/benchbuild/utils/actions.py index bcc1eb091..bcb9de37e 100644 --- a/benchbuild/utils/actions.py +++ b/benchbuild/utils/actions.py @@ -11,6 +11,7 @@ ```python ``` """ + from __future__ import annotations import abc @@ -38,8 +39,9 @@ ReturnTypeA = tp.TypeVar("ReturnTypeA") ReturnTypeB = tp.TypeVar("ReturnTypeB") DecoratedFunction = tp.Callable[..., ReturnType] -FunctionDecorator = tp.Callable[[DecoratedFunction[ReturnTypeA]], - DecoratedFunction[ReturnTypeB]] +FunctionDecorator = tp.Callable[ + [DecoratedFunction[ReturnTypeA]], DecoratedFunction[ReturnTypeB] +] if tp.TYPE_CHECKING: import benchbuild.experiment.Experiment # pylint: disable=unused-import @@ -61,8 +63,7 @@ class StepResult(enum.IntEnum): def step_has_failed( - result: StepResult, - error_status: tp.Optional[tp.List[StepResult]] = None + result: StepResult, error_status: tp.Optional[tp.List[StepResult]] = None ) -> bool: if not error_status: error_status = [StepResult.ERROR, StepResult.CAN_CONTINUE] @@ -74,12 +75,10 @@ def prepend_status(func: DecoratedFunction[str]) -> DecoratedFunction[str]: """Prepends the output of `func` with the status.""" @tp.overload - def wrapper(self: "Step", indent: int) -> str: - ... + def wrapper(self: "Step", indent: int) -> str: ... @tp.overload - def wrapper(self: "Step") -> str: - ... + def wrapper(self: "Step") -> str: ... 
@ft.wraps(func) def wrapper(self: "Step", *args: tp.Any, **kwargs: tp.Any) -> str: @@ -92,8 +91,7 @@ def wrapper(self: "Step", *args: tp.Any, **kwargs: tp.Any) -> str: return wrapper -def log_before_after(name: str, - desc: str) -> FunctionDecorator[StepResult, StepResult]: +def log_before_after(name: str, desc: str) -> FunctionDecorator[StepResult, StepResult]: """Log customized string before & after running func.""" def func_decorator( @@ -135,8 +133,7 @@ def __init_subclass__(cls, **kwargs: tp.Any): super().__init_subclass__(**kwargs) setattr( - cls, "__call__", - log_before_after(cls.NAME, cls.DESCRIPTION)(cls.__call__) + cls, "__call__", log_before_after(cls.NAME, cls.DESCRIPTION)(cls.__call__) ) setattr(cls, "__str__", prepend_status(cls.__str__)) @@ -204,8 +201,7 @@ class MultiStep(Step, tp.Generic[StepTy_co]): actions: tp.MutableSequence[StepTy_co] def __init__( - self, - actions: tp.Optional[tp.MutableSequence[StepTy_co]] = None + self, actions: tp.Optional[tp.MutableSequence[StepTy_co]] = None ) -> None: super().__init__(StepResult.UNSET) @@ -233,9 +229,7 @@ class Clean(ProjectStep): DESCRIPTION = "Cleans the build directory" def __init__( - self, - project: "benchbuild.project.Project", - check_empty: bool = False + self, project: "benchbuild.project.Project", check_empty: bool = False ) -> None: super().__init__(project) self.check_empty = check_empty @@ -282,8 +276,7 @@ def __call__(self) -> StepResult: def __str__(self, indent: int = 0) -> str: project = self.project return textwrap.indent( - f"* {project.name}: Clean the directory: {project.builddir}", - indent * " " + f"* {project.name}: Clean the directory: {project.builddir}", indent * " " ) @@ -438,9 +431,7 @@ def __init__( Echo(message=f"Start experiment: {experiment.name}") ] _actions.extend(actions if actions else []) - _actions.extend([ - Echo(message=f"Completed experiment: {experiment.name}") - ]) + _actions.extend([Echo(message=f"Completed experiment: {experiment.name}")]) 
super().__init__(_actions) self.experiment = experiment @@ -449,6 +440,7 @@ def begin_transaction( self, ) -> tp.Tuple["benchbuild.utils.schema.Experiment", tp.Any]: import sqlalchemy as sa # pylint: disable=import-outside-toplevel + experiment, session = db.persist_experiment(self.experiment) if experiment.begin is None: experiment.begin = datetime.now() @@ -462,9 +454,7 @@ def begin_transaction( LOG.error("Transaction isolation level caused a StaleDataError") # React to external signals - signals.handlers.register( - Experiment.end_transaction, experiment, session - ) + signals.handlers.register(Experiment.end_transaction, experiment, session) return experiment, session @@ -473,6 +463,7 @@ def end_transaction( experiment: "benchbuild.utils.schema.Experiment", session: tp.Any ) -> None: import sqlalchemy as sa # pylint: disable=import-outside-toplevel + try: experiment.end = max(experiment.end, datetime.now()) session.add(experiment) @@ -494,8 +485,7 @@ def __run_children(self, num_processes: int) -> tp.List[StepResult]: LOG.info("Experiment aborting by user request") results.append(StepResult.ERROR) except Exception: - LOG.error("Experiment terminates " - "because we got an exception:") + LOG.error("Experiment terminates because we got an exception:") e_type, e_value, e_traceb = sys.exc_info() lines = traceback.format_exception(e_type, e_value, e_traceb) LOG.error("".join(lines)) @@ -541,9 +531,7 @@ def no_fail(*args: tp.Any, **kwargs: tp.Any): result = action() except ProcessExecutionError as proc_ex: LOG.error("\n==== ERROR ====") - LOG.error( - "Execution of a binary failed in step: %s", str(action) - ) + LOG.error("Execution of a binary failed in step: %s", str(action)) LOG.error(str(proc_ex)) LOG.error("==== ERROR ====\n") result = StepResult.ERROR @@ -596,7 +584,7 @@ def workload_ref(self) -> WorkloadTy: def __init__( self, project: "benchbuild.project.Project", - workload: tp.Optional[WorkloadTy] = None + workload: tp.Optional[WorkloadTy] = None, ) -> 
None: super().__init__(project) @@ -637,22 +625,21 @@ def __init__( self.experiment = experiment index = command.unwrap(project.workloads, project) - workloads = itertools.chain( - *command.filter_workload_index(run_only, index) - ) + workloads = itertools.chain(*command.filter_workload_index(run_only, index)) for workload in workloads: - self.actions.extend([ - RunWorkload(project, command.ProjectCommand(project, workload)) - ]) + self.actions.extend( + [RunWorkload(project, command.ProjectCommand(project, workload))] + ) def __call__(self) -> StepResult: if CFG["db"]["enabled"]: group, session = run.begin_run_group(self.project, self.experiment) signals.handlers.register(run.fail_run_group, group, session) try: - self.status = max([workload() for workload in self.actions], - default=StepResult.OK) + self.status = max( + [workload() for workload in self.actions], default=StepResult.OK + ) if CFG["db"]["enabled"]: run.end_run_group(group, session) except ProcessExecutionError: @@ -675,7 +662,7 @@ def __str__(self, indent: int = 0) -> str: sub_actns = "\n".join([a.__str__(indent + 1) for a in self.actions]) return textwrap.indent( f"* Require all of {self.project.name}'s workloads:\n{sub_actns}", - indent * " " + indent * " ", ) @@ -701,9 +688,7 @@ def __str__(self, indent: int = 0) -> str: paths = CFG["cleanup_paths"].value lines = [] for p in paths: - lines.append( - textwrap.indent(f"* Clean the directory: {p}", indent * " ") - ) + lines.append(textwrap.indent(f"* Clean the directory: {p}", indent * " ")) return "\n".join(lines) @@ -732,8 +717,7 @@ def __str__(self, indent: int = 0) -> str: version_str = str(revision) return textwrap.indent( - f"* Project environment for: {project.name} @ {version_str}", - indent * " " + f"* Project environment for: {project.name} @ {version_str}", indent * " " ) @@ -750,9 +734,7 @@ def __init__( ) -> None: super().__init__(project) - self.revision = source.revision_from_str( - revision_strings, type(project) - ) + self.revision 
= source.revision_from_str(revision_strings, type(project)) def __call__(self) -> StepResult: project = self.project @@ -774,6 +756,5 @@ def __str__(self, indent: int = 0) -> str: version_str = str(self.revision) return textwrap.indent( - f"* Add project version {version_str} for: {project.name}", - indent * " " + f"* Add project version {version_str} for: {project.name}", indent * " " ) diff --git a/benchbuild/utils/bootstrap.py b/benchbuild/utils/bootstrap.py index bd6727ff7..6c6878e6a 100644 --- a/benchbuild/utils/bootstrap.py +++ b/benchbuild/utils/bootstrap.py @@ -1,4 +1,5 @@ -""" Helper functions for bootstrapping external dependencies. """ +"""Helper functions for bootstrapping external dependencies.""" + import logging import os import platform @@ -27,6 +28,7 @@ def find_package(binary: str) -> bool: True, if the binary name can be imported by benchbuild. """ from benchbuild.utils import cmd + c = cmd.__getattr__(binary) found = not isinstance(c, utils.ErrorCommand) @@ -43,50 +45,41 @@ def find_package(binary: str) -> bool: "gentoo base system": ["sys-fs/unionfs-fuse"], "ubuntu": ["unionfs-fuse"], "debian": ["unionfs-fuse"], - "suse": ["unionfs-fuse"] + "suse": ["unionfs-fuse"], }, "postgres": { "gentoo base system": ["dev-db/postgres", "dev-libs/libpqxx"], "ubuntu": ["libpq-dev", "libpqxx-dev"], "debian": ["libpq-dev", "libpqxx-dev"], - "suse": ["postgresql-devel", "libpqxx-devel"] + "suse": ["postgresql-devel", "libpqxx-devel"], }, "fusermount": { "gentoo base system": ["sys-fs/fuse"], "ubuntu": ["fuse"], "debian": ["fuse"], - "suse": ["fuse", "fuse-devel"] + "suse": ["fuse", "fuse-devel"], }, "cmake": { "gentoo base system": ["dev-util/cmake"], "ubuntu": ["cmake"], "debian": ["cmake"], - "suse": ["cmake"] + "suse": ["cmake"], }, "autoreconf": { "ubuntu": ["autoconf"], "debian": ["autoconf"], - "suse": ["autoconf"] - } + "suse": ["autoconf"], + }, } PACKAGE_MANAGER = { - "gentoo base system": { - "cmd": "emerge", - "args": ["-a"] - }, - "ubuntu": { - 
"cmd": "apt-get", - "args": ["install"] - }, - "debian": { - "cmd": "apt-get", - "args": ["install"] - }, + "gentoo base system": {"cmd": "emerge", "args": ["-a"]}, + "ubuntu": {"cmd": "apt-get", "args": ["install"]}, + "debian": {"cmd": "apt-get", "args": ["install"]}, "suse": { "cmd": "zypper", "args": ["install"], - } + }, } @@ -94,10 +87,10 @@ def install_uchroot(_): """Installer for erlent (contains uchroot).""" builddir = local.path(str(CFG["build_dir"].value)) with local.cwd(builddir): - erlent_src = local.path('erlent') - erlent_git = erlent_src / '.git' - erlent_repo = str(CFG['uchroot']['repo']) - erlent_build = erlent_src / 'build' + erlent_src = local.path("erlent") + erlent_git = erlent_src / ".git" + erlent_repo = str(CFG["uchroot"]["repo"]) + erlent_build = erlent_src / "build" if not erlent_git.exists(): git("clone", erlent_repo) else: @@ -109,21 +102,17 @@ def install_uchroot(_): cmake("../") make() - os.environ["PATH"] = os.path.pathsep.join([ - erlent_build, os.environ["PATH"] - ]) + os.environ["PATH"] = os.path.pathsep.join([erlent_build, os.environ["PATH"]]) local.env.update(PATH=os.environ["PATH"]) if not find_package("uchroot"): - LOG.error( - 'uchroot not found, after updating PATH to %s', os.environ['PATH'] - ) + LOG.error("uchroot not found, after updating PATH to %s", os.environ["PATH"]) sys.exit(-1) - env = CFG['env'].value - if 'PATH' not in env: - env['PATH'] = [] - env['PATH'].append(str(erlent_build)) + env = CFG["env"].value + if "PATH" not in env: + env["PATH"] = [] + env["PATH"].append(str(erlent_build)) def check_uchroot_config() -> None: @@ -132,20 +121,18 @@ def check_uchroot_config() -> None: """ print("Checking configuration of 'uchroot'") - fuse_grep = grep['-q', '-e'] + fuse_grep = grep["-q", "-e"] username = getuser() if not (fuse_grep["^user_allow_other", "/etc/fuse.conf"] & TF): print("uchroot needs 'user_allow_other' enabled in '/etc/fuse.conf'.") if not (fuse_grep["^{0}".format(username), "/etc/subuid"] & TF): print( - 
"uchroot needs an entry for user '{0}' in '/etc/subuid'.". - format(username) + "uchroot needs an entry for user '{0}' in '/etc/subuid'.".format(username) ) if not (fuse_grep["^{0}".format(username), "/etc/subgid"] & TF): print( - "uchroot needs an entry for user '{0}' in '/etc/subgid'.". - format(username) + "uchroot needs an entry for user '{0}' in '/etc/subgid'.".format(username) ) @@ -156,7 +143,7 @@ def linux_distribution_major() -> tp.Optional[str]: Returns: The name of the linux distribution, if known. """ - if not platform.system() == 'Linux': + if not platform.system() == "Linux": return None # python > 3.7 @@ -180,7 +167,7 @@ def install_package(pkg_name: str) -> bool: Args: pkg_name: The package name to install. """ - if not bool(CFG['bootstrap']['install']): + if not bool(CFG["bootstrap"]["install"]): return False if pkg_name not in PACKAGES: @@ -191,8 +178,7 @@ def install_package(pkg_name: str) -> bool: for pkg_name_on_host in packages: print("You are missing the package: '{0}'".format(pkg_name_on_host)) cmd = local["sudo"] - cmd = cmd[package_manager["cmd"], package_manager["args"], - pkg_name_on_host] + cmd = cmd[package_manager["cmd"], package_manager["args"], pkg_name_on_host] cmd_str = str(cmd) ret = False @@ -209,8 +195,7 @@ def install_package(pkg_name: str) -> bool: def provide_package( - pkg_name: str, - installer: tp.Callable[[str], bool] = install_package + pkg_name: str, installer: tp.Callable[[str], bool] = install_package ) -> None: """ Make sure the package is provided by the system, if required. diff --git a/benchbuild/utils/compiler.py b/benchbuild/utils/compiler.py index 76874200c..81a9d472b 100644 --- a/benchbuild/utils/compiler.py +++ b/benchbuild/utils/compiler.py @@ -18,6 +18,7 @@ Are just convencience methods that can be used when interacting with the configured llvm/clang source directories. 
""" + import os import typing as tp from typing import TYPE_CHECKING @@ -32,10 +33,9 @@ if TYPE_CHECKING: from benchbuild.project import Project - from benchbuild.experiment import Experiment -def cc(project: 'Project', detect_project: bool = False) -> BoundCommand: +def cc(project: "Project", detect_project: bool = False) -> BoundCommand: """ Return a clang that hides CFLAGS and LDFLAGS. @@ -58,7 +58,7 @@ def cc(project: 'Project', detect_project: bool = False) -> BoundCommand: return cmd["./{}".format(cc_name)] -def cxx(project: 'Project', detect_project: bool = False) -> BoundCommand: +def cxx(project: "Project", detect_project: bool = False) -> BoundCommand: """ Return a clang++ that hides CFLAGS and LDFLAGS. @@ -78,9 +78,7 @@ def cxx(project: 'Project', detect_project: bool = False) -> BoundCommand: """ cxx_name = str(CFG["compiler"]["cxx"]) - wrap_cc( - cxx_name, compiler(cxx_name), project, detect_project=detect_project - ) + wrap_cc(cxx_name, compiler(cxx_name), project, detect_project=detect_project) return cmd["./{name}".format(name=cxx_name)] @@ -115,8 +113,6 @@ def compiler(name: str) -> BoundCommand: pinfo = __get_paths() _compiler = local[name] _compiler = _compiler.setenv( - PATH=pinfo["path"], - LD_LIBRARY_PATH=pinfo["ld_library_path"], - HOME=pinfo["home"] + PATH=pinfo["path"], LD_LIBRARY_PATH=pinfo["ld_library_path"], HOME=pinfo["home"] ) return _compiler diff --git a/benchbuild/utils/container.py b/benchbuild/utils/container.py index fae14df0d..5975414c2 100644 --- a/benchbuild/utils/container.py +++ b/benchbuild/utils/container.py @@ -1,4 +1,5 @@ """Container utilites.""" + import logging import os from datetime import datetime @@ -63,9 +64,9 @@ def local(self): class Gentoo(Container): name = "gentoo" - _LATEST_TXT = \ - "http://distfiles.gentoo.org/releases/amd64/autobuilds/"\ - "latest-stage3-amd64.txt" + _LATEST_TXT = ( + "http://distfiles.gentoo.org/releases/amd64/autobuilds/latest-stage3-amd64.txt" + ) @property @cached @@ -77,12 +78,12 @@ 
def src_file(self): Latest src_uri from gentoo's distfiles mirror. """ try: - src_uri = (curl[Gentoo._LATEST_TXT] | tail["-n", "+3"] | - cut["-f1", "-d "])().strip() + src_uri = ( + curl[Gentoo._LATEST_TXT] | tail["-n", "+3"] | cut["-f1", "-d "] + )().strip() except ProcessExecutionError as proc_ex: src_uri = "NOT-FOUND" - LOG.error("Could not determine latest stage3 src uri: %s", - str(proc_ex)) + LOG.error("Could not determine latest stage3 src uri: %s", str(proc_ex)) return src_uri @property @@ -90,11 +91,10 @@ def src_file(self): def version(self): """Return the build date of the gentoo container.""" try: - _version = (curl[Gentoo._LATEST_TXT] | \ - awk['NR==2{print}'] | \ - cut["-f2", "-d="])().strip() - _version = datetime.utcfromtimestamp(int(_version))\ - .strftime("%Y-%m-%d") + _version = ( + curl[Gentoo._LATEST_TXT] | awk["NR==2{print}"] | cut["-f2", "-d="] + )().strip() + _version = datetime.utcfromtimestamp(int(_version)).strftime("%Y-%m-%d") except ProcessExecutionError as proc_ex: _version = "unknown" LOG.error("Could not determine timestamp: %s", str(proc_ex)) @@ -103,8 +103,9 @@ def version(self): @property def remote(self): """Get a remote URL of the requested container.""" - return "http://distfiles.gentoo.org/releases/amd64/autobuilds/{0}" \ - .format(self.src_file) + return "http://distfiles.gentoo.org/releases/amd64/autobuilds/{0}".format( + self.src_file + ) def is_valid(container, path): @@ -120,14 +121,14 @@ def is_valid(container, path): """ try: tmp_hash_path = container.filename + ".hash" - with open(tmp_hash_path, 'r') as tmp_file: + with open(tmp_hash_path, "r") as tmp_file: tmp_hash = tmp_file.readline() except IOError: LOG.info("No .hash-file in the tmp-directory.") container_hash_path = local.path(path) / "gentoo.tar.bz2.hash" if container_hash_path.exists(): - with open(container_hash_path, 'r') as hash_file: + with open(container_hash_path, "r") as hash_file: container_hash = hash_file.readline() return container_hash == tmp_hash 
return False @@ -159,14 +160,13 @@ def unpack(container, path): Wget(container.remote, name) uchroot = no_args() - uchroot = uchroot["-E", "-A", "-C", "-r", "/", "-w", - os.path.abspath("."), "--"] + uchroot = uchroot["-E", "-A", "-C", "-r", "/", "-w", os.path.abspath("."), "--"] # Check, if we need erlent support for this archive. has_erlent = bash[ - "-c", - "tar --list -f './{0}' | grep --silent '.erlent'".format(name)] - has_erlent = (has_erlent & TF) + "-c", "tar --list -f './{0}' | grep --silent '.erlent'".format(name) + ] + has_erlent = has_erlent & TF untar = local["/bin/tar"]["xf", "./" + name] if not has_erlent: @@ -177,8 +177,9 @@ def unpack(container, path): if not os.path.samefile(name, container.filename): rm(name) else: - LOG.warning("File contents do not match: %s != %s", name, - container.filename) + LOG.warning( + "File contents do not match: %s != %s", name, container.filename + ) cp(container.filename + ".hash", path) diff --git a/benchbuild/utils/db.py b/benchbuild/utils/db.py index 26adc4e54..1d193dc5b 100644 --- a/benchbuild/utils/db.py +++ b/benchbuild/utils/db.py @@ -1,4 +1,5 @@ """Database support module for the benchbuild study.""" + import logging from benchbuild.settings import CFG @@ -7,9 +8,8 @@ def validate(func): - def validate_run_func(run, session, *args, **kwargs): - if run.status == 'failed': + if run.status == "failed": LOG.debug("Run failed. Execution of '%s' cancelled", str(func)) return None @@ -47,7 +47,7 @@ def create_run(cmd, project, exp, grp): project_group=project.group, experiment_name=exp.name, run_group=str(grp), - experiment_group=exp.id + experiment_group=exp.id, ) session.add(run) session.commit() @@ -91,10 +91,13 @@ def persist_project(project): project: The project we want to persist. 
""" from benchbuild.utils.schema import Project, Session + session = Session() - projects = session.query(Project) \ - .filter(Project.name == project.name) \ + projects = ( + session.query(Project) + .filter(Project.name == project.name) .filter(Project.group_name == project.group) + ) name = project.name desc = project.__doc__ @@ -104,7 +107,7 @@ def persist_project(project): try: src_url = project.src_uri except AttributeError: - src_url = 'unknown' + src_url = "unknown" if projects.count() == 0: newp = Project() @@ -122,7 +125,7 @@ def persist_project(project): "src_url": src_url, "domain": domain, "group_name": group_name, - "version": version + "version": version, } projects.update(newp_value) @@ -158,7 +161,7 @@ def persist_experiment(experiment): session.add(newe) ret = newe else: - exps.update({'name': name, 'description': desc}) + exps.update({"name": name, "description": desc}) ret = exps.first() try: @@ -184,15 +187,9 @@ def persist_time(run, session, timings): from benchbuild.utils import schema as s for timing in timings: - session.add( - s.Metric(name="time.user_s", value=timing[0], run_id=run.id) - ) - session.add( - s.Metric(name="time.system_s", value=timing[1], run_id=run.id) - ) - session.add( - s.Metric(name="time.real_s", value=timing[2], run_id=run.id) - ) + session.add(s.Metric(name="time.user_s", value=timing[0], run_id=run.id)) + session.add(s.Metric(name="time.system_s", value=timing[1], run_id=run.id)) + session.add(s.Metric(name="time.real_s", value=timing[2], run_id=run.id)) def persist_perf(run, session, svg_path): @@ -210,11 +207,9 @@ def persist_perf(run, session, svg_path): # pylint: disable=import-outside-toplevel from benchbuild.utils import schema as s - with open(svg_path, 'r') as svg_file: + with open(svg_path, "r") as svg_file: svg_data = svg_file.read() - session.add( - s.Metadata(name="perf.flamegraph", value=svg_data, run_id=run.id) - ) + session.add(s.Metadata(name="perf.flamegraph", value=svg_data, run_id=run.id)) def 
persist_compilestats(run, session, stats): diff --git a/benchbuild/utils/dict.py b/benchbuild/utils/dict.py index 344158aa6..428da1c1a 100644 --- a/benchbuild/utils/dict.py +++ b/benchbuild/utils/dict.py @@ -1,4 +1,5 @@ """An extensible dictionary.""" + from contextlib import contextmanager @@ -7,12 +8,12 @@ def extend_as_list(original_dict, **kwargs): for k, v in kwargs.items(): if k in original_dict: oldv = new_dict[k] - if not hasattr(oldv, 'extend'): + if not hasattr(oldv, "extend"): oldv = [oldv] if isinstance(v, str): v = [v] - if hasattr(v, '__iter__'): + if hasattr(v, "__iter__"): oldv.extend(v) else: oldv.append(v) diff --git a/benchbuild/utils/download.py b/benchbuild/utils/download.py index 9264a1586..2b5200e5a 100644 --- a/benchbuild/utils/download.py +++ b/benchbuild/utils/download.py @@ -10,6 +10,7 @@ Supported methods: Copy, CopyNoFail, Wget, Git, Svn, Rsync """ + import hashlib import logging import os @@ -37,13 +38,13 @@ def get_hash_of_dirs(directory: str) -> str: """ sha = hashlib.sha512() if not os.path.exists(directory): - raise ValueError('Directory does not exist') + raise ValueError("Directory does not exist") for root, _, files in os.walk(directory): for name in files: filepath = local.path(root) / name if filepath.exists(): - with open(filepath, 'rb') as next_file: + with open(filepath, "rb") as next_file: for line in next_file: sha.update(line) return sha.hexdigest() @@ -68,18 +69,20 @@ def source_required(src_file: local.path) -> bool: LOG.debug("Hash file location: %s", hash_file) if hash_file.exists(): new_hash = get_hash_of_dirs(src_file) - with open(hash_file, 'r') as h_file: + with open(hash_file, "r") as h_file: old_hash = h_file.readline() required = not new_hash == old_hash if required: from benchbuild.utils.cmd import rm + rm("-r", src_file) rm(hash_file) if required: LOG.info("Source required for: %s", src_file) LOG.debug( - "Reason: src-exists: %s hash-exists: %s", src_file.exists(), - hash_file.exists() + "Reason: src-exists: 
%s hash-exists: %s", + src_file.exists(), + hash_file.exists(), ) return required @@ -93,7 +96,7 @@ def update_hash(src_file: local.path) -> str: root: The path of the given file. """ hash_file = local.path(src_file) + ".hash" - with open(hash_file, 'w') as h_file: + with open(hash_file, "w") as h_file: new_hash = get_hash_of_dirs(src_file) h_file.write(str(new_hash)) return new_hash @@ -108,6 +111,7 @@ def Copy(From, To): To (str): Path to the TARGET. """ from benchbuild.utils.cmd import cp + cp("-ar", "--reflink=auto", From, To) @@ -130,7 +134,7 @@ def CopyNoFail(src, root=None): src_path = local.path(root) / src if src_path.exists(): - Copy(src_path, '.') + Copy(src_path, ".") return True return False @@ -179,7 +183,6 @@ def with_wget(url_dict=None, target_file=None): """ def wget_decorator(cls): - def download_impl(self): """Download the selected version from the url_dict value.""" t_file = target_file if target_file else self.SRC_FILE @@ -212,26 +215,26 @@ def __clone_needed__(repository: str, directory: str) -> bool: """ from benchbuild.utils.cmd import git, rm - git_dir = local.path(directory) / '.git' + git_dir = local.path(directory) / ".git" if not git_dir.exists(): return True requires_clone = True with local.cwd(directory): - repo_origin_url = git('config', '--get', 'remote.origin.url') - requires_clone = repo_origin_url.strip('\n') != repository + repo_origin_url = git("config", "--get", "remote.origin.url") + requires_clone = repo_origin_url.strip("\n") != repository if requires_clone: - rm('-r', directory) + rm("-r", directory) return requires_clone def Git( repository: str, directory: str, - rev: str = '', - prefix: str = '', - shallow_clone: bool = True + rev: str = "", + prefix: str = "", + shallow_clone: bool = True, ) -> str: """ Get a clone of the given repo @@ -251,7 +254,7 @@ def Git( src_dir = local.path(repository_loc) / directory tgt_dir = local.path(local.cwd) / directory - lock_f = local.path(repository_loc + directory + '.lock') + 
lock_f = local.path(repository_loc + directory + ".lock") extra_param = [] if shallow_clone: @@ -259,15 +262,16 @@ def Git( extra_param.append("1") from benchbuild.utils.cmd import git, mkdir + if __clone_needed__(repository, src_dir): git("clone", extra_param, repository, src_dir) else: - worktree_rev = rev if rev else 'HEAD' + worktree_rev = rev if rev else "HEAD" with local.cwd(src_dir): - mkdir('-p', tgt_dir) + mkdir("-p", tgt_dir) with flocked(lock_f): - git('worktree', 'prune') - git('worktree', 'add', '--detach', tgt_dir, worktree_rev) + git("worktree", "prune") + git("worktree", "add", "--detach", tgt_dir, worktree_rev) return repository_loc @@ -280,7 +284,7 @@ def with_git( clone: bool = True, rev_list_args: Optional[List[str]] = None, shallow_clone: bool = True, - version_filter: Callable[[str], bool] = lambda version: True + version_filter: Callable[[str], bool] = lambda version: True, ) -> Callable[[AnyC], AnyC]: """ Decorate a project class with git-based version information. 
@@ -331,11 +335,18 @@ def versions_impl(): git("clone", repo, repo_loc) with local.cwd(repo_loc): - rev_list = git( - "rev-list", "--abbrev-commit", "--abbrev=10", refspec, - *rev_list_args - ).strip().split('\n') - git("rev-parse", "--short=10", refspec).strip().split('\n') + rev_list = ( + git( + "rev-list", + "--abbrev-commit", + "--abbrev=10", + refspec, + *rev_list_args, + ) + .strip() + .split("\n") + ) + git("rev-parse", "--short=10", refspec).strip().split("\n") if limit: return list(filter(version_filter, rev_list))[:limit] @@ -346,12 +357,7 @@ def download_impl(self): """Download the selected version.""" nonlocal target_dir, git directory = cls.SRC_FILE if target_dir is None else target_dir - Git( - self.repository, - directory, - self.version, - shallow_clone=shallow_clone - ) + Git(self.repository, directory, self.version, shallow_clone=shallow_clone) cls.versions = versions_impl cls.download = download_impl @@ -380,6 +386,7 @@ def Svn(url, fname, to=None): return from benchbuild.utils.cmd import svn + svn("co", url, src_dir) update_hash(src_dir) Copy(src_dir, ".") diff --git a/benchbuild/utils/log.py b/benchbuild/utils/log.py index 03e6392cf..defc7b0c6 100644 --- a/benchbuild/utils/log.py +++ b/benchbuild/utils/log.py @@ -7,18 +7,18 @@ def __create_handler__() -> RichHandler: - force_terminal = bool(settings.CFG['force_tty']) + force_terminal = bool(settings.CFG["force_tty"]) return RichHandler( rich_tracebacks=True, show_time=False, show_level=False, - console=Console(stderr=True, force_terminal=force_terminal) + console=Console(stderr=True, force_terminal=force_terminal), ) def configure_plumbum_log(): - plumbum_format = logging.Formatter('$> %(message)s') + plumbum_format = logging.Formatter("$> %(message)s") handler = __create_handler__() handler.setFormatter(plumbum_format) @@ -42,7 +42,7 @@ def configure(): 3: logging.INFO, 2: logging.WARNING, 1: logging.ERROR, - 0: logging.ERROR + 0: logging.ERROR, } handler = __create_handler__() @@ -50,13 
+50,13 @@ def configure(): root_logger = logging.getLogger() if settings.CFG["debug"]: details_format = logging.Formatter( - '%(name)s (%(filename)s:%(lineno)s) [%(levelname)s] %(message)s' + "%(name)s (%(filename)s:%(lineno)s) [%(levelname)s] %(message)s" ) details_hdl = handler details_hdl.setFormatter(details_format) root_logger.addHandler(details_hdl) else: - brief_format = logging.Formatter('%(message)s') + brief_format = logging.Formatter("%(message)s") console_hdl = handler console_hdl.setFormatter(brief_format) root_logger.addHandler(console_hdl) diff --git a/benchbuild/utils/path.py b/benchbuild/utils/path.py index 0ebb9958c..4d1ff6bd6 100644 --- a/benchbuild/utils/path.py +++ b/benchbuild/utils/path.py @@ -50,7 +50,7 @@ def __self__() -> str: __ROOT__ = __self__() -__RESOURCES_ROOT__ = os.path.join(__ROOT__, '..', 'res') +__RESOURCES_ROOT__ = os.path.join(__ROOT__, "..", "res") def template_files(path: str, exts: Optional[List[str]] = None) -> List[str]: @@ -101,7 +101,7 @@ def template_str(template: str) -> str: template content as a single string. """ tmpl_file = template_path(template) - with open(tmpl_file, mode='r') as tmpl_strm: + with open(tmpl_file, mode="r") as tmpl_strm: return "".join(tmpl_strm.readlines()) @@ -119,10 +119,13 @@ def mkdir_interactive(dirpath: str) -> None: if os.path.exists(dirpath): return - response = ui.ask("The directory {dirname} does not exist yet. " - "Should I create it?".format(dirname=dirpath), - default_answer=True, - default_answer_str="yes") + response = ui.ask( + "The directory {dirname} does not exist yet. Should I create it?".format( + dirname=dirpath + ), + default_answer=True, + default_answer_str="yes", + ) if response: mkdir("-p", dirpath) @@ -143,7 +146,7 @@ def flocked(filename: str, lock_type: int = fcntl.LOCK_EX): Yields: the opened file descriptor we hold the lock for. 
""" - with open(filename, 'a') as fd: + with open(filename, "a") as fd: try: fcntl.flock(fd, lock_type) yield fd diff --git a/benchbuild/utils/requirements.py b/benchbuild/utils/requirements.py index 3b62f2138..86255c3b9 100644 --- a/benchbuild/utils/requirements.py +++ b/benchbuild/utils/requirements.py @@ -12,7 +12,7 @@ LOG = logging.getLogger(__name__) -RequirementSubType = tp.TypeVar("RequirementSubType", bound='Requirement') +RequirementSubType = tp.TypeVar("RequirementSubType", bound="Requirement") @attr.s @@ -36,8 +36,9 @@ def to_cli_option(self) -> str: @classmethod @abc.abstractmethod def merge_requirements( - cls: tp.Type[RequirementSubType], lhs_option: RequirementSubType, - rhs_option: RequirementSubType + cls: tp.Type[RequirementSubType], + lhs_option: RequirementSubType, + rhs_option: RequirementSubType, ) -> RequirementSubType: """ Merge the requirements of the same type together. @@ -88,6 +89,7 @@ class SlurmCoresPerSocket(SlurmRequirement): cores per socket. See additional information under -B option in the slurm documentation. Only works when task/affinity plugin is enabled. """ + cores: int = attr.ib() def to_slurm_cli_opt(self) -> str: @@ -95,9 +97,8 @@ def to_slurm_cli_opt(self) -> str: @classmethod def merge_requirements( - cls, lhs_option: 'SlurmCoresPerSocket', - rhs_option: 'SlurmCoresPerSocket' - ) -> 'SlurmCoresPerSocket': + cls, lhs_option: "SlurmCoresPerSocket", rhs_option: "SlurmCoresPerSocket" + ) -> "SlurmCoresPerSocket": """ Merge the requirements of the same type together. """ @@ -120,8 +121,8 @@ def __repr__(self) -> str: @classmethod def merge_requirements( - cls, lhs_option: 'SlurmExclusive', rhs_option: 'SlurmExclusive' - ) -> 'SlurmExclusive': + cls, lhs_option: "SlurmExclusive", rhs_option: "SlurmExclusive" + ) -> "SlurmExclusive": """ Merge the requirements of the same type together. """ @@ -137,6 +138,7 @@ class SlurmNiceness(SlurmRequirement): range is +/- 2147483645. 
Only privileged users can specify a negative adjustment. """ + niceness: int = attr.ib() def to_slurm_cli_opt(self) -> str: @@ -144,8 +146,8 @@ def to_slurm_cli_opt(self) -> str: @classmethod def merge_requirements( - cls, lhs_option: 'SlurmNiceness', rhs_option: 'SlurmNiceness' - ) -> 'SlurmNiceness': + cls, lhs_option: "SlurmNiceness", rhs_option: "SlurmNiceness" + ) -> "SlurmNiceness": """ Merge the requirements of the same type together. """ @@ -196,8 +198,8 @@ def __repr__(self) -> str: @classmethod def merge_requirements( - cls, lhs_option: 'SlurmHint', rhs_option: 'SlurmHint' - ) -> 'SlurmHint': + cls, lhs_option: "SlurmHint", rhs_option: "SlurmHint" + ) -> "SlurmHint": """ Merge the requirements of the same type together. """ @@ -205,9 +207,7 @@ def merge_requirements( combined_hints |= lhs_option.hints | rhs_option.hints if not cls.__hints_not_mutually_exclusive(combined_hints): - raise ValueError( - "Two mutally exclusive hints for slurm have be specified." - ) + raise ValueError("Two mutally exclusive hints for slurm have be specified.") return SlurmHint(combined_hints) @@ -221,13 +221,13 @@ def __hints_not_mutually_exclusive(hints: tp.Set[SlurmHints]) -> bool: True, if no mutally exclusive hints are in the list """ if ( - SlurmHint.SlurmHints.compute_bound in hints and - SlurmHint.SlurmHints.memory_bound in hints + SlurmHint.SlurmHints.compute_bound in hints + and SlurmHint.SlurmHints.memory_bound in hints ): return False if ( - SlurmHint.SlurmHints.nomultithread in hints and - SlurmHint.SlurmHints.multithread in hints + SlurmHint.SlurmHints.nomultithread in hints + and SlurmHint.SlurmHints.multithread in hints ): return False @@ -260,14 +260,14 @@ def _convert_to_time_tuple(time_specifier: str) -> tp.Tuple[int, int, int, int]: minutes = 0 seconds = 0 - if time_specifier.count('-'): + if time_specifier.count("-"): with_days = True - days = int(time_specifier.split('-')[0]) - time_specifier = time_specifier.split('-')[1] + days = 
int(time_specifier.split("-")[0]) + time_specifier = time_specifier.split("-")[1] else: with_days = False - num_colon = time_specifier.count(':') + num_colon = time_specifier.count(":") if num_colon == 0: if with_days: @@ -276,15 +276,15 @@ def _convert_to_time_tuple(time_specifier: str) -> tp.Tuple[int, int, int, int]: minutes = int(time_specifier) elif num_colon == 1: if with_days: - hours = int(time_specifier.split(':')[0]) - minutes = int(time_specifier.split(':')[1]) + hours = int(time_specifier.split(":")[0]) + minutes = int(time_specifier.split(":")[1]) else: - minutes = int(time_specifier.split(':')[0]) - seconds = int(time_specifier.split(':')[1]) + minutes = int(time_specifier.split(":")[0]) + seconds = int(time_specifier.split(":")[1]) elif num_colon == 2: - hours = int(time_specifier.split(':')[0]) - minutes = int(time_specifier.split(':')[1]) - seconds = int(time_specifier.split(':')[2]) + hours = int(time_specifier.split(":")[0]) + minutes = int(time_specifier.split(":")[1]) + seconds = int(time_specifier.split(":")[2]) return (days, hours, minutes, seconds) @@ -298,8 +298,8 @@ class SlurmTime(SlurmRequirement): time formats include "minutes", "minutes:seconds", "hours:minutes:seconds", "days-hours", "days-hours:minutes" and "days-hours:minutes:seconds". """ - timelimit: tp.Tuple[int, int, int, - int] = attr.ib(converter=_convert_to_time_tuple) + + timelimit: tp.Tuple[int, int, int, int] = attr.ib(converter=_convert_to_time_tuple) def to_slurm_time_format(self) -> str: """ @@ -333,8 +333,8 @@ def to_slurm_cli_opt(self) -> str: @classmethod def merge_requirements( - cls, lhs_option: 'SlurmTime', rhs_option: 'SlurmTime' - ) -> 'SlurmTime': + cls, lhs_option: "SlurmTime", rhs_option: "SlurmTime" + ) -> "SlurmTime": """ Merge the requirements of the same type together. 
""" @@ -374,7 +374,7 @@ def _to_bytes(byte_str: str) -> int: >>> _to_bytes("10G") 10737418240 """ - if (match := _BYTE_RGX.search(byte_str)): + if match := _BYTE_RGX.search(byte_str): size = int(match.group("size")) byte_suffix = match.group("byte_suffix") return size * _get_byte_size_factor(byte_suffix) @@ -423,8 +423,8 @@ def to_slurm_cli_opt(self) -> str: @classmethod def merge_requirements( - cls, lhs_option: 'SlurmMem', rhs_option: 'SlurmMem' - ) -> 'SlurmMem': + cls, lhs_option: "SlurmMem", rhs_option: "SlurmMem" + ) -> "SlurmMem": """ Merge the requirements of the same type together. """ @@ -443,9 +443,7 @@ def merge_slurm_options( key = type(opt) if key in merged_options: current_opt = merged_options[key] - merged_options[key] = current_opt.merge_requirements( - current_opt, opt - ) + merged_options[key] = current_opt.merge_requirements(current_opt, opt) else: merged_options[key] = opt @@ -458,13 +456,13 @@ def get_slurm_options_from_config() -> tp.List[Requirement]: config. """ slurm_options: tp.List[Requirement] = [] - if CFG['slurm']['exclusive']: + if CFG["slurm"]["exclusive"]: slurm_options.append(SlurmExclusive()) - if not CFG['slurm']['multithread']: + if not CFG["slurm"]["multithread"]: slurm_options.append(SlurmHint({SlurmHint.SlurmHints.nomultithread})) - slurm_options.append(SlurmTime(str(CFG['slurm']['timelimit']))) - slurm_options.append(SlurmNiceness(int(CFG['slurm']['nice']))) + slurm_options.append(SlurmTime(str(CFG["slurm"]["timelimit"]))) + slurm_options.append(SlurmNiceness(int(CFG["slurm"]["nice"]))) return slurm_options diff --git a/benchbuild/utils/revision_ranges.py b/benchbuild/utils/revision_ranges.py index 085f91629..6dfae4288 100644 --- a/benchbuild/utils/revision_ranges.py +++ b/benchbuild/utils/revision_ranges.py @@ -31,16 +31,18 @@ def _get_git_for_path(repo_path: str) -> LocalCommand: return local_git["-C", repo_path] -def _get_all_revisions_between(c_start: str, c_end: str, - git: LocalCommand) -> tp.List[str]: +def 
_get_all_revisions_between( + c_start: str, c_end: str, git: LocalCommand +) -> tp.List[str]: """ Returns a list of all revisions that are both descendants of c_start, and ancestors of c_end. """ result = [c_start] result.extend( - git("log", "--pretty=%H", "--ancestry-path", - f"{c_start}..{c_end}").strip().split() + git("log", "--pretty=%H", "--ancestry-path", f"{c_start}..{c_end}") + .strip() + .split() ) return result @@ -120,9 +122,7 @@ class RevisionRange(AbstractRevisionRange): comment: See :func:`AbstractRevisionRange.comment()`. """ - def __init__( - self, id_start: str, id_end: str, comment: tp.Optional[str] = None - ): + def __init__(self, id_start: str, id_end: str, comment: tp.Optional[str] = None): super().__init__(comment) self.__id_start = id_start self.__id_end = id_end @@ -160,9 +160,10 @@ class CommitState(IntFlag): def _find_blocked_commits( - commit: 'pygit2.Commit', good: tp.List['pygit2.Commit'], - bad: tp.List['pygit2.Commit'] -) -> tp.List['pygit2.Commit']: + commit: "pygit2.Commit", + good: tp.List["pygit2.Commit"], + bad: tp.List["pygit2.Commit"], +) -> tp.List["pygit2.Commit"]: """ Find all commits affected by a bad commit and not yet "fixed" by a good commit. This is done by performing a backwards search starting @@ -177,8 +178,8 @@ def _find_blocked_commits( All transitive parents of commit that have an ancestor from bad that is not fixed by some commit from good. 
""" - stack: tp.List['pygit2.Commit'] = [commit] - blocked: tp.Dict['pygit2.Commit', CommitState] = {} + stack: tp.List["pygit2.Commit"] = [commit] + blocked: tp.Dict["pygit2.Commit", CommitState] = {} while stack: current_commit = stack.pop() @@ -209,7 +210,8 @@ def _find_blocked_commits( blocked[current_commit] |= CommitState.BAD return [ - commit for commit in blocked + commit + for commit in blocked # for more aggressive blocking use: # if blocked[commit] & CommitState.BUGGY if blocked[commit] == CommitState.BAD @@ -232,7 +234,7 @@ def __init__( self, bad_commits: tp.List[str], good_commits: tp.List[str], - comment: tp.Optional[str] = None + comment: tp.Optional[str] = None, ): super().__init__(comment) self.__bad_commit_ids = bad_commits @@ -242,6 +244,7 @@ def __init__( def init_cache(self, repo_path: str) -> None: import pygit2 # pylint: disable=import-outside-toplevel + self.__revision_list = [] repo = pygit2.Repository(repo_path) git = _get_git_for_path(repo_path) @@ -252,11 +255,14 @@ def init_cache(self, repo_path: str) -> None: # start search from all branch heads heads = git("show-ref", "--heads", "-s").strip().split("\n") for head in heads: - self.__revision_list.extend([ - str(commit.id) for commit in _find_blocked_commits( - repo.get(head), good_commits, bad_commits - ) - ]) + self.__revision_list.extend( + [ + str(commit.id) + for commit in _find_blocked_commits( + repo.get(head), good_commits, bad_commits + ) + ] + ) @property def good_commits(self) -> tp.List[str]: @@ -277,7 +283,7 @@ def __str__(self) -> str: return f"{','.join(self.bad_commits)}\\{','.join(self.good_commits)}" -class block_revisions(): # pylint: disable=invalid-name +class block_revisions: # pylint: disable=invalid-name """ Decorator for git sources for blacklisting/blocking revisions. 
@@ -293,10 +299,7 @@ def __init__(self, blocks: tp.List[AbstractRevisionRange]) -> None: self.__blocks = blocks def __call__(self, git_source: Git) -> Git: - - def is_blocked_revision_impl( - rev_id: str - ) -> tp.Tuple[bool, tp.Optional[str]]: + def is_blocked_revision_impl(rev_id: str) -> tp.Tuple[bool, tp.Optional[str]]: """ Checks whether a revision is blocked or not. Also returns the reason for the block if available. diff --git a/benchbuild/utils/run.py b/benchbuild/utils/run.py index c0781208c..263d7e932 100644 --- a/benchbuild/utils/run.py +++ b/benchbuild/utils/run.py @@ -1,4 +1,5 @@ """Experiment helpers.""" + import datetime import functools import logging @@ -18,9 +19,7 @@ class WatchableCommand(Protocol): - - def __call__(self, *args: t.Any, **kwargs: t.Any) -> CommandResult: - ... + def __call__(self, *args: t.Any, **kwargs: t.Any) -> CommandResult: ... CFG = settings.CFG @@ -71,7 +70,7 @@ def __begin(self, command: BaseCommand, project, experiment, group): db_run, session = create_run(command, project, experiment, group) db_run.begin = datetime.datetime.now() - db_run.status = 'running' + db_run.status = "running" log = s.RunLog() log.run_id = db_run.id log.begin = datetime.datetime.now() @@ -110,7 +109,7 @@ def __end(self, stdout, stderr): log.end = datetime.datetime.now() self.db_run.end = datetime.datetime.now() - self.db_run.status = 'completed' + self.db_run.status = "completed" self.session.add(log) self.session.add(self.db_run) @@ -133,6 +132,7 @@ def __fail(self, retcode, stdout, stderr): # pylint: disable=import-outside-toplevel from benchbuild.utils.schema import RunLog + run_id = self.db_run.id log = self.session.query(RunLog).filter(RunLog.run_id == run_id).one() @@ -142,7 +142,7 @@ def __fail(self, retcode, stdout, stderr): log.end = datetime.datetime.now() self.db_run.end = datetime.datetime.now() - self.db_run.status = 'failed' + self.db_run.status = "failed" self.failed = True self.session.add(log) self.session.add(self.db_run) @@ 
-160,9 +160,7 @@ def __fail(self, retcode, stdout, stderr): payload = attr.ib(init=False, default=None, repr=False) def __attrs_post_init__(self): - self.__begin( - self.cmd, self.project, self.experiment, self.project.run_uuid - ) + self.__begin(self.cmd, self.project, self.experiment, self.project.run_uuid) signals.handlers.register(self.__fail, 15, "SIGTERM", "SIGTERM") if CFG["db"]["enabled"]: @@ -188,14 +186,13 @@ def __call__(self, *args, expected_retcode=0, ri=None, **kwargs): with local.env(**cmd_env): try: bin_name = sys.argv[0] - retcode, stdout, stderr = \ - self.cmd & TEE(retcode=expected_retcode) + retcode, stdout, stderr = self.cmd & TEE(retcode=expected_retcode) f_stdout = bin_name + ".stdout" f_stderr = bin_name + ".stderr" - with open(f_stdout, 'w') as fd_stdout: + with open(f_stdout, "w") as fd_stdout: fd_stdout.write(stdout) - with open(f_stderr, 'w') as fd_stderr: + with open(f_stderr, "w") as fd_stderr: fd_stderr.write(stderr) self.retcode = retcode @@ -246,7 +243,7 @@ def begin_run_group(project, experiment): group, session = create_run_group(project, experiment) group.begin = datetime.datetime.now() - group.status = 'running' + group.status = "running" session.commit() return group, session @@ -261,7 +258,7 @@ def end_run_group(group, session): session: The database transaction we will finish. """ group.end = datetime.datetime.now() - group.status = 'completed' + group.status = "completed" session.commit() @@ -274,7 +271,7 @@ def fail_run_group(group, session): session: The database transaction we will finish. 
""" group.end = datetime.datetime.now() - group.status = 'failed' + group.status = "failed" session.commit() @@ -336,10 +333,10 @@ def watch(command: BaseCommand) -> WatchableCommand: def f(*args: t.Any, retcode: int = 0, **kwargs: t.Any) -> CommandResult: final_command = command[args] - buffered = not bool(CFG['force_watch_unbuffered']) + buffered = not bool(CFG["force_watch_unbuffered"]) return t.cast( CommandResult, - final_command.run_tee(retcode=retcode, buffered=buffered, **kwargs) + final_command.run_tee(retcode=retcode, buffered=buffered, **kwargs), ) return f @@ -358,6 +355,7 @@ def with_env_recursive(cmd: BaseCommand, **envvars: str) -> BaseCommand: """ # pylint: disable=import-outside-toplevel from plumbum.commands.base import BoundCommand, BoundEnvCommand + if isinstance(cmd, BoundCommand): cmd.cmd = with_env_recursive(cmd.cmd, **envvars) elif isinstance(cmd, BoundEnvCommand): @@ -366,7 +364,7 @@ def with_env_recursive(cmd: BaseCommand, **envvars: str) -> BaseCommand: return cmd -def in_builddir(sub: str = '.'): +def in_builddir(sub: str = "."): """ Decorate a project phase with a local working directory change. diff --git a/benchbuild/utils/schedule_tree.py b/benchbuild/utils/schedule_tree.py index 0e0598368..701f2438b 100644 --- a/benchbuild/utils/schedule_tree.py +++ b/benchbuild/utils/schedule_tree.py @@ -1,4 +1,5 @@ -""" Parsing utilities for Polly's ScheduleTree representation. 
""" +"""Parsing utilities for Polly's ScheduleTree representation.""" + import logging import textwrap as t @@ -12,7 +13,7 @@ class Node: tok = attr.ib() - def indent(self, level=0, idt=' '): + def indent(self, level=0, idt=" "): val = self.tok[2] if not isinstance(self.tok[2], str): val = self.tok[2].indent(1) @@ -21,8 +22,7 @@ def indent(self, level=0, idt=' '): @attr.s class CoincidenceNode(Node): - - def indent(self, level=0, idt=' '): + def indent(self, level=0, idt=" "): ret = [str(child) for child in self.tok[3]] ret = ",".join(ret) @@ -31,13 +31,12 @@ def indent(self, level=0, idt=' '): @attr.s class RootNode(Node): - - def indent(self, level=0, idt=' '): + def indent(self, level=0, idt=" "): ret = [] ret = [child.indent(level + 2) for child in self.tok[1]] ret = ",\n".join(ret) - return t.indent('{{\n{:s}\n}}'.format(ret), level * idt) + return t.indent("{{\n{:s}\n}}".format(ret), level * idt) def __str__(self): return self.indent(0) @@ -45,20 +44,18 @@ def __str__(self): @attr.s class ChildNode(Node): - - def indent(self, level=0, idt=' '): + def indent(self, level=0, idt=" "): ret = self.tok[0].indent(level) return ret @attr.s class SequenceNode(Node): - - def indent(self, level=0, idt=' '): + def indent(self, level=0, idt=" "): ret = '"{:s}": [\n'.format(self.tok[0]) for child in self.tok[3]: - ret += child.indent(0) + ',\n' - ret += '\n]' + ret += child.indent(0) + ",\n" + ret += "\n]" return t.indent(ret, level * idt) @@ -93,8 +90,18 @@ def indent(self, level=0, idt=' '): SEQ_ELEM_LIST = p.delimitedList(ROOT) SEQUENCE = KW_SEQUENCE + ":" + "[" + p.Group(p.delimitedList(ROOT)) + "]" CHILD = KW_CHILD + ":" + ROOT -CHILD_NODE << (CHILD | COINCIDENT | DOMAIN | EXTENSION | FILTER | MARK | - OPTIONS | PERMUTABLE | SCHEDULE | SEQUENCE) +CHILD_NODE << ( + CHILD + | COINCIDENT + | DOMAIN + | EXTENSION + | FILTER + | MARK + | OPTIONS + | PERMUTABLE + | SCHEDULE + | SEQUENCE +) ROOT << ("{" + p.Group(p.delimitedList(CHILD_NODE)) + "}") CHILD.addParseAction(Node) 
diff --git a/benchbuild/utils/schema.py b/benchbuild/utils/schema.py index 91cd4fdef..2b3ef37fc 100644 --- a/benchbuild/utils/schema.py +++ b/benchbuild/utils/schema.py @@ -57,7 +57,7 @@ def metadata(): def exceptions( error_is_fatal: bool = True, - error_messages: tp.Optional[tp.Dict[Exception, str]] = None + error_messages: tp.Optional[tp.Dict[Exception, str]] = None, ) -> tp.Callable: """ Handle SQLAlchemy exceptions in a sane way. @@ -90,9 +90,7 @@ def exc_wrapper(*args, **kwargs): LOG.error("For developers: (%s) %s", err.__class__, str(err)) if error_is_fatal: sys.exit("Abort, SQL operation failed.") - if not ui.ask( - "I can continue at your own risk, do you want that?" - ): + if not ui.ask("I can continue at your own risk, do you want that?"): raise err return result @@ -107,6 +105,7 @@ class GUID(TypeDecorator): Uses Postgresql's UUID type, otherwise uses CHAR(32), storing as stringified hex values. """ + impl = CHAR as_uuid = False cache_ok = False @@ -116,7 +115,7 @@ def __init__(self, *args, as_uuid=False, **kwargs): super().__init__(*args, **kwargs) def load_dialect_impl(self, dialect): - if dialect.name == 'postgresql': + if dialect.name == "postgresql": return dialect.type_descriptor(UUID(as_uuid=self.as_uuid)) return dialect.type_descriptor(CHAR(32)) @@ -133,12 +132,14 @@ def process_result_value(self, value, dialect): class Run(BASE): """Store a run for each executed test binary.""" - __tablename__ = 'run' + __tablename__ = "run" __table_args__ = ( - ForeignKeyConstraint(['project_name', 'project_group'], - ['project.name', 'project.group_name'], - onupdate="CASCADE", - ondelete="CASCADE"), + ForeignKeyConstraint( + ["project_name", "project_group"], + ["project.name", "project.group_name"], + onupdate="CASCADE", + ondelete="CASCADE", + ), ) id = Column(Integer, primary_key=True) @@ -150,64 +151,65 @@ class Run(BASE): experiment_group = Column( GUID(as_uuid=True), ForeignKey("experiment.id", ondelete="CASCADE", onupdate="CASCADE"), - index=True + 
index=True, ) begin = Column(DateTime(timezone=False)) end = Column(DateTime(timezone=False)) - status = Column(Enum('completed', 'running', 'failed', name="run_state")) + status = Column(Enum("completed", "running", "failed", name="run_state")) metrics = sa.orm.relationship( "Metric", cascade="all, delete-orphan", passive_deletes=True, - passive_updates=True + passive_updates=True, ) logs = sa.orm.relationship( "RunLog", cascade="all, delete-orphan", passive_deletes=True, - passive_updates=True + passive_updates=True, ) stored_data = sa.orm.relationship( "Metadata", cascade="all, delete-orphan", passive_deletes=True, - passive_updates=True + passive_updates=True, ) configurations = sa.orm.relationship( "Config", cascade="all, delete-orphan", passive_deletes=True, - passive_updates=True + passive_updates=True, ) def __repr__(self): - return ("" - ).format(self.project_name, self.status, self.id) + return ("").format( + self.project_name, self.status, self.id + ) class RunGroup(BASE): - """ Store information about a run group. 
""" + """Store information about a run group.""" - __tablename__ = 'rungroup' + __tablename__ = "rungroup" id = Column(GUID(as_uuid=True), primary_key=True, index=True) experiment = Column( GUID(as_uuid=True), ForeignKey("experiment.id", ondelete="CASCADE", onupdate="CASCADE"), - index=True + index=True, ) begin = Column(DateTime(timezone=False)) end = Column(DateTime(timezone=False)) - status = Column(Enum('completed', 'running', 'failed', name="run_state")) + status = Column(Enum("completed", "running", "failed", name="run_state")) class Experiment(BASE): """Store metadata about experiments.""" - __tablename__ = 'experiment' + __tablename__ = "experiment" name = Column(String) description = Column(String) @@ -216,16 +218,13 @@ class Experiment(BASE): end = Column(DateTime(timezone=False)) runs = sa.orm.relationship( - "Run", - cascade="all, delete-orphan", - passive_deletes=True, - passive_updates=True + "Run", cascade="all, delete-orphan", passive_deletes=True, passive_updates=True ) run_groups = sa.orm.relationship( "RunGroup", cascade="all, delete-orphan", passive_deletes=True, - passive_updates=True + passive_updates=True, ) def __repr__(self): @@ -235,7 +234,7 @@ def __repr__(self): class Project(BASE): """Store project metadata.""" - __tablename__ = 'project' + __tablename__ = "project" name = Column(String, primary_key=True) description = Column(String) @@ -245,10 +244,7 @@ class Project(BASE): version = Column(String) runs = sa.orm.relationship( - "Run", - cascade="all, delete-orphan", - passive_deletes=True, - passive_updates=True + "Run", cascade="all, delete-orphan", passive_deletes=True, passive_updates=True ) def __repr__(self): @@ -256,14 +252,14 @@ def __repr__(self): group=self.group_name, domain=self.domain, name=self.name, - version=self.version + version=self.version, ) class Metric(BASE): """Store default metrics, simple name value store.""" - __tablename__ = 'metrics' + __tablename__ = "metrics" name = Column(String, primary_key=True, 
index=True, nullable=False) value = Column(Float) @@ -271,7 +267,7 @@ class Metric(BASE): Integer, ForeignKey("run.id", onupdate="CASCADE", ondelete="CASCADE"), index=True, - primary_key=True + primary_key=True, ) def __repr__(self): @@ -286,13 +282,13 @@ class RunLog(BASE): are stored here. """ - __tablename__ = 'log' + __tablename__ = "log" run_id = Column( Integer, ForeignKey("run.id", onupdate="CASCADE", ondelete="CASCADE"), index=True, - primary_key=True + primary_key=True, ) begin = Column(DateTime(timezone=False)) end = Column(DateTime(timezone=False)) @@ -316,7 +312,7 @@ class Metadata(BASE): Integer, ForeignKey("run.id", onupdate="CASCADE", ondelete="CASCADE"), index=True, - primary_key=True + primary_key=True, ) name = Column(String) value = Column(String) @@ -330,13 +326,13 @@ class Config(BASE): Use it for extended filtering against the run table. """ - __tablename__ = 'config' + __tablename__ = "config" run_id = Column( Integer, ForeignKey("run.id", onupdate="CASCADE", ondelete="CASCADE"), index=True, - primary_key=True + primary_key=True, ) name = Column(String, primary_key=True) value = Column(String) @@ -364,7 +360,6 @@ def needed_schema(connection, meta): class SessionManager: - def connect_engine(self): """ Establish a connection to the database. @@ -379,8 +374,7 @@ def connect_engine(self): return True except sa.exc.OperationalError as opex: LOG.critical( - "Could not connect to the database. The error was: '%s'", - str(opex) + "Could not connect to the database. The error was: '%s'", str(opex) ) return False @@ -401,12 +395,11 @@ def configure_engine(self): @exceptions( error_messages={ - sa.exc.NoSuchModuleError: - "Connect string contained an invalid backend." + sa.exc.NoSuchModuleError: "Connect string contained an invalid backend." 
} ) def __init__(self): - self.__test_mode = bool(settings.CFG['db']['rollback']) + self.__test_mode = bool(settings.CFG["db"]["rollback"]) self.engine = create_engine(str(settings.CFG["db"]["connect_string"])) if not (self.connect_engine() and self.configure_engine()): @@ -424,7 +417,7 @@ def get(self): return sessionmaker(bind=self.connection) def __del__(self): - if hasattr(self, '__transaction') and self.__transaction: + if hasattr(self, "__transaction") and self.__transaction: self.__transaction.rollback() diff --git a/benchbuild/utils/settings.py b/benchbuild/utils/settings.py index 23b38c24d..e96ca50b6 100644 --- a/benchbuild/utils/settings.py +++ b/benchbuild/utils/settings.py @@ -9,6 +9,7 @@ A leaf node in the dictionary tree is represented by an inner node that contains a value key. """ + import copy import logging import os @@ -31,8 +32,7 @@ class Indexable: - - def __getitem__(self: 'Indexable', key: str) -> 'Indexable': + def __getitem__(self: "Indexable", key: str) -> "Indexable": pass @@ -59,11 +59,10 @@ def available_cpu_count() -> int: # cpuset may restrict the number of *available* processors try: match = re.search( - r'(?m)^Cpus_allowed:\s*(.*)$', - open('/proc/self/status').read() + r"(?m)^Cpus_allowed:\s*(.*)$", open("/proc/self/status").read() ) if match: - res = bin(int(match.group(1).replace(',', ''), 16)).count('1') + res = bin(int(match.group(1).replace(",", ""), 16)).count("1") if res > 0: return res except IOError: @@ -72,13 +71,14 @@ def available_cpu_count() -> int: # http://code.google.com/p/psutil/ try: import psutil + return int(psutil.cpu_count()) # psutil.NUM_CPUS on old versions except (ImportError, AttributeError): LOG.debug("Could not get the number of allowed CPUs") # POSIX try: - res = int(os.sysconf('SC_NPROCESSORS_ONLN')) + res = int(os.sysconf("SC_NPROCESSORS_ONLN")) if res > 0: return res @@ -87,14 +87,14 @@ def available_cpu_count() -> int: # Linux try: - res = open('/proc/cpuinfo').read().count('processor\t:') + res = 
open("/proc/cpuinfo").read().count("processor\t:") if res > 0: return res except IOError: LOG.debug("Could not get the number of allowed CPUs") - raise Exception('Can not determine number of CPUs on this system') + raise Exception("Can not determine number of CPUs on this system") def current_available_threads() -> int: @@ -102,7 +102,7 @@ def current_available_threads() -> int: return len(os.sched_getaffinity(0)) -def get_number_of_jobs(config: 'Configuration') -> int: +def get_number_of_jobs(config: "Configuration") -> int: """Returns the number of jobs set in the config.""" jobs_configured = int(config["jobs"]) if jobs_configured == 0: @@ -121,7 +121,7 @@ def escape_yaml(raw_str: str) -> str: Args: raw_str: The unescaped string. """ - escape_list = [char for char in raw_str if char in ['!', '{', '[']] + escape_list = [char for char in raw_str if char in ["!", "{", "["]] if len(escape_list) == 0: return raw_str @@ -201,17 +201,18 @@ def to_env_var(env_var: str, value: tp.Any) -> str: # CFG['container']['strategy']['polyjit'] = { # 'sync': { 'default': True', 'desc': '...' 
} # } -_INNER_NODE_VALUE = schema.Schema({ - schema.Or('default', 'value'): object, - schema.Optional('desc'): str -}) -_INNER_NODE_SCHEMA = schema.Schema({ - schema.And(str, len): { - schema.Or('default', 'value'): object, - schema.Optional('desc'): str, - schema.Optional(str): dict +_INNER_NODE_VALUE = schema.Schema( + {schema.Or("default", "value"): object, schema.Optional("desc"): str} +) +_INNER_NODE_SCHEMA = schema.Schema( + { + schema.And(str, len): { + schema.Or("default", "value"): object, + schema.Optional("desc"): str, + schema.Optional(str): dict, + } } -}) +) class Configuration(Indexable): @@ -234,8 +235,8 @@ def __init__( self, parent_key: str, node: tp.Optional[InnerNode] = None, - parent: tp.Optional['Configuration'] = None, - init: bool = True + parent: tp.Optional["Configuration"] = None, + init: bool = True, ): self.parent = parent self.parent_key = parent_key @@ -260,31 +261,28 @@ def filter_exports(self) -> None: self.__dict__ = selfcopy.__dict__ def store(self, config_file: LocalPath) -> None: - """ Store the configuration dictionary to a file.""" + """Store the configuration dictionary to a file.""" selfcopy = copy.deepcopy(self) selfcopy.filter_exports() - with open(config_file, 'w') as outf: + with open(config_file, "w") as outf: yaml.dump( selfcopy.node, outf, width=80, indent=4, default_flow_style=False, - Dumper=ConfigDumper + Dumper=ConfigDumper, ) def load(self, _from: LocalPath) -> None: """Load the configuration dictionary from file.""" - def load_rec( - inode: tp.Dict[str, tp.Any], config: Configuration - ) -> None: + def load_rec(inode: tp.Dict[str, tp.Any], config: Configuration) -> None: """Recursive part of loading.""" for k in config: - if isinstance(config[k], dict) and \ - k not in ['value', 'default']: + if isinstance(config[k], dict) and k not in ["value", "default"]: if k in inode: load_rec(inode[k], config[k]) else: @@ -292,19 +290,19 @@ def load_rec( else: inode[k] = config[k] - with open(str(_from), 'r') as infile: + 
with open(str(_from), "r") as infile: obj: Configuration = yaml.load(infile, Loader=ConfigLoader) upgrade(obj) load_rec(self.node, obj) - self['config_file'] = os.path.abspath(_from) + self["config_file"] = os.path.abspath(_from) def has_value(self) -> bool: """Check, if the node contains a 'value'.""" - return isinstance(self.node, dict) and 'value' in self.node + return isinstance(self.node, dict) and "value" in self.node def has_default(self) -> bool: """Check, if the node contains a 'default' value.""" - return isinstance(self.node, dict) and 'default' in self.node + return isinstance(self.node, dict) and "default" in self.node def is_leaf(self) -> bool: """Check, if the node is a 'leaf' node.""" @@ -320,18 +318,16 @@ def init_from_env(self) -> None: Otherwise, init our children. """ - if 'default' in self.node: + if "default" in self.node: env_var = self.__to_env_var__().upper() if not self.has_value(): - self.node['value'] = self.node['default'] + self.node["value"] = self.node["default"] env_val = os.getenv(env_var, None) if env_val is not None: try: - self.node['value'] = yaml.load( - str(env_val), Loader=ConfigLoader - ) + self.node["value"] = yaml.load(str(env_val), Loader=ConfigLoader) except ValueError: - self.node['value'] = env_val + self.node["value"] = env_val else: if isinstance(self.node, dict): for k in self.node: @@ -342,20 +338,20 @@ def value(self) -> tp.Any: """Return the node value, if we're a leaf node.""" def validate(node_value: tp.Any) -> tp.Any: - if hasattr(node_value, 'validate'): + if hasattr(node_value, "validate"): node_value.validate() return node_value - if 'value' in self.node: - return validate(self.node['value']) + if "value" in self.node: + return validate(self.node["value"]) return self - def __getitem__(self, key: str) -> 'Configuration': + def __getitem__(self, key: str) -> "Configuration": if key not in self.node: warnings.warn( "Access to non-existing config element: {0}".format(key), category=InvalidConfigKey, - 
stacklevel=2 + stacklevel=2, ) return Configuration(key, init=False) return Configuration(key, parent=self, node=self.node[key], init=False) @@ -364,17 +360,17 @@ def __setitem__(self, key: str, val: tp.Any) -> None: if _INNER_NODE_SCHEMA.is_valid(val) or _INNER_NODE_VALUE.is_valid(val): self.node[key] = val elif key in self.node: - self.node[key]['value'] = val + self.node[key]["value"] = val else: - self.node[key] = {'value': val} + self.node[key] = {"value": val} def __iadd__(self, rhs: tp.Any) -> tp.Any: """Append a value to a list value.""" if not self.has_value(): raise TypeError("Inner configuration node does not support +=.") - value = self.node['value'] - if not hasattr(value, '__iadd__'): + value = self.node["value"] + if not hasattr(value, "__iadd__"): raise TypeError("Configuration node value does not support +=.") value += rhs @@ -383,9 +379,7 @@ def __iadd__(self, rhs: tp.Any) -> tp.Any: def __int__(self) -> int: """Convert the node's value to int, if available.""" if not self.has_value(): - raise ValueError( - 'Inner configuration nodes cannot be converted to int.' 
- ) + raise ValueError("Inner configuration nodes cannot be converted to int.") return int(self.value) def __bool__(self) -> bool: @@ -398,8 +392,8 @@ def __contains__(self, key: str) -> bool: return key in self.node def __str__(self) -> str: - if 'value' in self.node: - return str(self.node['value']) + if "value" in self.node: + return str(self.node["value"]) return str(self.node) def __repr__(self) -> str: @@ -409,9 +403,9 @@ def __repr__(self) -> str: _repr = [] if self.has_value(): - return to_env_var(self.__to_env_var__(), self.node['value']) + return to_env_var(self.__to_env_var__(), self.node["value"]) if self.has_default(): - return to_env_var(self.__to_env_var__(), self.node['default']) + return to_env_var(self.__to_env_var__(), self.node["default"]) for k in self.node: _repr.append(repr(self[k])) @@ -427,9 +421,9 @@ def __to_env_var__(self) -> str: def to_env_dict(self) -> tp.Mapping[str, tp.Any]: """Convert configuration object to a flat dictionary.""" if self.has_value(): - return {self.__to_env_var__(): self.node['value']} + return {self.__to_env_var__(): self.node["value"]} if self.has_default(): - return {self.__to_env_var__(): self.node['default']} + return {self.__to_env_var__(): self.node["default"]} entries: tp.Dict[str, str] = {} for k in self.node: @@ -447,13 +441,14 @@ def convert_components(value: tp.Union[str, tp.List[str]]) -> tp.List[str]: new_value = new_value.split(os.path.sep) else: new_value = [new_value] - new_value = [c for c in new_value if c != ''] + new_value = [c for c in new_value if c != ""] return new_value @attr.s(str=False, frozen=True) class ConfigPath: """Wrapper around paths represented as list of strings.""" + components = attr.ib(converter=convert_components) def validate(self) -> None: @@ -465,7 +460,7 @@ def validate(self) -> None: yes = ui.ask( "Should I create '%s' for you?" 
% path, default_answer=True, - default_answer_str="yes" + default_answer_str="yes", ) if yes: path.mkdir() @@ -488,11 +483,11 @@ def path_representer(dumper, data): """ Represent a ConfigPath object as a scalar YAML node. """ - return dumper.represent_scalar('!create-if-needed', '%s' % data) + return dumper.represent_scalar("!create-if-needed", "%s" % data) def path_constructor(loader, node): - """" + """ Construct a ConfigPath object form a scalar YAML node. """ value = loader.construct_scalar(node) @@ -502,7 +497,7 @@ def find_config( test_file: tp.Optional[str] = None, defaults: tp.Optional[tp.List[str]] = None, - root: str = os.curdir + root: str = os.curdir, ) -> tp.Optional[LocalPath]: """ Find the path to the default config file. @@ -545,7 +540,7 @@ def walk_rec(cfg_name: str, root: str) -> LocalPath: def setup_config( cfg: Configuration, config_filenames: tp.Optional[tp.List[str]] = None, - env_var_name: tp.Optional[str] = None + env_var_name: tp.Optional[str] = None, ) -> None: """ This will initialize the given configuration object. @@ -588,9 +583,7 @@ def update_env(cfg: Configuration) -> None: lib_path = env.get("LD_LIBRARY_PATH", "") lib_path = os.path.pathsep.join(lib_path) if "LD_LIBRARY_PATH" in os.environ: - lib_path = os.path.pathsep.join([ - lib_path, os.environ["LD_LIBRARY_PATH"] - ]) + lib_path = os.path.pathsep.join([lib_path, os.environ["LD_LIBRARY_PATH"]]) os.environ["LD_LIBRARY_PATH"] = lib_path home = env.get("HOME", None) @@ -617,24 +610,26 @@ def upgrade(cfg: Configuration) -> None: "Converting to new connect_string. " "This will *not* be stored in the configuration automatically." 
) - cfg["db"]["connect_string"] = \ + cfg["db"]["connect_string"] = ( "{dialect}://{user}:{password}@{host}:{port}/{name}".format( dialect=cfg["db"]["dialect"]["value"], user=cfg["db"]["user"]["value"], password=cfg["db"]["pass"]["value"], host=cfg["db"]["host"]["value"], port=cfg["db"]["port"]["value"], - name=cfg["db"]["name"]["value"]) + name=cfg["db"]["name"]["value"], + ) + ) def uuid_representer(dumper, data): """Represent a uuid.UUID object as a scalar YAML node.""" - return dumper.represent_scalar('!uuid', '%s' % data) + return dumper.represent_scalar("!uuid", "%s" % data) def uuid_constructor(loader, node): - """"Construct a uuid.UUID object form a scalar YAML node.""" + """Construct a uuid.UUID object from a scalar YAML node.""" value = loader.construct_scalar(node) return uuid.UUID(value) @@ -642,18 +637,18 @@ def uuid_constructor(loader, node): def uuid_add_implicit_resolver(loader=ConfigLoader, dumper=ConfigDumper): """Attach an implicit pattern resolver for UUID objects.""" - uuid_regex = r'^\b[a-f0-9]{8}-\b[a-f0-9]{4}-\b[a-f0-9]{4}-\b[a-f0-9]{4}-\b[a-f0-9]{12}$' + uuid_regex = ( + r"^\b[a-f0-9]{8}-\b[a-f0-9]{4}-\b[a-f0-9]{4}-\b[a-f0-9]{4}-\b[a-f0-9]{12}$" + ) pattern = re.compile(uuid_regex) - yaml.add_implicit_resolver('!uuid', pattern, Loader=loader, Dumper=dumper) + yaml.add_implicit_resolver("!uuid", pattern, Loader=loader, Dumper=dumper) def __init_module__() -> None: yaml.add_representer(uuid.UUID, uuid_representer, Dumper=ConfigDumper) yaml.add_representer(ConfigPath, path_representer, Dumper=ConfigDumper) - yaml.add_constructor('!uuid', uuid_constructor, Loader=ConfigLoader) - yaml.add_constructor( - '!create-if-needed', path_constructor, Loader=ConfigLoader - ) + yaml.add_constructor("!uuid", uuid_constructor, Loader=ConfigLoader) + yaml.add_constructor("!create-if-needed", path_constructor, Loader=ConfigLoader) uuid_add_implicit_resolver() diff --git a/benchbuild/utils/slurm.py b/benchbuild/utils/slurm.py index 165f76613..fa75d101e 100755 
--- a/benchbuild/utils/slurm.py +++ b/benchbuild/utils/slurm.py @@ -4,6 +4,7 @@ This module can be used to generate bash scripts that can be executed by the SLURM controller either as batch or interactive script. """ + import logging import os import sys @@ -29,7 +30,7 @@ LOG = logging.getLogger(__name__) -def script(experiment: 'Experiment', *subcommands: str) -> str: +def script(experiment: "Experiment", *subcommands: str) -> str: """ Prepare a slurm script that executes the experiment for a given project. @@ -39,9 +40,7 @@ def script(experiment: 'Experiment', *subcommands: str) -> str: """ projects = __expand_project_versions__(experiment) benchbuild_c = local[local.path(sys.argv[0])] - slurm_script = local.cwd / experiment.name + "-" + str( - CFG['slurm']['script'] - ) + slurm_script = local.cwd / experiment.name + "-" + str(CFG["slurm"]["script"]) srun = cmd["srun"] srun_args = [] @@ -68,22 +67,24 @@ def __expand_project_versions__(experiment: Experiment) -> tp.Iterable[str]: def __path(): - host_path = os.getenv('PATH', default='') - env = CFG['env'].value - benchbuild_path = list_to_path(env.get('PATH', [])) + host_path = os.getenv("PATH", default="") + env = CFG["env"].value + benchbuild_path = list_to_path(env.get("PATH", [])) return os.path.pathsep.join([benchbuild_path, host_path]) def __ld_library_path(): - host_path = os.getenv('LD_LIBRARY_PATH', default='') - env = CFG['env'].value - benchbuild_path = list_to_path(env.get('LD_LIBRARY_PATH', [])) + host_path = os.getenv("LD_LIBRARY_PATH", default="") + env = CFG["env"].value + benchbuild_path = list_to_path(env.get("LD_LIBRARY_PATH", [])) return os.path.pathsep.join([benchbuild_path, host_path]) def __save__( - script_name: str, benchbuild: BoundCommand, experiment: 'Experiment', - projects: tp.Iterable[str] + script_name: str, + benchbuild: BoundCommand, + experiment: "Experiment", + projects: tp.Iterable[str], ) -> str: """ Dump a bash script that can be given to SLURM. 
@@ -98,12 +99,10 @@ def __save__( jinja2.exceptions.TemplateNotFound: If the modified template location does not exist. """ - logs_dir = Path(str(CFG['slurm']['logs'].value)) - if logs_dir.suffix != '': + logs_dir = Path(str(CFG["slurm"]["logs"].value)) + if logs_dir.suffix != "": logs_dir = logs_dir.parent / logs_dir.stem - LOG.warning( - 'Config slurm:logs should be a folder, defaulting to %s.', logs_dir - ) + LOG.warning("Config slurm:logs should be a folder, defaulting to %s.", logs_dir) if not logs_dir.exists(): logs_dir.mkdir() @@ -116,41 +115,36 @@ def __save__( template_name = local.path(template_name).basename loader = jinja2.FileSystemLoader(template_path) else: - loader = jinja2.PackageLoader('benchbuild', 'res') + loader = jinja2.PackageLoader("benchbuild", "res") - env = jinja2.Environment( - trim_blocks=True, lstrip_blocks=True, loader=loader - ) + env = jinja2.Environment(trim_blocks=True, lstrip_blocks=True, loader=loader) template = env.get_template(template_name) if len(experiment.projects) > 1: project_options = reduce( lambda x, y: merge_slurm_options(x, y.REQUIREMENTS), - experiment.projects, tp.cast(tp.List[Requirement], []) + experiment.projects, + tp.cast(tp.List[Requirement], []), ) elif len(experiment.projects) == 1: project_options = experiment.projects[0].REQUIREMENTS else: project_options = [] - slurm_options = merge_slurm_options( - project_options, experiment.REQUIREMENTS - ) - slurm_options = merge_slurm_options( - slurm_options, get_slurm_options_from_config() - ) + slurm_options = merge_slurm_options(project_options, experiment.REQUIREMENTS) + slurm_options = merge_slurm_options(slurm_options, get_slurm_options_from_config()) - prefix = local.path(str(CFG['slurm']['node_dir']), str(experiment.id)) + prefix = local.path(str(CFG["slurm"]["node_dir"]), str(experiment.id)) - with open(script_name, 'w') as slurm2: + with open(script_name, "w") as slurm2: slurm2.write( template.render( - config=["export " + x for x in 
repr(CFG).split('\n')], + config=["export " + x for x in repr(CFG).split("\n")], clean_lockdir=str(prefix), - clean_lockfile=prefix.with_suffix('.clean-in-progress.lock'), - cpus=int(CFG['slurm']['cpus_per_task']), - lockfile=prefix.with_suffix('.lock'), + clean_lockfile=prefix.with_suffix(".clean-in-progress.lock"), + cpus=int(CFG["slurm"]["cpus_per_task"]), + lockfile=prefix.with_suffix(".lock"), log=logs_dir.resolve() / str(experiment.id), - max_running=int(CFG['slurm']['max_running']), + max_running=int(CFG["slurm"]["max_running"]), name=experiment.name, nice_clean=int(CFG["slurm"]["nice_clean"]), node_command=node_command, @@ -159,9 +153,9 @@ def __save__( projects=projects, slurm_account=str(CFG["slurm"]["account"]), slurm_partition=str(CFG["slurm"]["partition"]), - sbatch_options='\n'.join([ - s_opt.to_option() for s_opt in slurm_options - ]), + sbatch_options="\n".join( + [s_opt.to_option() for s_opt in slurm_options] + ), ) ) diff --git a/benchbuild/utils/tasks.py b/benchbuild/utils/tasks.py index 8f540045b..bd6384b3f 100644 --- a/benchbuild/utils/tasks.py +++ b/benchbuild/utils/tasks.py @@ -1,6 +1,7 @@ """ The task module distributes benchbuild's excution plans over processes. """ + import typing as tp import benchbuild.utils.actions as actns @@ -16,7 +17,7 @@ def execute_plan(plan: Actions) -> StepResults: - """"Execute the plan. + """Execute the plan. Args: plan: The plan we want to execute. 
diff --git a/benchbuild/utils/uchroot.py b/benchbuild/utils/uchroot.py index 5e3f6e091..330c622e4 100644 --- a/benchbuild/utils/uchroot.py +++ b/benchbuild/utils/uchroot.py @@ -28,9 +28,17 @@ def uchroot(*args, **kwargs): def __default_opts__(uid=0, gid=0): return [ - "-C", "-w", "/", "-r", local.cwd, "-u", - str(uid), "-g", - str(gid), "-E", "-A" + "-C", + "-w", + "/", + "-r", + local.cwd, + "-u", + str(uid), + "-g", + str(gid), + "-E", + "-A", ] @@ -62,7 +70,7 @@ def no_args(**kwargs): uchrt = run.with_env_recursive( uchrt, LD_LIBRARY_PATH=path.list_to_path(p_libs), - PATH=path.list_to_path(p_paths) + PATH=path.list_to_path(p_paths), ) return uchrt @@ -83,7 +91,7 @@ def with_mounts(*args, uchroot_cmd_fn=no_args, **kwargs): uchroot_cmd = run.with_env_recursive( uchroot_cmd, LD_LIBRARY_PATH=path.list_to_path(libs + prefix_libs), - PATH=path.list_to_path(paths + prefix_paths) + PATH=path.list_to_path(paths + prefix_paths), ) return uchroot_cmd @@ -101,7 +109,7 @@ def retry( retries: int = 0, max_retries: int = 10, retcode: int = 0, - retry_retcodes: tp.Optional[tp.List[int]] = None + retry_retcodes: tp.Optional[tp.List[int]] = None, ) -> None: try: pb_cmd.run_fg(retcode=retcode) @@ -117,7 +125,7 @@ def retry( retries=retries + 1, max_retries=max_retries, retcode=retcode, - retry_retcodes=retry_retcodes + retry_retcodes=retry_retcodes, ) else: raise @@ -128,15 +136,15 @@ def uretry(cmd: BoundCommand, retcode: int = 0) -> None: cmd, retcode=retcode, retry_retcodes=[ - UchrootEC.MNT_PROC_FAILED.value, UchrootEC.MNT_DEV_FAILED.value, - UchrootEC.MNT_SYS_FAILED.value, UchrootEC.MNT_PTS_FAILED.value - ] + UchrootEC.MNT_PROC_FAILED.value, + UchrootEC.MNT_DEV_FAILED.value, + UchrootEC.MNT_SYS_FAILED.value, + UchrootEC.MNT_PTS_FAILED.value, + ], ) -def clean_env( - uchroot_cmd: BoundCommand, varnames: tp.List[str] -) -> BoundCommand: +def clean_env(uchroot_cmd: BoundCommand, varnames: tp.List[str]) -> BoundCommand: """Returns a uchroot cmd that runs inside a filtered 
environment.""" _env = uchroot_cmd["/usr/bin/env"] __clean_env = _env["-u", ",".join(varnames)] @@ -163,8 +171,7 @@ def mounts(prefix: str, __mounts: tp.List) -> tp.List[str]: return mntpoints -def __mounts__(prefix: str, - _mounts: tp.List) -> tp.Tuple[tp.List[str], tp.List[str]]: +def __mounts__(prefix: str, _mounts: tp.List) -> tp.Tuple[tp.List[str], tp.List[str]]: i = 0 mntpoints = [] uchroot_opts = [] @@ -183,7 +190,7 @@ def __mounts__(prefix: str, def env( - uchroot_mounts: tp.List[str] + uchroot_mounts: tp.List[str], ) -> tp.Tuple[tp.List[local.path], tp.List[local.path]]: """ Compute the environment of the change root for the user. diff --git a/benchbuild/utils/unionfs.py b/benchbuild/utils/unionfs.py index a48b73902..240134da6 100644 --- a/benchbuild/utils/unionfs.py +++ b/benchbuild/utils/unionfs.py @@ -13,7 +13,7 @@ LOG = logging.getLogger(__name__) -def unionfs(rw='rw', ro=None, union='union'): +def unionfs(rw="rw", ro=None, union="union"): """ Decorator for the UnionFS feature. 
@@ -60,8 +60,7 @@ def wrap_in_union_fs_func(project, *args, **kwargs): LOG.debug("UnionFS - Project builddir: %s", project.builddir) if __unionfs_is_active(root=build_dir): LOG.debug( - "UnionFS already active in %s, nesting not supported.", - build_dir + "UnionFS already active in %s, nesting not supported.", build_dir ) return func(project, *args, **kwargs) @@ -75,8 +74,7 @@ def wrap_in_union_fs_func(project, *args, **kwargs): project.builddir = un_dir proc = unionfs_cmd.popen() - while (not __unionfs_is_active(root=un_dir)) and \ - (proc.poll() is None): + while (not __unionfs_is_active(root=un_dir)) and (proc.poll() is None): pass ret = None @@ -153,11 +151,15 @@ def __unionfs_set_up(ro_dir, rw_dir, mount_dir): raise ValueError("Base directory does not exist") from benchbuild.utils.cmd import unionfs as unionfs_cmd - LOG.debug( - "Mounting UnionFS on %s with RO:%s RW:%s", mount_dir, ro_dir, rw_dir - ) - return unionfs_cmd["-f", "-o", "auto_unmount,allow_other,cow", - rw_dir + "=RW:" + ro_dir + "=RO", mount_dir] + + LOG.debug("Mounting UnionFS on %s with RO:%s RW:%s", mount_dir, ro_dir, rw_dir) + return unionfs_cmd[ + "-f", + "-o", + "auto_unmount,allow_other,cow", + rw_dir + "=RW:" + ro_dir + "=RO", + mount_dir, + ] class UnmountError(BaseException): diff --git a/benchbuild/utils/user_interface.py b/benchbuild/utils/user_interface.py index 370cce569..30d808e52 100644 --- a/benchbuild/utils/user_interface.py +++ b/benchbuild/utils/user_interface.py @@ -1,6 +1,7 @@ """ User interface helpers for benchbuild. 
""" + import logging import os import sys @@ -35,20 +36,15 @@ def query_yes_no(question, default="yes"): while True: sys.stdout.write(question + prompt) choice = input().lower() - if default is not None and choice == '': + if default is not None and choice == "": return valid[default] if choice in valid: return valid[choice] - sys.stdout.write( - "Please respond with 'yes' or 'no' " - "(or 'y' or 'n').\n" - ) + sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n") def ask( - question: str, - default_answer: bool = False, - default_answer_str: str = "no" + question: str, default_answer: bool = False, default_answer_str: str = "no" ) -> bool: """ Ask for user input. diff --git a/benchbuild/utils/versions.py b/benchbuild/utils/versions.py index e6cb7a474..d4448b407 100644 --- a/benchbuild/utils/versions.py +++ b/benchbuild/utils/versions.py @@ -48,6 +48,7 @@ def get_git_hash(from_url): git commit hash of HEAD, or empty string. """ from benchbuild.utils.cmd import git + if from_url is None: return "" diff --git a/benchbuild/utils/wrapping.py b/benchbuild/utils/wrapping.py index 29d3c1d05..92855a822 100644 --- a/benchbuild/utils/wrapping.py +++ b/benchbuild/utils/wrapping.py @@ -22,6 +22,7 @@ of the binary. We cannot guarantee that repeated execution is valid, therefore, we let the user decide what the program should do. 
""" + import logging import os import sys @@ -42,10 +43,10 @@ LOG = logging.getLogger(__name__) # Configure default settings for dill pickle/unpickle, globally -dill.settings['ignore'] = True -dill.settings['recurse'] = True -dill.settings['protocol'] = -1 -dill.settings['byref'] = True +dill.settings["ignore"] = True +dill.settings["recurse"] = True +dill.settings["protocol"] = -1 +dill.settings["byref"] = True if tp.TYPE_CHECKING: import jinja2 @@ -77,8 +78,9 @@ def strip_path_prefix(ipath: Path, prefix: Path) -> Path: return ipath -def __create_jinja_env() -> 'jinja2.Environment': +def __create_jinja_env() -> "jinja2.Environment": import jinja2 # pylint: disable=import-outside-toplevel + return jinja2.Environment( trim_blocks=True, lstrip_blocks=True, @@ -155,7 +157,7 @@ def wrap( python=python, collect_coverage=collect_coverage, coverage_config=coverage_config, - coverage_path=coverage_path + coverage_path=coverage_path, ) ) @@ -167,7 +169,7 @@ def wrap( def wrap_dynamic( project: "benchbuild.project.Project", name: str, - sprefix: Path = Path('.'), + sprefix: Path = Path("."), python: str = sys.executable, name_filters: tp.Optional[tp.List[str]] = None, bin_ext: str = ".bin", @@ -238,7 +240,7 @@ def wrap_dynamic( name_filters=name_filters, collect_coverage=collect_coverage, coverage_config=coverage_config, - coverage_path=coverage_path + coverage_path=coverage_path, ) ) @@ -292,7 +294,7 @@ def wrap_cc( detect_project=detect_project, collect_coverage=collect_coverage, coverage_config=coverage_config, - coverage_path=coverage_path + coverage_path=coverage_path, ) ) diff --git a/docs/source/conf.py b/docs/source/conf.py index 26382866f..6a2e1693a 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -9,54 +9,51 @@ import logging import os -from pkg_resources import DistributionNotFound, get_distribution +import importlib.metadata import benchbuild.utils # pylint: skip-file -try: - __version__ = get_distribution("benchbuild").version -except 
DistributionNotFound: - pass - -project = 'BenchBuild' -copyright = '2023, Andreas Simbürger' -author = 'Andreas Simbürger' +__version__ = importlib.metadata.version("benchbuild") + +project = "BenchBuild" +copyright = "2025, Andreas Simbürger" +author = "Andreas Simbürger" release = __version__ # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration extensions = [ - 'myst_parser', - 'sphinx.ext.napoleon', - 'sphinx.ext.autodoc', - 'sphinx_autodoc_typehints', - 'sphinx.ext.napoleon', - 'sphinx.ext.todo', - 'sphinx.ext.viewcode', - 'sphinx.ext.autosectionlabel', - 'sphinxcontrib.programoutput', - 'sphinx.ext.githubpages', + "myst_parser", + "sphinx.ext.napoleon", + "sphinx.ext.autodoc", + "sphinx_autodoc_typehints", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "sphinx.ext.autosectionlabel", + "sphinxcontrib.programoutput", + "sphinx.ext.githubpages", ] exclude_patterns = [] source_suffix = { - '.rst': 'restructuredtext', - '.md': 'markdown', + ".rst": "restructuredtext", + ".md": "markdown", } # -- Options for HTML output ------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output -html_theme = 'press' -pygments_style = 'monokai' -html_static_path = ['_static'] +html_theme = "press" +pygments_style = "monokai" +html_static_path = ["_static"] napoleon_google_docstring = True napoleon_use_admonition_for_examples = True # Configure MyST Parser -#myst_gfm_only = True +# myst_gfm_only = True myst_enable_extensions = ["linkify"] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..f937fb4bc --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,101 @@ +[project] +name = "benchbuild" +dynamic = ["version"] +readme = "README.md" +description = "This is the experiment driver for the benchbuild study" +authors = [ + { name = "Andreas 
Simbuerger", email = "simbuerg@lairosiel.de" } +] +license-files = ["LICENSE.txt"] +requires-python = ">=3.11" +classifiers = [ + 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', + 'Topic :: Software Development :: Testing', + 'License :: OSI Approved :: MIT License', + 'Programming Language :: Python :: 3' +] +keywords = ["benchbuild", "experiments", "run-time"] +dependencies = [ + "attrs>=22", + "dill>=0", + "jinja2>=3", + "parse>=1", + "pathos>=0", + "plumbum>=1", + "psutil>=5", + "psycopg2-binary>=2", + "pygit2>=1.19", + "pygtrie>=2", + "pyparsing>=3", + "pyyaml>=6.0", + "result>=0", + "rich>=12", + "schema>=0", + "six>=1.17.0", + "sqlalchemy>=2", + "typing-extensions>=4", + "virtualenv>=20", +] + +[project.urls] +Repository = "https://github.com/PolyJIT/benchbuild" + +[project.scripts] +benchbuild = "benchbuild.driver:main" +container = "benchbuild.container:main" + +[build-system] +requires = ["setuptools>=64", "setuptools-scm>=8"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +version_file = "benchbuild/_version.py" +local_scheme = "no-local-version" + +[tool.setuptools.package-data] +benchbuild = [ + "res/misc/slurm.sh.inc", + "res/sql/func.compare_region_wise2.sql", + "res/sql/func.experiments.sql", + "res/sql/func.recompilation.sql", + "res/sql/func.run_regions.sql", + "res/sql/func.total_dyncov_clean.sql", + "res/sql/func.total_speedup.sql", + "res/sql/func.compare_region_wise.sql", + "res/sql/func.project_region_time.sql", + "res/sql/func.run_durations.sql", + "res/sql/func.speedup.sql", + "res/sql/func.total_dyncov.sql", + "res/sql/func.pj-test-eval.sql", + "res/sql/func.compilestats_eval.sql", + "res/sql/func.polly_mse.sql", + "res/sql/func.profileScopDetection-eval.sql", + "res/wrapping/run_compiler.py.inc", + "res/wrapping/run_static.py.inc", + "res/wrapping/run_dynamic.py.inc", + "res/patches/linpack.patch" +] + +[dependency-groups] +dev = [ + "codecov>=2.1.13", + "faker>=37.4.2", + "gitpython>=3.1.45", + 
"mock>=5.2.0", + "pre-commit>=4.2.0", + "pytest>=8.4.1", + "pytest-cov>=6.2.1", + "pytest-describe>=2.2.0", + "pytest-git>=1.8.0", +] +docs = [ + "linkify>=1.4", + "mkdocs>=1.6", + "myst-parser[linkify]>=4", + "pymdown-extensions>=10.16", + "sphinx>=8", + "sphinx-autodoc-typehints>=3", + "sphinx-markdown>=1", + "sphinx-press-theme>=0.9", + "sphinxcontrib-programoutput>=0.18", +] diff --git a/setup.py b/setup.py index 6076d849a..9d63b8b75 100644 --- a/setup.py +++ b/setup.py @@ -1,58 +1,88 @@ from setuptools import find_packages, setup -with open('README.md') as f: +with open("README.md") as f: long_description = f.read() RESOURCES = [ - 'res/misc/slurm.sh.inc', "res/sql/func.compare_region_wise2.sql", - "res/sql/func.experiments.sql", "res/sql/func.recompilation.sql", - "res/sql/func.run_regions.sql", "res/sql/func.total_dyncov_clean.sql", - "res/sql/func.total_speedup.sql", "res/sql/func.compare_region_wise.sql", - "res/sql/func.project_region_time.sql", "res/sql/func.run_durations.sql", - "res/sql/func.speedup.sql", "res/sql/func.total_dyncov.sql", - "res/sql/func.pj-test-eval.sql", "res/sql/func.compilestats_eval.sql", - "res/sql/func.polly_mse.sql", "res/sql/func.profileScopDetection-eval.sql", - "res/wrapping/run_compiler.py.inc", "res/wrapping/run_static.py.inc", - "res/wrapping/run_dynamic.py.inc", "res/patches/linpack.patch" + "res/misc/slurm.sh.inc", + "res/sql/func.compare_region_wise2.sql", + "res/sql/func.experiments.sql", + "res/sql/func.recompilation.sql", + "res/sql/func.run_regions.sql", + "res/sql/func.total_dyncov_clean.sql", + "res/sql/func.total_speedup.sql", + "res/sql/func.compare_region_wise.sql", + "res/sql/func.project_region_time.sql", + "res/sql/func.run_durations.sql", + "res/sql/func.speedup.sql", + "res/sql/func.total_dyncov.sql", + "res/sql/func.pj-test-eval.sql", + "res/sql/func.compilestats_eval.sql", + "res/sql/func.polly_mse.sql", + "res/sql/func.profileScopDetection-eval.sql", + "res/wrapping/run_compiler.py.inc", + 
"res/wrapping/run_static.py.inc", + "res/wrapping/run_dynamic.py.inc", + "res/patches/linpack.patch", ] setup( - name='benchbuild', + name="benchbuild", use_scm_version=True, - url='https://github.com/PolyJIT/benchbuild', + url="https://github.com/PolyJIT/benchbuild", packages=find_packages( exclude=[ - "docs", "extern", "filters", "linker", "src", "statistics", "tests", - "results" + "docs", + "extern", + "filters", + "linker", + "src", + "statistics", + "tests", + "results", ] ), package_data={"benchbuild": RESOURCES}, include_package_data=True, setup_requires=["pytest-runner", "setuptools_scm"], install_requires=[ - "Jinja2>=3", "PyYAML>=6", "attrs>=22", "dill>=0", "pathos>=0.3", - "parse>=1", "plumbum>=1", "psutil>=5", "psycopg2-binary>=2", - "pygit2>=1", "pygtrie>=2", "pyparsing>=3", "rich>=13", - "SQLAlchemy>=2", "typing-extensions>=4", "virtualenv>=20", - "schema>=0", "result>=0" + "Jinja2>=3", + "PyYAML>=6", + "attrs>=22", + "dill>=0", + "pathos>=0.3", + "parse>=1", + "plumbum>=1", + "psutil>=5", + "psycopg2-binary>=2", + "pygit2>=1", + "pygtrie>=2", + "pyparsing>=3", + "rich>=13", + "SQLAlchemy>=2", + "typing-extensions>=4", + "virtualenv>=20", + "schema>=0", + "result>=0", ], author="Andreas Simbuerger", author_email="simbuerg@fim.uni-passau.de", description="This is the experiment driver for the benchbuild study", long_description=long_description, - long_description_content_type='text/markdown', + long_description_content_type="text/markdown", license="MIT", entry_points={ - 'console_scripts': [ - 'benchbuild=benchbuild.driver:main', - 'container=benchbuild.container:main' + "console_scripts": [ + "benchbuild=benchbuild.driver:main", + "container=benchbuild.container:main", ] }, classifiers=[ - 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', - 'Topic :: Software Development :: Testing', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 3' + "Development Status :: 4 - Beta", + "Intended Audience :: 
Developers", + "Topic :: Software Development :: Testing", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", ], - keywords="benchbuild experiments run-time" + keywords="benchbuild experiments run-time", ) diff --git a/tests/command/test_workloads.py b/tests/command/test_workloads.py index a106fc128..910d0ee84 100644 --- a/tests/command/test_workloads.py +++ b/tests/command/test_workloads.py @@ -45,16 +45,10 @@ def run_tests(self) -> None: @pytest.fixture def project(bb_git_repo: GitRepo) -> ProjectT: DefaultWorkloadProject.SOURCE = [ - Git( - remote=bb_git_repo.workspace, - local="workload-test.git", - shallow=False - ) + Git(remote=bb_git_repo.workspace, local="workload-test.git", shallow=False) ] DefaultWorkloadProject.WORKLOADS = { - WorkloadSet("always"): [ - Command(SourceRoot("workload-test.git") / "test") - ] + WorkloadSet("always"): [Command(SourceRoot("workload-test.git") / "test")] } return DefaultWorkloadProject @@ -62,19 +56,13 @@ def project(bb_git_repo: GitRepo) -> ProjectT: @pytest.fixture def only_in_project(bb_git_repo: GitRepo) -> ProjectT: OnlyInWorkloadProject.SOURCE = [ - Git( - remote=bb_git_repo.workspace, - local="workload-test.git", - shallow=False - ) + Git(remote=bb_git_repo.workspace, local="workload-test.git", shallow=False) ] OnlyInWorkloadProject.WORKLOADS = { OnlyIn(RevisionRange("HEAD~1", "HEAD"), WorkloadSet("sometimes")): [ Command(SourceRoot("workload-test.git") / "test") ], - WorkloadSet("always"): [ - Command(SourceRoot("workload-test.git") / "test") - ] + WorkloadSet("always"): [Command(SourceRoot("workload-test.git") / "test")], } return OnlyInWorkloadProject @@ -91,7 +79,7 @@ def test_workload_can_unwrap(project: ProjectT, only_in_project: ProjectT): # FIXME: Make project unpickleable -#def test_workload_run(project: ProjectT): +# def test_workload_run(project: ProjectT): # plan = generate_plan([NoMeasurement], [project]) # res = execute_plan(plan) # diff --git a/tests/conftest.py 
b/tests/conftest.py index c78e7d782..1058fae3c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -20,8 +20,7 @@ def mk_git_repo(): fake.add_provider(file) def _git_repository( - num_commits: int = 2, - git_submodule: tp.Optional[git.Repo] = None + num_commits: int = 2, git_submodule: tp.Optional[git.Repo] = None ) -> RepoT: nonlocal tmp_dir @@ -33,22 +32,19 @@ def _git_repository( some_content = fake.text() a_name = fake.file_name() a_file = a_repo_base / a_name - with open(a_file, 'w') as a_file_handle: + with open(a_file, "w") as a_file_handle: a_file_handle.writelines(some_content) repo.index.add(a_file) - repo.index.commit(f'Add {a_name}') + repo.index.commit(f"Add {a_name}") if git_submodule: a_sm_path = fake.file_name() a_sm_name = fake.file_name() repo.create_submodule( - a_sm_name, - a_sm_path, - url=git_submodule.git_dir, - branch='master' + a_sm_name, a_sm_path, url=git_submodule.git_dir, branch="master" ) repo.index.commit( - f'Add submodule {a_sm_name} to {a_sm_path} from: {repo.git_dir}' + f"Add submodule {a_sm_name} to {a_sm_path} from: {repo.git_dir}" ) return (tmp_dir, repo) diff --git a/tests/e2e/test_containers.py b/tests/e2e/test_containers.py index 0a77231fb..c70b90451 100644 --- a/tests/e2e/test_containers.py +++ b/tests/e2e/test_containers.py @@ -35,8 +35,7 @@ def container_uow() -> uow.ContainerUnitOfWork: @pytest.fixture -def publish(image_uow, - container_uow) -> tp.Callable[[messagebus.Message], None]: +def publish(image_uow, container_uow) -> tp.Callable[[messagebus.Message], None]: evt_handlers = { events.LayerCreated: [], events.ImageCreated: [], @@ -45,22 +44,26 @@ def publish(image_uow, events.ContainerStarted: [], events.LayerCreationFailed: [], events.DebugImageKept: [], - events.ImageCreationFailed: [] + events.ImageCreationFailed: [], } cmd_handlers = { - commands.CreateImage: - handlers.bootstrap(handlers.create_image, image_uow), - commands.CreateBenchbuildBase: - handlers.bootstrap(handlers.create_benchbuild_base, 
image_uow), - commands.RunProjectContainer: - handlers.bootstrap(handlers.run_project_container, container_uow), - commands.ExportImage: - handlers.bootstrap(handlers.export_image_handler, image_uow), - commands.ImportImage: - handlers.bootstrap(handlers.import_image_handler, image_uow), - commands.DeleteImage: - handlers.bootstrap(handlers.delete_image_handler, image_uow), + commands.CreateImage: handlers.bootstrap(handlers.create_image, image_uow), + commands.CreateBenchbuildBase: handlers.bootstrap( + handlers.create_benchbuild_base, image_uow + ), + commands.RunProjectContainer: handlers.bootstrap( + handlers.run_project_container, container_uow + ), + commands.ExportImage: handlers.bootstrap( + handlers.export_image_handler, image_uow + ), + commands.ImportImage: handlers.bootstrap( + handlers.import_image_handler, image_uow + ), + commands.DeleteImage: handlers.bootstrap( + handlers.delete_image_handler, image_uow + ), } yield partial(messagebus.handle, cmd_handlers, evt_handlers) @@ -76,34 +79,36 @@ def publish(image_uow, @pytest.fixture def true_image() -> decl.ContainerImage: - def prepare_container() -> None: - true_path = which('true').strip() - cp(true_path, 'true') - - return decl.ContainerImage() \ - .from_('scratch') \ - .context(prepare_container) \ - .workingdir('/') \ - .entrypoint('/true') + true_path = which("true").strip() + cp(true_path, "true") + + return ( + decl.ContainerImage() + .from_("scratch") + .context(prepare_container) + .workingdir("/") + .entrypoint("/true") + ) @pytest.fixture def no_entrypoint() -> decl.ContainerImage: - def prepare_container() -> None: - true_path = which('true').strip() - cp(true_path, 'true') - - return decl.ContainerImage() \ - .from_('scratch') \ - .context(prepare_container) \ - .workingdir('/') \ - .command('/true') + true_path = which("true").strip() + cp(true_path, "true") + + return ( + decl.ContainerImage() + .from_("scratch") + .context(prepare_container) + .workingdir("/") + .command("/true") + ) 
def test_image_creation(true_image, publish, image_uow) -> None: - name = 'benchbuild-e2e-test_image_creation' + name = "benchbuild-e2e-test_image_creation" maybe_image = image_uow.registry.find(name) assert maybe_image is None @@ -116,12 +121,12 @@ def test_image_creation(true_image, publish, image_uow) -> None: def test_image_run_no_args(true_image, publish) -> None: - name = 'benchbuild-e2e-test_image_run' + name = "benchbuild-e2e-test_image_run" cmd = commands.CreateImage(name, true_image) publish(cmd) - run_cmd_no_args = commands.RunProjectContainer(name, name, '/') + run_cmd_no_args = commands.RunProjectContainer(name, name, "/") try: publish(run_cmd_no_args) except ContainerCreateError: @@ -131,14 +136,12 @@ def test_image_run_no_args(true_image, publish) -> None: def test_image_run_args(true_image, publish) -> None: - name = 'benchbuild-e2e-test_image_run' + name = "benchbuild-e2e-test_image_run" cmd = commands.CreateImage(name, true_image) publish(cmd) - run_cmd_args = commands.RunProjectContainer( - name, name, '/', ('arg1', 'arg2') - ) + run_cmd_args = commands.RunProjectContainer(name, name, "/", ("arg1", "arg2")) try: publish(run_cmd_args) except ContainerCreateError: @@ -148,14 +151,12 @@ def test_image_run_args(true_image, publish) -> None: def test_interactive_without_entrypoint(no_entrypoint, publish, config) -> None: - name = 'benchbuild-e2e-test_interactive_without_entrypoint' + name = "benchbuild-e2e-test_interactive_without_entrypoint" cmd = commands.CreateImage(name, no_entrypoint) publish(cmd) - run_cmd_args = commands.RunProjectContainer( - name, name, '/', ('arg1', 'arg2') - ) + run_cmd_args = commands.RunProjectContainer(name, name, "/", ("arg1", "arg2")) config["container"]["interactive"] = True try: publish(run_cmd_args) diff --git a/tests/e2e/test_experiment_sampling.py b/tests/e2e/test_experiment_sampling.py index dba7b5428..df0f6451e 100644 --- a/tests/e2e/test_experiment_sampling.py +++ b/tests/e2e/test_experiment_sampling.py @@ 
-21,13 +21,12 @@ class NoopExtension(Extension): - def __call__(self, *args, **kwargs) -> tp.List[run.RunInfo]: return [run.RunInfo()] class SampleExperiment(bb.Experiment): - NAME = 'test-experiment-sampling' + NAME = "test-experiment-sampling" CONTAINER = ContainerImage() def actions_for_project(self, project): @@ -36,8 +35,7 @@ def actions_for_project(self, project): return self.default_runtime_actions(project) @classmethod - def sample(cls, - prj_cls: bb.project.ProjectT) -> tp.Sequence[source.Revision]: + def sample(cls, prj_cls: bb.project.ProjectT) -> tp.Sequence[source.Revision]: return source.enumerate_revisions(prj_cls)[:EXPECTED_COMMITS] @@ -45,9 +43,10 @@ class SampleProject(bb.Project): """ E2E Test project for experiment version sampling. """ - NAME = 'test-experiment-sampling' - DOMAIN = 'tests' - GROUP = 'tests' + + NAME = "test-experiment-sampling" + DOMAIN = "tests" + GROUP = "tests" SOURCE = [] CONTAINER = ContainerImage() @@ -71,20 +70,20 @@ def repo_with_submodule(mk_git_repo) -> ct.RepoT: def project_cls(repo_with_submodule): base_dir, repo = repo_with_submodule - build_dir = str(CFG['build_dir']) - tmp_dir = str(CFG['tmp_dir']) + build_dir = str(CFG["build_dir"]) + tmp_dir = str(CFG["tmp_dir"]) - CFG['build_dir'] = str(base_dir) - CFG['tmp_dir'] = str(base_dir) + CFG["build_dir"] = str(base_dir) + CFG["tmp_dir"] = str(base_dir) SampleProject.SOURCE = [ - Git(remote=repo.git_dir, local='test.git'), - GitSubmodule(remote=repo.submodules[0].url, local='test.git/sub.git') + Git(remote=repo.git_dir, local="test.git"), + GitSubmodule(remote=repo.submodules[0].url, local="test.git/sub.git"), ] yield SampleProject - CFG['build_dir'] = build_dir - CFG['tmp_dir'] = tmp_dir + CFG["build_dir"] = build_dir + CFG["tmp_dir"] = tmp_dir def test_experiment_can_sample(project_cls): @@ -92,16 +91,14 @@ def test_experiment_can_sample(project_cls): assert len(contexts) == EXPECTED_COMMITS -@mock.patch('tests.e2e.test_experiment_sampling.SampleExperiment.sample') 
+@mock.patch("tests.e2e.test_experiment_sampling.SampleExperiment.sample") def test_sampling_is_used_by_benchbuild_run(mocked_sample, project_cls): """ Check, if a generated plan has used the experiment's sample method. This is the path used by 'benchbuild run' """ - ngn = engine.Experimentator( - experiments=[SampleExperiment], projects=[project_cls] - ) + ngn = engine.Experimentator(experiments=[SampleExperiment], projects=[project_cls]) num_actions = ngn.num_actions assert num_actions > 0 @@ -109,10 +106,8 @@ def test_sampling_is_used_by_benchbuild_run(mocked_sample, project_cls): mocked_sample.assert_called_with(project_cls) -@mock.patch('tests.e2e.test_experiment_sampling.SampleExperiment.sample') -def test_sampling_is_used_by_benchbuild_slurm( - mocked_sample, tmp_path, project_cls -): +@mock.patch("tests.e2e.test_experiment_sampling.SampleExperiment.sample") +def test_sampling_is_used_by_benchbuild_slurm(mocked_sample, tmp_path, project_cls): """ Check, if a generated plan has used the experiment's sample method. 
@@ -121,6 +116,6 @@ def test_sampling_is_used_by_benchbuild_slurm( exp = SampleExperiment(projects=[project_cls]) with pb.local.cwd(str(tmp_path)): - slurm.script(exp, 'run') + slurm.script(exp, "run") mocked_sample.assert_called_with(project_cls) diff --git a/tests/e2e/test_git_submodules.py b/tests/e2e/test_git_submodules.py index 1a0dfc98e..81c93bb80 100644 --- a/tests/e2e/test_git_submodules.py +++ b/tests/e2e/test_git_submodules.py @@ -15,13 +15,12 @@ class NoopExtension(Extension): - def __call__(self, *args, **kwargs) -> tp.List[run.RunInfo]: return [run.RunInfo()] class ExperimentTest(bb.Experiment): - NAME = 'test-experiment' + NAME = "test-experiment" CONTAINER = ContainerImage() def actions_for_project(self, project): @@ -34,9 +33,10 @@ class GitSubmoduleTestProject(bb.Project): """ E2E Test for GitSubmodule """ - NAME = 'test-git-submodule' - DOMAIN = 'tests' - GROUP = 'tests' + + NAME = "test-git-submodule" + DOMAIN = "tests" + GROUP = "tests" SOURCE = [] CONTAINER = ContainerImage() @@ -51,20 +51,20 @@ def compile(self): def project_cls(repo_with_submodule): base_dir, repo = repo_with_submodule - build_dir = str(CFG['build_dir']) - tmp_dir = str(CFG['tmp_dir']) + build_dir = str(CFG["build_dir"]) + tmp_dir = str(CFG["tmp_dir"]) - CFG['build_dir'] = str(base_dir) - CFG['tmp_dir'] = str(base_dir) + CFG["build_dir"] = str(base_dir) + CFG["tmp_dir"] = str(base_dir) GitSubmoduleTestProject.SOURCE = [ - Git(remote=repo.git_dir, local='test.git'), - GitSubmodule(remote=repo.submodules[0].url, local='test.git/sub.git') + Git(remote=repo.git_dir, local="test.git"), + GitSubmodule(remote=repo.submodules[0].url, local="test.git/sub.git"), ] yield GitSubmoduleTestProject - CFG['build_dir'] = build_dir - CFG['tmp_dir'] = tmp_dir + CFG["build_dir"] = build_dir + CFG["tmp_dir"] = tmp_dir def test_project_creates_variants(project_cls): @@ -72,9 +72,7 @@ def test_project_creates_variants(project_cls): def test_project_environment_with_submodules(project_cls): - ngn = 
engine.Experimentator( - experiments=[ExperimentTest], projects=[project_cls] - ) + ngn = engine.Experimentator(experiments=[ExperimentTest], projects=[project_cls]) failed = ngn.start() diff --git a/tests/environments/domain/test_commands.py b/tests/environments/domain/test_commands.py index 095597fff..ce3e40dc8 100644 --- a/tests/environments/domain/test_commands.py +++ b/tests/environments/domain/test_commands.py @@ -1,21 +1,20 @@ """Test command validation & conversion.""" + from benchbuild.environments.domain import commands, declarative def test_image_name_is_lowercase(): - expected = ['test-1', 'test-2'] + expected = ["test-1", "test-2"] - cmd_1 = commands.CreateBenchbuildBase( - 'TEST-1', declarative.ContainerImage() - ) - cmd_2 = commands.CreateImage('TEST-2', declarative.ContainerImage()) + cmd_1 = commands.CreateBenchbuildBase("TEST-1", declarative.ContainerImage()) + cmd_2 = commands.CreateImage("TEST-2", declarative.ContainerImage()) assert [cmd_1.name, cmd_2.name] == expected def test_containerimage_name_is_lowercase(): - expected = ['test-1', 'containername1'] + expected = ["test-1", "containername1"] - cmd_1 = commands.RunProjectContainer('TEST-1', 'ContainerName1', '') + cmd_1 = commands.RunProjectContainer("TEST-1", "ContainerName1", "") assert [cmd_1.image, cmd_1.name] == expected diff --git a/tests/environments/domain/test_declarative.py b/tests/environments/domain/test_declarative.py index fc656287a..a7cddb34c 100644 --- a/tests/environments/domain/test_declarative.py +++ b/tests/environments/domain/test_declarative.py @@ -1,6 +1,7 @@ """ Test declarative API """ + from typing import Hashable from benchbuild.environments.domain import declarative as decl @@ -14,97 +15,114 @@ def test_container_image_default_is_false(): def test_container_image_layer_is_true(): img = decl.ContainerImage() - img.from_('base') + img.from_("base") assert bool(img) def test_container_image_can_model_from(): img = decl.ContainerImage() - img.from_('base') + 
img.from_("base") - assert img == [m.FromLayer('base')] + assert img == [m.FromLayer("base")] def test_container_image_can_model_add(): img = decl.ContainerImage() - img.add(('a',), 'c') + img.add(("a",), "c") - assert img == [m.AddLayer(('a',), 'c')] + assert img == [m.AddLayer(("a",), "c")] def test_container_image_can_model_copy(): img = decl.ContainerImage() - img.copy_(('a',), 'c') + img.copy_(("a",), "c") - assert img == [m.CopyLayer(('a',), 'c')] + assert img == [m.CopyLayer(("a",), "c")] def test_container_image_can_model_run(): img = decl.ContainerImage() - img.run('cmd', 'arg0', 'arg1') + img.run("cmd", "arg0", "arg1") - assert img == [m.RunLayer('cmd', ( - 'arg0', - 'arg1', - ), {})] + assert img == [ + m.RunLayer( + "cmd", + ( + "arg0", + "arg1", + ), + {}, + ) + ] def test_container_image_can_model_context(): img = decl.ContainerImage() - img.context('test') + img.context("test") - assert img == [m.ContextLayer('test')] + assert img == [m.ContextLayer("test")] def test_container_image_can_model_env(): img = decl.ContainerImage() - img.env(a='test') + img.env(a="test") - assert img == [m.UpdateEnv({'a': 'test'})] + assert img == [m.UpdateEnv({"a": "test"})] def test_container_image_can_model_workingdir(): img = decl.ContainerImage() - img.workingdir('test') + img.workingdir("test") - assert img == [m.WorkingDirectory('test')] + assert img == [m.WorkingDirectory("test")] def test_container_image_can_model_entrypoint(): img = decl.ContainerImage() - img.entrypoint('cmd', 'arg0', 'arg1') + img.entrypoint("cmd", "arg0", "arg1") - assert img == [m.EntryPoint(( - 'cmd', - 'arg0', - 'arg1', - ))] + assert img == [ + m.EntryPoint( + ( + "cmd", + "arg0", + "arg1", + ) + ) + ] def test_container_image_can_model_command(): img = decl.ContainerImage() - img.command('cmd', 'arg0', 'arg1') + img.command("cmd", "arg0", "arg1") - assert img == [m.SetCommand(( - 'cmd', - 'arg0', - 'arg1', - ))] + assert img == [ + m.SetCommand( + ( + "cmd", + "arg0", + "arg1", + ) + ) + 
] def test_container_image_is_hashable(): - layers = decl.ContainerImage() \ - .from_('a') \ - .add(['a', 'b', 'c'], 'd') \ - .add(('a', 'b', 'c'), 'd') \ - .copy_(['a', 'b', 'c'], 'd') \ - .copy_(('a', 'b', 'c'), 'd') \ - .run('cmd', 'a', 'b', 'c', a='a', b='b', c='c') \ - .context(lambda: None).env(a='a', b='b', c='c') \ - .workingdir('a') \ - .entrypoint('a', 'b', 'c') \ - .command('a', 'b', 'c') - - img = m.Image('a', layers.base, layers) + layers = ( + decl.ContainerImage() + .from_("a") + .add(["a", "b", "c"], "d") + .add(("a", "b", "c"), "d") + .copy_(["a", "b", "c"], "d") + .copy_(("a", "b", "c"), "d") + .run("cmd", "a", "b", "c", a="a", b="b", c="c") + .context(lambda: None) + .env(a="a", b="b", c="c") + .workingdir("a") + .entrypoint("a", "b", "c") + .command("a", "b", "c") + ) + + img = m.Image("a", layers.base, layers) assert isinstance(img, Hashable) diff --git a/tests/environments/domain/test_model.py b/tests/environments/domain/test_model.py index 34b022bd9..6f746391f 100644 --- a/tests/environments/domain/test_model.py +++ b/tests/environments/domain/test_model.py @@ -1,28 +1,29 @@ """ Describe usage of our default container/image domain. 
""" + from typing import Hashable from benchbuild.environments.domain import model def test_layers_from_is_hashable(): - layer = model.FromLayer('a') + layer = model.FromLayer("a") assert isinstance(layer, Hashable) def test_layers_add_is_hashable(): - layer = model.AddLayer(('a', 'b', 'c'), 'd') + layer = model.AddLayer(("a", "b", "c"), "d") assert isinstance(layer, Hashable) def test_layers_copy_is_hashable(): - layer = model.CopyLayer(('a', 'b', 'c'), 'd') + layer = model.CopyLayer(("a", "b", "c"), "d") assert isinstance(layer, Hashable) def test_layers_run_is_hashable(): - layer = model.RunLayer('cmd', ('a', 'b', 'c'), dict(a='a', b='b', c='c')) + layer = model.RunLayer("cmd", ("a", "b", "c"), dict(a="a", b="b", c="c")) assert isinstance(layer, Hashable) @@ -32,65 +33,57 @@ def test_layers_context_is_hashable(): def test_layers_env_is_hashable(): - layer = model.UpdateEnv(dict(a='a', b='b', c='c')) + layer = model.UpdateEnv(dict(a="a", b="b", c="c")) assert isinstance(layer, Hashable) def test_layers_workdir_is_hashable(): - layer = model.WorkingDirectory('a') + layer = model.WorkingDirectory("a") assert isinstance(layer, Hashable) def test_layers_entrypoint_is_hashable(): - layer = model.EntryPoint(('a', 'b', 'c')) + layer = model.EntryPoint(("a", "b", "c")) assert isinstance(layer, Hashable) def test_layers_cmd_is_hashable(): - layer = model.SetCommand(('a', 'b', 'c')) + layer = model.SetCommand(("a", "b", "c")) assert isinstance(layer, Hashable) def test_image_image_requires_name_and_base(): - img = model.Image('name', model.FromLayer('base'), []) - assert img.name == 'name' - assert img.from_ == model.FromLayer('base') + img = model.Image("name", model.FromLayer("base"), []) + assert img.name == "name" + assert img.from_ == model.FromLayer("base") assert len(img.layers) == 0 def test_image_can_append_layers_to_image(): - img = model.Image('-', model.FromLayer('-'), [model.FromLayer('base')]) - img.append(model.WorkingDirectory('abc')) + img = 
model.Image("-", model.FromLayer("-"), [model.FromLayer("base")]) + img.append(model.WorkingDirectory("abc")) - assert img.layers == [ - model.FromLayer('base'), - model.WorkingDirectory('abc') - ] + assert img.layers == [model.FromLayer("base"), model.WorkingDirectory("abc")] def test_image_can_prepend_layers_to_image(): - img = model.Image( - '-', model.FromLayer('-'), [model.WorkingDirectory('abc')] - ) - img.prepend(model.FromLayer('base')) - - assert img.layers == [ - model.FromLayer('base'), - model.WorkingDirectory('abc') - ] + img = model.Image("-", model.FromLayer("-"), [model.WorkingDirectory("abc")]) + img.prepend(model.FromLayer("base")) + + assert img.layers == [model.FromLayer("base"), model.WorkingDirectory("abc")] def test_image_is_hashable(): layers = [ - model.FromLayer('a'), - model.AddLayer(('a', 'b', 'c'), 'd'), - model.CopyLayer(('a', 'b', 'c'), 'd'), - model.RunLayer('cmd', ('a', 'b', 'c'), dict(a='a', b='b', c='c')), + model.FromLayer("a"), + model.AddLayer(("a", "b", "c"), "d"), + model.CopyLayer(("a", "b", "c"), "d"), + model.RunLayer("cmd", ("a", "b", "c"), dict(a="a", b="b", c="c")), model.ContextLayer(lambda: None), - model.UpdateEnv(dict(a='a', b='b', c='c')), - model.WorkingDirectory('a'), - model.EntryPoint(('a', 'b', 'c')), - model.SetCommand(('a', 'b', 'c')) + model.UpdateEnv(dict(a="a", b="b", c="c")), + model.WorkingDirectory("a"), + model.EntryPoint(("a", "b", "c")), + model.SetCommand(("a", "b", "c")), ] - img = model.Image('-', model.FromLayer('-'), layers) + img = model.Image("-", model.FromLayer("-"), layers) assert isinstance(img, Hashable) diff --git a/tests/environments/entrypoints/test_cli.py b/tests/environments/entrypoints/test_cli.py index 474307ecd..51f767291 100644 --- a/tests/environments/entrypoints/test_cli.py +++ b/tests/environments/entrypoints/test_cli.py @@ -1,6 +1,7 @@ """ Test declarative API """ + from benchbuild.environments.entrypoints import cli from benchbuild.experiments.empty import Empty from 
tests.project.test_project import DummyPrj, DummyPrjNoContainerImage @@ -8,10 +9,10 @@ def test_cli_enumerates_only_supported_projects(): prj_index = { - 'TestPrj/TestGrp': DummyPrj, - 'TestPrjNoContainer/TestGrp': DummyPrjNoContainerImage + "TestPrj/TestGrp": DummyPrj, + "TestPrjNoContainer/TestGrp": DummyPrjNoContainerImage, } - exp_index = {'empty': Empty} + exp_index = {"empty": Empty} prjs = list(cli.enumerate_projects(exp_index, prj_index)) assert len(prjs) == 1 diff --git a/tests/experiments/test_discovery.py b/tests/experiments/test_discovery.py index 4fcadae8c..7c6c0eefb 100644 --- a/tests/experiments/test_discovery.py +++ b/tests/experiments/test_discovery.py @@ -5,21 +5,27 @@ def test_discovery(caplog): - caplog.set_level(logging.DEBUG, logger='benchbuild') - CFG['plugins']['projects'] = [] + caplog.set_level(logging.DEBUG, logger="benchbuild") + CFG["plugins"]["projects"] = [] CFG["plugins"]["experiments"] = ["benchbuild.non_existing"] discover() assert caplog.record_tuples == [ - ('benchbuild.plugins', logging.ERROR, - "Could not find 'benchbuild.non_existing'"), - ('benchbuild.plugins', logging.DEBUG, - "ImportError: No module named 'benchbuild.non_existing'"), + ( + "benchbuild.plugins", + logging.ERROR, + "Could not find 'benchbuild.non_existing'", + ), + ( + "benchbuild.plugins", + logging.DEBUG, + "ImportError: No module named 'benchbuild.non_existing'", + ), ] - def_exps = CFG['plugins']['experiments'].node['default'] - CFG['plugins']['experiments'] = def_exps + def_exps = CFG["plugins"]["experiments"].node["default"] + CFG["plugins"]["experiments"] = def_exps - def_prjs = CFG['plugins']['projects'].node['default'] - CFG['plugins']['projects'] = def_prjs + def_prjs = CFG["plugins"]["projects"].node["default"] + CFG["plugins"]["projects"] = def_prjs discover() diff --git a/tests/integration/test_cli_slurm.py b/tests/integration/test_cli_slurm.py index f5d30c59c..3e38f9a30 100644 --- a/tests/integration/test_cli_slurm.py +++ 
b/tests/integration/test_cli_slurm.py @@ -9,19 +9,18 @@ @pytest.fixture def cmd_mock() -> tp.Callable[[str], None]: - def _cmd_mock(name: str): - cmd.__overrides__[name] = ['/bin/true'] + cmd.__overrides__[name] = ["/bin/true"] yield _cmd_mock cmd.__overrides__ = {} def test_slurm_command(tmp_path, cmd_mock): - cmd_mock('srun') + cmd_mock("srun") with pytest.raises(SystemExit) as pytest_wrapped_e: with local.cwd(tmp_path): - Slurm.run(argv=['slurm', '-E', 'empty', 'test']) + Slurm.run(argv=["slurm", "-E", "empty", "test"]) assert pytest_wrapped_e.type == SystemExit assert pytest_wrapped_e.value.code == 0 diff --git a/tests/project/test_project.py b/tests/project/test_project.py index 6138e6c47..1c602dced 100644 --- a/tests/project/test_project.py +++ b/tests/project/test_project.py @@ -25,46 +25,47 @@ class DummyPrj(Project): - NAME = 'TestPrj' - DOMAIN = 'TestDom' - GROUP = 'TestGrp' + NAME = "TestPrj" + DOMAIN = "TestDom" + GROUP = "TestGrp" SOURCE = [nosource()] - CONTAINER = declarative.ContainerImage().from_('benchbuild:alpine') + CONTAINER = declarative.ContainerImage().from_("benchbuild:alpine") def run_tests(self): raise NotImplementedError() class DummyPrjEmptySource(Project): - NAME = 'TestPrj' - DOMAIN = 'TestDom' - GROUP = 'TestGrp' + NAME = "TestPrj" + DOMAIN = "TestDom" + GROUP = "TestGrp" SOURCE = [] - CONTAINER = declarative.ContainerImage().from_('benchbuild:alpine') + CONTAINER = declarative.ContainerImage().from_("benchbuild:alpine") def run_tests(self): raise NotImplementedError() class DummyPrjNoContainerImage(Project): - NAME = 'TestPrjNoContainer' - DOMAIN = 'TestDom' - GROUP = 'TestGrp' + NAME = "TestPrjNoContainer" + DOMAIN = "TestDom" + GROUP = "TestGrp" SOURCE = [nosource()] def run_tests(self): raise NotImplementedError() -@pytest.fixture(params=[['1'], ['1', '2']], ids=['single', 'multi']) +@pytest.fixture(params=[["1"], ["1", "2"]], ids=["single", "multi"]) def mksource(request) -> tp.Callable[[str], FetchableSource]: - class 
VersionSource(FetchableSource): test_versions: tp.List[str] def __init__( - self, local: str, remote: tp.Union[str, tp.Dict[str, str]], - test_versions: tp.List[str] + self, + local: str, + remote: tp.Union[str, tp.Dict[str, str]], + test_versions: tp.List[str], ): super().__init__(local, remote) @@ -80,24 +81,20 @@ def version(self, target_dir, version): def versions(self): return [Variant(self, version) for version in self.test_versions] - def source_factory(name: str = 'VersionSource') -> FetchableSource: + def source_factory(name: str = "VersionSource") -> FetchableSource: cls = type(name, (VersionSource,), {}) - return cls(local=name, remote='', test_versions=request.param) + return cls(local=name, remote="", test_versions=request.param) return source_factory @pytest.fixture def mkproject(mksource): - - def project_factory(name: str = 'VersionedProject', num_sources: int = 1): - + def project_factory(name: str = "VersionedProject", num_sources: int = 1): class VersionedProject(Project): NAME = DOMAIN = GROUP = name - SOURCE = [ - mksource(f'VersionSource_{i}') for i in range(num_sources) - ] - CONTAINER = declarative.ContainerImage().from_('benchbuild:alpine') + SOURCE = [mksource(f"VersionSource_{i}") for i in range(num_sources)] + CONTAINER = declarative.ContainerImage().from_("benchbuild:alpine") return VersionedProject @@ -114,22 +111,24 @@ class TI: @pytest.fixture( params=[ - TI(__add_single_filter__, "1", "ni", '1'), - TI(__add_indexed_filters__, ["1"], ["ni"], '1'), + TI(__add_single_filter__, "1", "ni", "1"), + TI(__add_indexed_filters__, ["1"], ["ni"], "1"), TI( - __add_named_filters__, {"VersionSource_0": "1"}, - {"VersionSource_0": "ni"}, '1' + __add_named_filters__, + {"VersionSource_0": "1"}, + {"VersionSource_0": "ni"}, + "1", ), ], - ids=['single-1', 'indexed-1', 'named-1'] + ids=["single-1", "indexed-1", "named-1"], ) def filter_test(request): return request.param @pytest.fixture( - params=[('SingleSource', 1), ('MultiSource', 2)], - 
ids=['SingleSource', 'MultiSource'] + params=[("SingleSource", 1), ("MultiSource", 2)], + ids=["SingleSource", "MultiSource"], ) def project(mkproject, request): name, num = request.param @@ -142,21 +141,19 @@ def project_instance(project): def test_project_has_version_of(project_instance): - assert hasattr(project_instance, 'version_of_primary') + assert hasattr(project_instance, "version_of_primary") def test_project_has_source_of(project_instance): - assert hasattr(project_instance, 'source_of_primary') + assert hasattr(project_instance, "source_of_primary") def test_project_has_version_of_primary(project_instance): - assert project_instance.version_of_primary == '1' + assert project_instance.version_of_primary == "1" def test_project_has_source_of_primary(project_instance): - assert local.path( - project_instance.source_of_primary - ).name == 'VersionSource_0' + assert local.path(project_instance.source_of_primary).name == "VersionSource_0" def test_project_source_must_contain_elements(): @@ -193,19 +190,19 @@ def test_filters_matches_return_only_one_version(project, filter_test): # pylin def test_filters_named_unchanged_if_unmatched(project): # pylint: disable=unused-variable - um_filter = {'not-in': '1'} + um_filter = {"not-in": "1"} filtered = __add_named_filters__(project, um_filter) for src in filtered.SOURCE: assert not isinstance(src, SingleVersionFilter) -@pytest.fixture(scope='class', params=discovered().values()) +@pytest.fixture(scope="class", params=discovered().values()) def prj_cls(request): return request.param def test_default_projects_containerimage_is_optional_on_subclass(prj_cls): - if not hasattr(prj_cls, 'CONTAINER'): + if not hasattr(prj_cls, "CONTAINER"): prj = prj_cls() assert prj.container is not None diff --git a/tests/settings/test_configuration.py b/tests/settings/test_configuration.py index ffc479018..94487d3eb 100644 --- a/tests/settings/test_configuration.py +++ b/tests/settings/test_configuration.py @@ -11,104 +11,102 @@ 
@pytest.fixture def bb(): - yield Configuration('bb') + yield Configuration("bb") def test_inner_nodes_dict_can_be_inner_node(): - assert _INNER_NODE_VALUE.is_valid({'default': 0, 'desc': 'a'}) + assert _INNER_NODE_VALUE.is_valid({"default": 0, "desc": "a"}) def test_inner_nodes_dict_can_be_nested_once(): - assert _INNER_NODE_SCHEMA.is_valid({'a': {'default': 0, 'desc': 'a'}}) + assert _INNER_NODE_SCHEMA.is_valid({"a": {"default": 0, "desc": "a"}}) def test_inner_nodes_inner_node_value_can_be_assigned(bb): - bb['a'] = {'default': 0, 'desc': 'a'} + bb["a"] = {"default": 0, "desc": "a"} assert _INNER_NODE_SCHEMA.is_valid(bb.node) def test_inner_nodes_inner_node_needs_to_be_initialized(): - cfg = Configuration('fresh') - cfg['a'] = {'default': 0, 'desc': 'a'} + cfg = Configuration("fresh") + cfg["a"] = {"default": 0, "desc": "a"} - assert not hasattr(cfg['a'].node, 'value') + assert not hasattr(cfg["a"].node, "value") cfg.init_from_env() - assert hasattr(cfg['a'], 'value') + assert hasattr(cfg["a"], "value") def test_simple_construction(bb): """Test simple construction.""" - bb['test'] = 42 - assert repr(bb['test']) == 'BB_TEST=42' - assert str(bb['test']) == '42' - assert type(bb['test']) == Configuration + bb["test"] = 42 + assert repr(bb["test"]) == "BB_TEST=42" + assert str(bb["test"]) == "42" + assert type(bb["test"]) == Configuration def test_value(bb): """Test value retrieval.""" - bb['x'] = {"y": {"value": None}, "z": {"value": 2}} - assert bb['x']['y'].value == None - assert bb['x']['z'].value == 2 - assert repr(bb['x'].value) == "BB_X_Y=null\nBB_X_Z=2" + bb["x"] = {"y": {"value": None}, "z": {"value": 2}} + assert bb["x"]["y"].value == None + assert bb["x"]["z"].value == 2 + assert repr(bb["x"].value) == "BB_X_Y=null\nBB_X_Z=2" def test_append_to_list(bb): - bb['t'] = [] - assert repr(bb['t']) == 'BB_T="[]"' + bb["t"] = [] + assert repr(bb["t"]) == 'BB_T="[]"' - bb['t'] += 'a' - assert repr(bb['t']) == 'BB_T="[a]"' + bb["t"] += "a" + assert repr(bb["t"]) == 
'BB_T="[a]"' def test_append_to_scalar(bb): - bb['t'] = 0 - assert repr(bb['t']) == "BB_T=0" + bb["t"] = 0 + assert repr(bb["t"]) == "BB_T=0" with pytest.raises(TypeError): - bb['t'] += 2 + bb["t"] += 2 def test_conversion_to_int(bb): - bb['i'] = 1 - assert int(bb['i']) == 1 + bb["i"] = 1 + assert int(bb["i"]) == 1 - bb['d'] = [] + bb["d"] = [] with pytest.raises(TypeError): - int(bb['d']) + int(bb["d"]) def test_conversion_to_bool(bb): - bb['b'] = True - assert bool(bb['b']) == True + bb["b"] = True + assert bool(bb["b"]) == True - bb['b'] = [] - assert bool(bb['b']) == False + bb["b"] = [] + assert bool(bb["b"]) == False def test_representation(bb): - bb['int'] = {'default': 3} - assert repr(bb['int']) == 'BB_INT=3' + bb["int"] = {"default": 3} + assert repr(bb["int"]) == "BB_INT=3" - bb['str'] = {'default': 'test'} - assert repr(bb['str']) == 'BB_STR=test' + bb["str"] = {"default": "test"} + assert repr(bb["str"]) == "BB_STR=test" - bb['bool'] = {'default': True} - assert repr(bb['bool']) == 'BB_BOOL=true' + bb["bool"] = {"default": True} + assert repr(bb["bool"]) == "BB_BOOL=true" - bb['dict'] = {'default': {'test': True}} - assert repr(bb['dict']) == 'BB_DICT="{test: true}"' + bb["dict"] = {"default": {"test": True}} + assert repr(bb["dict"]) == 'BB_DICT="{test: true}"' - bb['uuid'] = {'default': uuid.UUID('cc3702ca-699a-4aa6-8226-4c938f294d9b')} - assert repr(bb['uuid']) == 'BB_UUID=cc3702ca-699a-4aa6-8226-4c938f294d9b' + bb["uuid"] = {"default": uuid.UUID("cc3702ca-699a-4aa6-8226-4c938f294d9b")} + assert repr(bb["uuid"]) == "BB_UUID=cc3702ca-699a-4aa6-8226-4c938f294d9b" - bb['nested_uuid'] = { - 'A': { - 'default': { - 'a': uuid.UUID('cc3702ca-699a-4aa6-8226-4c938f294d9b') - } - } + bb["nested_uuid"] = { + "A": {"default": {"a": uuid.UUID("cc3702ca-699a-4aa6-8226-4c938f294d9b")}} } - assert repr(bb['nested_uuid']['A'].value) == \ - 'BB_NESTED_UUID_A="{a: cc3702ca-699a-4aa6-8226-4c938f294d9b}"' + assert ( + repr(bb["nested_uuid"]["A"].value) + == 
'BB_NESTED_UUID_A="{a: cc3702ca-699a-4aa6-8226-4c938f294d9b}"' + ) diff --git a/tests/settings/test_path.py b/tests/settings/test_path.py index cb5cc65bd..06eaca2ea 100644 --- a/tests/settings/test_path.py +++ b/tests/settings/test_path.py @@ -8,8 +8,7 @@ import pytest import yaml -from benchbuild.utils.settings import (ConfigPath, path_constructor, - path_representer) +from benchbuild.utils.settings import ConfigPath, path_constructor, path_representer def test_root_path(): @@ -75,4 +74,4 @@ def test_path_validation(capsys, tmp_path, monkeypatch): captured = capsys.readouterr() # Should print the path requirement message - assert "is required by your configuration." in captured.out + assert "is required by your configuration." in captured.out \ No newline at end of file diff --git a/tests/settings/test_yaml.py b/tests/settings/test_yaml.py index 2c81129fe..7fb2596fc 100644 --- a/tests/settings/test_yaml.py +++ b/tests/settings/test_yaml.py @@ -1,6 +1,7 @@ """ Test YAML functions from benchbuild's settings module. 
""" + import unittest import uuid @@ -15,16 +16,18 @@ class FakeLoader(yaml.SafeLoader): """FaketLoader for unit-testing.""" + pass class FakeDumper(yaml.SafeDumper): """FakeDumper for unit-testing.""" + pass -TEST_UUID = 'cc3702ca-699a-4aa6-8226-4c938f294d9b' -EXPECTED_UUID_OBJ = {'test': uuid.UUID(TEST_UUID)} +TEST_UUID = "cc3702ca-699a-4aa6-8226-4c938f294d9b" +EXPECTED_UUID_OBJ = {"test": uuid.UUID(TEST_UUID)} EXPECTED_UUID_SCALAR = "test: !uuid 'cc3702ca-699a-4aa6-8226-4c938f294d9b'\n" UUID_SCALAR = "{{'test': !uuid '{uuid}'}}".format(uuid=TEST_UUID) UUID_OUT = "{{test: {uuid}}}".format(uuid=TEST_UUID) @@ -36,18 +39,16 @@ class TestUUID(unittest.TestCase): def test_uuid_resolver(self): """Test dump and load of uuid objects.""" - uuid_in = {'test': uuid.UUID(TEST_UUID)} + uuid_in = {"test": uuid.UUID(TEST_UUID)} yaml.add_representer(uuid.UUID, uuid_representer, Dumper=FakeDumper) uuid_add_implicit_resolver(loader=FakeLoader, dumper=FakeDumper) self.assertEqual( yaml.dump(uuid_in, Dumper=FakeDumper), - 'test: cc3702ca-699a-4aa6-8226-4c938f294d9b\n' - ) - self.assertEqual( - yaml.load(UUID_OUT, Loader=FakeLoader), EXPECTED_UUID_OBJ + "test: cc3702ca-699a-4aa6-8226-4c938f294d9b\n", ) + self.assertEqual(yaml.load(UUID_OUT, Loader=FakeLoader), EXPECTED_UUID_OBJ) def test_uuid_construction(self): """Test uuid construction from scalar YAML nodes.""" @@ -58,9 +59,5 @@ def test_uuid_construction(self): def test_uuid_representer(self): """Test uuid representation as a scalar YAML node.""" - yaml.add_representer( - uuid.UUID, uuid_representer, Dumper=yaml.SafeDumper - ) - self.assertEqual( - yaml.safe_dump(EXPECTED_UUID_OBJ), EXPECTED_UUID_SCALAR - ) + yaml.add_representer(uuid.UUID, uuid_representer, Dumper=yaml.SafeDumper) + self.assertEqual(yaml.safe_dump(EXPECTED_UUID_OBJ), EXPECTED_UUID_SCALAR) diff --git a/tests/source/conftest.py b/tests/source/conftest.py index 83c1547a8..99a6e7bec 100644 --- a/tests/source/conftest.py +++ b/tests/source/conftest.py @@ -10,8 +10,10 
@@ class VersionSource(FetchableSource): known_versions: tp.List[str] def __init__( - self, local: str, remote: tp.Union[str, tp.Dict[str, str]], - known_versions: tp.List[str] + self, + local: str, + remote: tp.Union[str, tp.Dict[str, str]], + known_versions: tp.List[str], ): super().__init__(local, remote) @@ -22,10 +24,10 @@ def fetch(self) -> pb.LocalPath: @property def default(self) -> Variant: - return Variant(owner=self, version='1') + return Variant(owner=self, version="1") def version(self, target_dir: str, version: str) -> pb.LocalPath: - return '.' + return "." def versions(self) -> tp.List[Variant]: return [Variant(self, str(v)) for v in self.known_versions] @@ -41,7 +43,7 @@ def make_source(): def _make_version_source(versions: tp.List[int]): str_versions = [str(v) for v in versions] - return VersionSource('ls', 'rs', str_versions) + return VersionSource("ls", "rs", str_versions) return _make_version_source @@ -80,7 +82,6 @@ class Config0(CAWSource): """ def versions_with_context(self, ctx: Revision) -> tp.Sequence[Variant]: - if ctx.primary.version == "0": ret = [Variant(self, "v0.1"), Variant(self, "v0.2")] return ret @@ -95,7 +96,6 @@ class Config1(CAWSource): """ def versions_with_context(self, ctx: Revision) -> tp.Sequence[Variant]: - if ctx.primary.version == "1": ret = [Variant(self, "v1.1"), Variant(self, "v1.2")] return ret @@ -104,9 +104,9 @@ def versions_with_context(self, ctx: Revision) -> tp.Sequence[Variant]: @pytest.fixture def caw_src_0() -> FetchableSource: - return Config0(local='local', remote='remote') + return Config0(local="local", remote="remote") @pytest.fixture def caw_src_1() -> FetchableSource: - return Config1(local='local', remote='remote') + return Config1(local="local", remote="remote") diff --git a/tests/source/test_caw_source.py b/tests/source/test_caw_source.py index f804ac75f..26b1998ce 100644 --- a/tests/source/test_caw_source.py +++ b/tests/source/test_caw_source.py @@ -13,6 +13,7 @@ class TestProject(Project): 
"""Test project that does nothing.""" + NAME = "test" DOMAIN = "test" GROUP = "test" @@ -34,14 +35,10 @@ def test_enumerate_output(make_source, caw_src_0, caw_src_1): TestProject.SOURCE = [src_primary, caw_src_0, caw_src_1] expected_variants = [ - [Variant(src_primary, "0"), - Variant(caw_src_0, "v0.1")], - [Variant(src_primary, "0"), - Variant(caw_src_0, "v0.2")], - [Variant(src_primary, "1"), - Variant(caw_src_1, "v1.1")], - [Variant(src_primary, "1"), - Variant(caw_src_1, "v1.2")], + [Variant(src_primary, "0"), Variant(caw_src_0, "v0.1")], + [Variant(src_primary, "0"), Variant(caw_src_0, "v0.2")], + [Variant(src_primary, "1"), Variant(caw_src_1, "v1.1")], + [Variant(src_primary, "1"), Variant(caw_src_1, "v1.2")], ] revs = enumerate_revisions(TestProject) @@ -59,12 +56,12 @@ def test_caw_filter(make_source, caw_src_0): TestProject.SOURCE = [src_primary, v_filter] revs = enumerate_revisions(TestProject) - assert v_filter.is_context_free() is False, \ - "is_context_free needs to be delegated to child." + assert v_filter.is_context_free() is False, ( + "is_context_free needs to be delegated to child." 
+ ) expected_variants = [ - [Variant(src_primary, "0"), - Variant(v_filter, "v0.2")], + [Variant(src_primary, "0"), Variant(v_filter, "v0.2")], [Variant(src_primary, "1")], ] @@ -80,17 +77,17 @@ def test_source_mapping(make_source, caw_src_0): src_secondary = make_source(["s1", "s2"]) TestProject.SOURCE = [src_primary, caw_src_0, src_secondary] - with patch.dict( - ProjectRegistry.projects, {"test/test": TestProject}, clear=True - ): + with patch.dict(ProjectRegistry.projects, {"test/test": TestProject}, clear=True): res: tp.Mapping[str, tp.Type[Project]] = populate(["test/test"]) assert res["test/test"] == TestProject expected_revisions = [ str( Revision( - TestProject, Variant(src_primary, "0"), - Variant(caw_src_0, "v0.1"), Variant(src_secondary, "s1") + TestProject, + Variant(src_primary, "0"), + Variant(caw_src_0, "v0.1"), + Variant(src_secondary, "s1"), ) ) ] diff --git a/tests/source/test_source.py b/tests/source/test_source.py index 60d6d7627..1ac2c691e 100644 --- a/tests/source/test_source.py +++ b/tests/source/test_source.py @@ -15,8 +15,10 @@ class SimpleSource(FetchableSource): test_versions: tp.List[str] = attr.ib() def __init__( - self, local: str, remote: tp.Union[str, tp.Dict[str, str]], - test_versions: tp.List[str] + self, + local: str, + remote: tp.Union[str, tp.Dict[str, str]], + test_versions: tp.List[str], ): super().__init__(local, remote) @@ -27,7 +29,7 @@ def default(self) -> Variant: return Variant(owner=self, version=self.test_versions[0]) def version(self, target_dir: str, version: str) -> pb.LocalPath: - return pb.local.path('.') / f'bb-test-{version}' + return pb.local.path(".") / f"bb-test-{version}" def versions(self) -> tp.Sequence[Variant]: return [Variant(self, v) for v in self.test_versions] @@ -46,14 +48,14 @@ def versions_b(): @pytest.fixture def src_a(versions_a): return SimpleSource( - local='src_A_local', remote='src_A_remote', test_versions=versions_a + local="src_A_local", remote="src_A_remote", test_versions=versions_a ) 
@pytest.fixture def src_b(versions_b): return SimpleSource( - local='src_B_local', remote='src_B_remote', test_versions=versions_b + local="src_B_local", remote="src_B_remote", test_versions=versions_b ) @@ -66,9 +68,7 @@ def test_revision_from_str_can_link_revision_from_single() -> None: rev = source.revision_from_str(select, prj_cls) assert rev, "No revision has been created." - assert rev.source_by_name( - expected - ).local == src.local, "wrong source in revision." + assert rev.source_by_name(expected).local == src.local, "wrong source in revision." def test_revision_from_str_can_select_revision_from_multiple() -> None: @@ -106,17 +106,13 @@ def test_revision_from_str_finds_all_requested_revisions(src_a, src_b) -> None: assert rev, "No context has been created." - assert rev.source_by_name( - expected - ).local == src.local, "src not in context." + assert rev.source_by_name(expected).local == src.local, "src not in context." with pytest.raises(KeyError): - assert rev.source_by_name( - not_expected - ) == caw_src, "caw_src not in context." + assert rev.source_by_name(not_expected) == caw_src, "caw_src not in context." 
-#def test_base_context(src_a): +# def test_base_context(src_a): # var = src_a.default # ctx = source.context(var) # @@ -175,19 +171,19 @@ def test_single_versions_filter(make_source): src_1 = make_source([0]) src_2 = make_source(range(2)) - src = source.SingleVersionFilter(src_1, '0') + src = source.SingleVersionFilter(src_1, "0") src_vs = [str(v) for v in src.versions()] - assert ['0'] == src_vs + assert ["0"] == src_vs - src = source.SingleVersionFilter(src_2, '-1') + src = source.SingleVersionFilter(src_2, "-1") src_vs = [str(v) for v in src.versions()] assert [] == src_vs - src = source.SingleVersionFilter(src_2, '1') + src = source.SingleVersionFilter(src_2, "1") src_vs = [str(v) for v in src.versions()] - assert ['1'] == src_vs + assert ["1"] == src_vs - src = source.SingleVersionFilter(src_2, '2') + src = source.SingleVersionFilter(src_2, "2") src_vs = [str(v) for v in src.versions()] assert [] == src_vs @@ -200,74 +196,68 @@ def test_explore_with_filter(make_source): """ src_1 = make_source(range(2)) - src = source.SingleVersionFilter(src_1, '0') + src = source.SingleVersionFilter(src_1, "0") src_vs = [str(v) for v in src.versions()] src_explore = [str(v) for v in src.explore()] - assert ['0'] == src_vs - assert ['0', '1'] == src_explore + assert ["0"] == src_vs + assert ["0", "1"] == src_explore def test_git_submodule_versions_do_not_get_expanded(): - git_repo = source.Git('remote.git', 'local.git', clone=False) - git_repo.versions = mock.MagicMock(name='versions') - git_repo.versions.return_value = ['1', '2', '3'] + git_repo = source.Git("remote.git", "local.git", clone=False) + git_repo.versions = mock.MagicMock(name="versions") + git_repo.versions.return_value = ["1", "2", "3"] - git_repo_sub = source.GitSubmodule( - 'remote.sub.git', 'local.git/sub', clone=False - ) - git_repo_sub.versions = mock.MagicMock(name='versions') - git_repo_sub.versions.return_value = ['sub1', 'sub2', 'sub3'] + git_repo_sub = source.GitSubmodule("remote.sub.git", 
"local.git/sub", clone=False) + git_repo_sub.versions = mock.MagicMock(name="versions") + git_repo_sub.versions.return_value = ["sub1", "sub2", "sub3"] variants = list(source.product(git_repo, git_repo_sub)) - expected_variants = [('1',), ('2',), ('3',)] + expected_variants = [("1",), ("2",), ("3",)] assert variants == expected_variants -@mock.patch('benchbuild.source.git.base.target_prefix') +@mock.patch("benchbuild.source.git.base.target_prefix") def test_git_repo_can_be_fetched(mocked_prefix, simple_repo): base_dir, repo = simple_repo mocked_prefix.return_value = str(base_dir) - a_repo = source.Git(remote=repo.git_dir, local='test.git') + a_repo = source.Git(remote=repo.git_dir, local="test.git") cache_path = a_repo.fetch() - assert (base_dir / 'test.git').exists() - assert cache_path == str(base_dir / 'test.git') + assert (base_dir / "test.git").exists() + assert cache_path == str(base_dir / "test.git") -@mock.patch('benchbuild.source.git.base.target_prefix') +@mock.patch("benchbuild.source.git.base.target_prefix") def test_git_repo_clones_submodules(mocked_prefix, repo_with_submodule): base_dir, repo = repo_with_submodule mocked_prefix.return_value = str(base_dir) - a_repo = source.Git(remote=repo.git_dir, local='test.git') + a_repo = source.Git(remote=repo.git_dir, local="test.git") a_repo.fetch() for submodule in repo.submodules: - expected_sm_path = base_dir / 'test.git' / submodule.path + expected_sm_path = base_dir / "test.git" / submodule.path assert expected_sm_path.exists() assert expected_sm_path.list() != [] -@mock.patch('benchbuild.source.git.base.target_prefix') -def test_git_submodule_can_be_fetched_outside_main( - mocked_prefix, repo_with_submodule -): +@mock.patch("benchbuild.source.git.base.target_prefix") +def test_git_submodule_can_be_fetched_outside_main(mocked_prefix, repo_with_submodule): base_dir, repo = repo_with_submodule mocked_prefix.return_value = str(base_dir) - a_repo = source.Git(remote=repo.git_dir, local='test.git') + a_repo = 
source.Git(remote=repo.git_dir, local="test.git") a_repo.fetch() for submodule in repo.submodules: - expected_sub_path_outside = base_dir / 'outside_main.git' - expected_sub_path_inside = base_dir / 'test.git' / submodule.path + expected_sub_path_outside = base_dir / "outside_main.git" + expected_sub_path_inside = base_dir / "test.git" / submodule.path - a_sub_repo = source.GitSubmodule( - remote=submodule.url, local='outside_main.git' - ) + a_sub_repo = source.GitSubmodule(remote=submodule.url, local="outside_main.git") a_sub_repo.fetch() assert expected_sub_path_outside.exists() @@ -277,24 +267,22 @@ def test_git_submodule_can_be_fetched_outside_main( assert expected_sub_path_inside.list() != [] -@mock.patch('benchbuild.source.git.base.target_prefix') +@mock.patch("benchbuild.source.git.base.target_prefix") def test_git_submodule_can_be_fetched_inside_fetched_main( mocked_prefix, repo_with_submodule ): base_dir, repo = repo_with_submodule mocked_prefix.return_value = str(base_dir) - a_repo = source.Git(remote=repo.git_dir, local='test.git') + a_repo = source.Git(remote=repo.git_dir, local="test.git") a_repo.fetch() for submodule in repo.submodules: - expected_sub_path = base_dir / 'test.git' / submodule.path - expected_flat_sub_path = base_dir / f'test.git-{submodule.path}' + expected_sub_path = base_dir / "test.git" / submodule.path + expected_flat_sub_path = base_dir / f"test.git-{submodule.path}" - sub_path = f'test.git/{submodule.path}' - a_sub_repo = source.GitSubmodule( - remote=submodule.url, local=str(sub_path) - ) + sub_path = f"test.git/{submodule.path}" + a_sub_repo = source.GitSubmodule(remote=submodule.url, local=str(sub_path)) cache_patch = a_sub_repo.fetch() assert expected_sub_path.exists() @@ -304,14 +292,14 @@ def test_git_submodule_can_be_fetched_inside_fetched_main( assert cache_patch == expected_flat_sub_path -@mock.patch('benchbuild.source.git.base.target_prefix') +@mock.patch("benchbuild.source.git.base.target_prefix") def 
test_git_repo_can_list_versions(mocked_prefix, simple_repo): base_dir, repo = simple_repo mocked_prefix.return_value = str(base_dir) master = repo.head.reference - a_repo = source.Git(remote=repo.git_dir, local='test.git') - expected_versions = [v.newhexsha[0:a_repo.limit] for v in master.log()] + a_repo = source.Git(remote=repo.git_dir, local="test.git") + expected_versions = [v.newhexsha[0 : a_repo.limit] for v in master.log()] found_versions = [str(v) for v in reversed(a_repo.versions())] assert expected_versions == found_versions diff --git a/tests/test_213_error_tracking.py b/tests/test_213_error_tracking.py index d78e7ef25..7c3e19ba4 100644 --- a/tests/test_213_error_tracking.py +++ b/tests/test_213_error_tracking.py @@ -1,6 +1,7 @@ """ Test issue 213: Wrong error tracking for failed commands """ + import unittest import attr @@ -38,7 +39,7 @@ class EmptyProject(prj.Project): DOMAIN = "debug" GROUP = "debug" SOURCE = [nosource()] - CONTAINER = declarative.ContainerImage().from_('benchbuild:alpine') + CONTAINER = declarative.ContainerImage().from_("benchbuild:alpine") def compile(self): pass @@ -68,24 +69,24 @@ class TrackErrorsTestCase(unittest.TestCase): def test_exception(self): plan = list( - tasks.generate_plan({"test_exception": ExceptionExp}.values(), - {"test_empty": EmptyProject}.values()) - ) - self.assertEqual( - len(plan), 1, msg="The test plan must have a length of 1." + tasks.generate_plan( + {"test_exception": ExceptionExp}.values(), + {"test_empty": EmptyProject}.values(), + ) ) + self.assertEqual(len(plan), 1, msg="The test plan must have a length of 1.") failed = tasks.execute_plan(plan) self.assertEqual(len(failed), 1, msg="One step must fail!") def test_error_state(self): plan = list( - tasks.generate_plan({"test_error_state": ErrorStateExp}.values(), - {"test_empty": EmptyProject}.values()) - ) - self.assertEqual( - len(plan), 1, msg="The test plan must have a length of 1." 
+ tasks.generate_plan( + {"test_error_state": ErrorStateExp}.values(), + {"test_empty": EmptyProject}.values(), + ) ) + self.assertEqual(len(plan), 1, msg="The test plan must have a length of 1.") failed = tasks.execute_plan(plan) self.assertEqual(len(failed), 1, msg="One step must fail!") diff --git a/tests/test_actions.py b/tests/test_actions.py index 10e332321..45ca16376 100644 --- a/tests/test_actions.py +++ b/tests/test_actions.py @@ -1,6 +1,7 @@ """ Test the actions module. """ + import copy import importlib import sys @@ -24,7 +25,7 @@ class EmptyProject(Project): DOMAIN = "debug" GROUP = "debug" SOURCE = [nosource()] - CONTAINER = ContainerImage().from_('benchbuild:alpine') + CONTAINER = ContainerImage().from_("benchbuild:alpine") def build(self): pass @@ -61,7 +62,6 @@ def __call__(self) -> a.StepResult: class ActionsTestCase(unittest.TestCase): - def test_for_all_pass(self): ep = EmptyProject() actn = a.RequireAll(actions=[PassAlways(ep)]) @@ -79,20 +79,12 @@ def test_for_all_except(self): class TestProject(Project): - NAME = '-' - DOMAIN = '-' - GROUP = '-' + NAME = "-" + DOMAIN = "-" + GROUP = "-" SOURCE = [ - HTTP(local='src-a', remote={ - 'v1a': '-', - 'v2a': '-', - 'v3': '-' - }), - HTTP(local='src-b', remote={ - 'v1b': '-', - 'v2b': '-', - 'v3': '-' - }) + HTTP(local="src-a", remote={"v1a": "-", "v2a": "-", "v3": "-"}), + HTTP(local="src-b", remote={"v1b": "-", "v2b": "-", "v3": "-"}), ] def compile(self): @@ -109,10 +101,9 @@ def t_project() -> tp.Type[Project]: class TestExperiment(Experiment): - NAME = '-' + NAME = "-" - def actions_for_project(self, - project: Project) -> tp.MutableSequence[a.Step]: + def actions_for_project(self, project: Project) -> tp.MutableSequence[a.Step]: return [] @@ -121,13 +112,13 @@ def test_SetProjectVersion_can_partially_update() -> None: context = exp.sample(TestProject)[0] prj = TestProject(revision=context) - assert prj.active_revision.variant_by_name("src-a").version == 'v1a' + assert 
prj.active_revision.variant_by_name("src-a").version == "v1a" - spv = SetProjectVersion(prj, RevisionStr('v2a')) + spv = SetProjectVersion(prj, RevisionStr("v2a")) with pytest.raises(ProcessExecutionError): spv() - assert prj.active_revision.variant_by_name("src-a").version == 'v2a' + assert prj.active_revision.variant_by_name("src-a").version == "v2a" def test_SetProjectVersion_can_update_full() -> None: @@ -135,15 +126,15 @@ def test_SetProjectVersion_can_update_full() -> None: context = exp.sample(TestProject)[0] prj = TestProject(revision=context) - assert prj.active_revision.variant_by_name("src-a").version == 'v1a' - assert prj.active_revision.variant_by_name("src-b").version == 'v1b' + assert prj.active_revision.variant_by_name("src-a").version == "v1a" + assert prj.active_revision.variant_by_name("src-b").version == "v1b" - spv = SetProjectVersion(prj, RevisionStr('v2a'), RevisionStr('v2b')) + spv = SetProjectVersion(prj, RevisionStr("v2a"), RevisionStr("v2b")) with pytest.raises(ProcessExecutionError): spv() - assert prj.active_revision.variant_by_name("src-a").version == 'v2a' - assert prj.active_revision.variant_by_name("src-b").version == 'v2b' + assert prj.active_revision.variant_by_name("src-a").version == "v2a" + assert prj.active_revision.variant_by_name("src-b").version == "v2b" def test_SetProjectVersion_suffers_from_version_collision() -> None: @@ -151,15 +142,15 @@ def test_SetProjectVersion_suffers_from_version_collision() -> None: context = exp.sample(TestProject)[0] prj = TestProject(revision=context) - assert prj.active_revision.variant_by_name("src-a").version == 'v1a' - assert prj.active_revision.variant_by_name("src-b").version == 'v1b' + assert prj.active_revision.variant_by_name("src-a").version == "v1a" + assert prj.active_revision.variant_by_name("src-b").version == "v1b" - spv = SetProjectVersion(prj, RevisionStr('v3')) + spv = SetProjectVersion(prj, RevisionStr("v3")) with pytest.raises(ProcessExecutionError): spv() - assert 
prj.active_revision.variant_by_name("src-a").version == 'v3' - assert prj.active_revision.variant_by_name("src-b").version == 'v3' + assert prj.active_revision.variant_by_name("src-a").version == "v3" + assert prj.active_revision.variant_by_name("src-b").version == "v3" def test_SetProjectVersion_can_set_revision_through_filter(t_project) -> None: @@ -168,20 +159,20 @@ def test_SetProjectVersion_can_set_revision_through_filter(t_project) -> None: """ source_backup = copy.deepcopy(t_project.SOURCE) - project_cls = __add_single_filter__(t_project, 'v3') + project_cls = __add_single_filter__(t_project, "v3") exp = TestExperiment(projects=[project_cls]) context = exp.sample(project_cls)[0] prj = project_cls(revision=context) - assert prj.active_revision.variant_by_name("src-a").version == 'v3' - assert prj.active_revision.variant_by_name("src-b").version == 'v1b' + assert prj.active_revision.variant_by_name("src-a").version == "v3" + assert prj.active_revision.variant_by_name("src-b").version == "v1b" - spv = SetProjectVersion(prj, RevisionStr('v1a')) + spv = SetProjectVersion(prj, RevisionStr("v1a")) with pytest.raises(ProcessExecutionError): spv() - assert prj.active_revision.variant_by_name("src-a").version == 'v1a' - assert prj.active_revision.variant_by_name("src-b").version == 'v1b' + assert prj.active_revision.variant_by_name("src-a").version == "v1a" + assert prj.active_revision.variant_by_name("src-b").version == "v1b" def test_SetProjectVersion_raises_error_when_no_revision_is_found() -> None: @@ -192,10 +183,10 @@ def test_SetProjectVersion_raises_error_when_no_revision_is_found() -> None: context = exp.sample(TestProject)[0] prj = TestProject(revision=context) - assert prj.active_revision.variant_by_name("src-a").version == 'v1a' - assert prj.active_revision.variant_by_name("src-b").version == 'v1b' + assert prj.active_revision.variant_by_name("src-a").version == "v1a" + assert prj.active_revision.variant_by_name("src-b").version == "v1b" with 
pytest.raises( - ValueError, match='Revisions (.+) not found in any available source.' + ValueError, match="Revisions (.+) not found in any available source." ): - spv = SetProjectVersion(prj, RevisionStr('does-not-exist')) + spv = SetProjectVersion(prj, RevisionStr("does-not-exist")) diff --git a/tests/test_dict.py b/tests/test_dict.py index 3096575ba..88d8f8396 100644 --- a/tests/test_dict.py +++ b/tests/test_dict.py @@ -1,6 +1,7 @@ """ Test the dict module. """ + import unittest from benchbuild.utils.dict import ExtensibleDict as edict @@ -8,43 +9,42 @@ class DictTestCase(unittest.TestCase): - def test_store_value(self): a = edict() - a['TEST'] = 0 - self.assertEqual(a['TEST'], 0) + a["TEST"] = 0 + self.assertEqual(a["TEST"], 0) def test_nesting_storage(self): a = edict() - a['TEST'] = 0 + a["TEST"] = 0 with a(TEST=1): - self.assertEqual(a['TEST'], 1) - self.assertEqual(a['TEST'], 0) + self.assertEqual(a["TEST"], 1) + self.assertEqual(a["TEST"], 0) def test_extending_storage_single_element(self): a = edict() - a['TEST'] = 0 + a["TEST"] = 0 with a(extender_fn=extend_as_list, TEST=1): - self.assertEqual(a['TEST'], [0, 1]) - self.assertEqual(a['TEST'], 0) + self.assertEqual(a["TEST"], [0, 1]) + self.assertEqual(a["TEST"], 0) def test_extending_storage_list_element(self): a = edict() - a['TEST'] = 0 + a["TEST"] = 0 with a(extender_fn=extend_as_list, TEST=[1, 2]): - self.assertEqual(a['TEST'], [0, 1, 2]) - self.assertEqual(a['TEST'], 0) + self.assertEqual(a["TEST"], [0, 1, 2]) + self.assertEqual(a["TEST"], 0) def test_default_extender_fn(self): a = edict(extend_as_list) - a['TEST'] = 0 + a["TEST"] = 0 with a(TEST=[1, 2]): - self.assertEqual(a['TEST'], [0, 1, 2]) - self.assertEqual(a['TEST'], 0) + self.assertEqual(a["TEST"], [0, 1, 2]) + self.assertEqual(a["TEST"], 0) def test_non_existing_key(self): b = edict(extend_as_list) b.clear() with b(TEST=[1, 2]): - self.assertEqual(b['TEST'], [1, 2]) - self.assertNotIn('TEST', b) + self.assertEqual(b["TEST"], [1, 2]) + 
self.assertNotIn("TEST", b) diff --git a/tests/test_missing_extension.py b/tests/test_missing_extension.py index 1b28d5377..bc83af2f6 100644 --- a/tests/test_missing_extension.py +++ b/tests/test_missing_extension.py @@ -16,6 +16,7 @@ make any assumptions about the quality/types of the 'extensions'. This can be enforced on the project level using attr's validators. """ + import pytest from benchbuild import extensions, project, source @@ -23,11 +24,11 @@ class DummyPrj(project.Project): - NAME: str = 'TestMissingExtension' - GROUP: str = 'TestMissingExtension' - DOMAIN: str = 'TestMissingExtension' + NAME: str = "TestMissingExtension" + GROUP: str = "TestMissingExtension" + DOMAIN: str = "TestMissingExtension" SOURCE = [source.nosource()] - CONTAINER: ContainerImage = ContainerImage().from_('benchbuild:alpine') + CONTAINER: ContainerImage = ContainerImage().from_("benchbuild:alpine") def run_tests(self): raise NotImplementedError() diff --git a/tests/test_path.py b/tests/test_path.py index baa662053..eb3acfb7c 100644 --- a/tests/test_path.py +++ b/tests/test_path.py @@ -3,7 +3,6 @@ class TestPathToList(unittest.TestCase): - def test_path_to_list(self): from benchbuild.utils.path import path_to_list test_path = f"a{os.pathsep}b" @@ -18,6 +17,7 @@ def test_path_to_list(self): def test_list_to_path(self): from benchbuild.utils.path import list_to_path + p = list_to_path(["a", "b"]) expected = f"a{os.pathsep}b" self.assertEqual(p, expected) diff --git a/tests/test_registry.py b/tests/test_registry.py index 5d1c178d3..14581d643 100644 --- a/tests/test_registry.py +++ b/tests/test_registry.py @@ -37,22 +37,25 @@ def project_registry(): return ProjectRegistry -def make_experiment(cls_name: str, - name: tp.Optional[str] = None, - bases: tp.Tuple[type, ...] = (Experiment,), - always_set: bool = True) -> tp.Type[Experiment]: +def make_experiment( + cls_name: str, + name: tp.Optional[str] = None, + bases: tp.Tuple[type, ...] 
= (Experiment,), + always_set: bool = True, +) -> tp.Type[Experiment]: """ Dynamically create a subclass of Experiment to test registration. """ if name or always_set: - return type(cls_name, bases, { - 'actions_for_project': empty_afp, - 'NAME': name - }) + return type(cls_name, bases, {"actions_for_project": empty_afp, "NAME": name}) - return type(cls_name, bases, { - 'actions_for_project': empty_afp, - }) + return type( + cls_name, + bases, + { + "actions_for_project": empty_afp, + }, + ) def make_project( @@ -65,7 +68,7 @@ def make_project( Dynamically create a subclass of Project to test registration """ - interface = {'run_tests': empty_run_tests, 'compile': empty_compile} + interface = {"run_tests": empty_run_tests, "compile": empty_compile} if attrs or always_set: interface.update(attrs) @@ -78,29 +81,29 @@ def test_experiment_registry_named(registry: ExperimentRegistry): """ An experiment must have a NAME to be registered. """ - cls = make_experiment('Child', 'Child') - assert 'Child' in registry.experiments - assert registry.experiments['Child'] == cls + cls = make_experiment("Child", "Child") + assert "Child" in registry.experiments + assert registry.experiments["Child"] == cls def test_project_registry_named(project_registry: ProjectRegistry): """ A project must have a NAME, GROUP and DOMAIN registered. 
""" - cls = make_project('Child', NAME='C', DOMAIN='CD', GROUP='CG') + cls = make_project("Child", NAME="C", DOMAIN="CD", GROUP="CG") - assert 'C/CG' in project_registry.projects - assert project_registry.projects['C/CG'] == cls - assert project_registry.projects['C/CG'].NAME == 'C' - assert project_registry.projects['C/CG'].DOMAIN == 'CD' - assert project_registry.projects['C/CG'].GROUP == 'CG' + assert "C/CG" in project_registry.projects + assert project_registry.projects["C/CG"] == cls + assert project_registry.projects["C/CG"].NAME == "C" + assert project_registry.projects["C/CG"].DOMAIN == "CD" + assert project_registry.projects["C/CG"].GROUP == "CG" def test_registry_unnamed(registry: ExperimentRegistry): """ An experiment must not lack a NAME attribute to be registered. """ - cls = make_experiment('UnnamedChild', always_set=False) + cls = make_experiment("UnnamedChild", always_set=False) assert cls not in registry.experiments.values() @@ -109,7 +112,7 @@ def test_project_registry_unnamed(project_registry: ProjectRegistry): A project must not lack a NAME, DOMAIN, or GROUP attribute to be registered. """ - cls = make_project('UnnamedChild', always_set=False) + cls = make_project("UnnamedChild", always_set=False) assert cls not in project_registry.projects.values() @@ -120,10 +123,10 @@ def test_registry_named_from_unnamed(registry: ExperimentRegistry): This is allowed, and should work. The partial experiment is not allowed to be included in the registry. 
""" - unnamed = make_experiment('UnnamedChild', always_set=False) - named = make_experiment('NamedFromUnnamed', 'Named', (unnamed,)) - assert 'Named' in registry.experiments - assert registry.experiments['Named'] == named + unnamed = make_experiment("UnnamedChild", always_set=False) + named = make_experiment("NamedFromUnnamed", "Named", (unnamed,)) + assert "Named" in registry.experiments + assert registry.experiments["Named"] == named assert unnamed not in registry.experiments.values() @@ -134,16 +137,15 @@ def test_project_registry_named_from_unnamed(project_registry: ProjectRegistry): This is allowed, and should work. The partial project is not allowed to be included in the registry. """ - unnamed = make_project('UnnamedChild', always_set=False) - named = make_project('NamedFromUnnamed', (unnamed,), - NAME='NC', - GROUP='NCG', - DOMAIN='NCD') - assert 'NC/NCG' in project_registry.projects - assert project_registry.projects['NC/NCG'] == named - assert project_registry.projects['NC/NCG'].NAME == 'NC' - assert project_registry.projects['NC/NCG'].DOMAIN == 'NCD' - assert project_registry.projects['NC/NCG'].GROUP == 'NCG' + unnamed = make_project("UnnamedChild", always_set=False) + named = make_project( + "NamedFromUnnamed", (unnamed,), NAME="NC", GROUP="NCG", DOMAIN="NCD" + ) + assert "NC/NCG" in project_registry.projects + assert project_registry.projects["NC/NCG"] == named + assert project_registry.projects["NC/NCG"].NAME == "NC" + assert project_registry.projects["NC/NCG"].DOMAIN == "NCD" + assert project_registry.projects["NC/NCG"].GROUP == "NCG" assert unnamed not in project_registry.projects.values() @@ -154,11 +156,10 @@ def test_registry_unnamed_from_named(registry: ExperimentRegistry): This would overwrite an existing experiment in the registry and is forbidden. 
""" - named = make_experiment('Child', 'Child') - unnamed = make_experiment('UnnamedFromNamed', - name=None, - bases=(named,), - always_set=False) + named = make_experiment("Child", "Child") + unnamed = make_experiment( + "UnnamedFromNamed", name=None, bases=(named,), always_set=False + ) assert unnamed not in registry.experiments.values() @@ -169,11 +170,14 @@ def test_project_registry_unnamed_from_named(project_registry: ProjectRegistry): This would overwrite an existing project in the registry and is forbidden. """ - named = make_project('Child', NAME='C', DOMAIN='CD', GROUP='CG') - unnamed = make_project('UnnamedFromNamed', (named,), - NAME=None, - DOMAIN=None, - GROUP=None, - always_set=False) + named = make_project("Child", NAME="C", DOMAIN="CD", GROUP="CG") + unnamed = make_project( + "UnnamedFromNamed", + (named,), + NAME=None, + DOMAIN=None, + GROUP=None, + always_set=False, + ) assert unnamed not in project_registry.projects.values() diff --git a/tests/test_requirements.py b/tests/test_requirements.py index 278113cae..14c2c2d25 100644 --- a/tests/test_requirements.py +++ b/tests/test_requirements.py @@ -1,6 +1,7 @@ """ Test the SLURM options generator. 
""" + import unittest import benchbuild.utils.requirements as req @@ -40,10 +41,10 @@ def test_merge_requirements(self): option = req.SlurmCoresPerSocket(4) other_option = req.SlurmCoresPerSocket(8) - merged_option = req.SlurmCoresPerSocket.merge_requirements( - option, other_option) + merged_option = req.SlurmCoresPerSocket.merge_requirements(option, other_option) merged_option_swapped = req.SlurmCoresPerSocket.merge_requirements( - other_option, option) + other_option, option + ) self.assertEqual(merged_option.cores, 8) self.assertEqual(merged_option_swapped.cores, 8) @@ -91,10 +92,10 @@ def test_merge_requirements(self): option = req.SlurmNiceness(4) other_option = req.SlurmNiceness(8) - merged_option = req.SlurmNiceness.merge_requirements( - option, other_option) + merged_option = req.SlurmNiceness.merge_requirements(option, other_option) merged_option_swapped = req.SlurmNiceness.merge_requirements( - other_option, option) + other_option, option + ) self.assertEqual(merged_option.niceness, 4) self.assertEqual(merged_option_swapped.niceness, 4) @@ -119,8 +120,7 @@ def test_init_hint(self): """ option = req.SlurmHint({req.SlurmHint.SlurmHints.compute_bound}) - self.assertSetEqual(option.hints, - {req.SlurmHint.SlurmHints.compute_bound}) + self.assertSetEqual(option.hints, {req.SlurmHint.SlurmHints.compute_bound}) def test_merge_requirements_disj(self): """ @@ -130,44 +130,52 @@ def test_merge_requirements_disj(self): other_option = req.SlurmHint({req.SlurmHint.SlurmHints.multithread}) merged_option = req.SlurmHint.merge_requirements(option, other_option) - merged_option_swapped = req.SlurmHint.merge_requirements( - other_option, option) + merged_option_swapped = req.SlurmHint.merge_requirements(other_option, option) self.assertSetEqual( - merged_option.hints, { + merged_option.hints, + { req.SlurmHint.SlurmHints.compute_bound, - req.SlurmHint.SlurmHints.multithread - }) + req.SlurmHint.SlurmHints.multithread, + }, + ) self.assertSetEqual( - 
merged_option_swapped.hints, { + merged_option_swapped.hints, + { req.SlurmHint.SlurmHints.compute_bound, - req.SlurmHint.SlurmHints.multithread - }) + req.SlurmHint.SlurmHints.multithread, + }, + ) def test_merge_requirements_additional(self): """ Checks if hint options are correctly merged together. """ option = req.SlurmHint({req.SlurmHint.SlurmHints.compute_bound}) - other_option = req.SlurmHint({ - req.SlurmHint.SlurmHints.multithread, - req.SlurmHint.SlurmHints.compute_bound - }) + other_option = req.SlurmHint( + { + req.SlurmHint.SlurmHints.multithread, + req.SlurmHint.SlurmHints.compute_bound, + } + ) merged_option = req.SlurmHint.merge_requirements(option, other_option) - merged_option_swapped = req.SlurmHint.merge_requirements( - other_option, option) + merged_option_swapped = req.SlurmHint.merge_requirements(other_option, option) self.assertSetEqual( - merged_option.hints, { + merged_option.hints, + { req.SlurmHint.SlurmHints.compute_bound, - req.SlurmHint.SlurmHints.multithread - }) + req.SlurmHint.SlurmHints.multithread, + }, + ) self.assertSetEqual( - merged_option_swapped.hints, { + merged_option_swapped.hints, + { req.SlurmHint.SlurmHints.compute_bound, - req.SlurmHint.SlurmHints.multithread - }) + req.SlurmHint.SlurmHints.multithread, + }, + ) def test_merge_requirements_mutally_exclusive(self): """ @@ -176,8 +184,9 @@ def test_merge_requirements_mutally_exclusive(self): option = req.SlurmHint({req.SlurmHint.SlurmHints.compute_bound}) other_option = req.SlurmHint({req.SlurmHint.SlurmHints.memory_bound}) - self.assertRaises(ValueError, req.SlurmHint.merge_requirements, option, - other_option) + self.assertRaises( + ValueError, req.SlurmHint.merge_requirements, option, other_option + ) def test_cli_opt(self): """ @@ -191,10 +200,12 @@ def test_cli_opt_multiple(self): """ Checks that the correct slurm cli option is generated. 
""" - option = req.SlurmHint({ - req.SlurmHint.SlurmHints.compute_bound, - req.SlurmHint.SlurmHints.nomultithread - }) + option = req.SlurmHint( + { + req.SlurmHint.SlurmHints.compute_bound, + req.SlurmHint.SlurmHints.nomultithread, + } + ) output_string = option.to_slurm_cli_opt() self.assertTrue(output_string.startswith("--hint=")) @@ -223,8 +234,7 @@ def test_merge_requirements(self): other_option = req.SlurmTime("3:3:1") merged_option = req.SlurmTime.merge_requirements(option, other_option) - merged_option_swapped = req.SlurmTime.merge_requirements( - other_option, option) + merged_option_swapped = req.SlurmTime.merge_requirements(other_option, option) self.assertEqual(merged_option.timelimit, (0, 3, 3, 1)) self.assertEqual(merged_option_swapped.timelimit, (0, 3, 3, 1)) diff --git a/tests/test_revision_ranges.py b/tests/test_revision_ranges.py index c7f19c647..a9c931d24 100644 --- a/tests/test_revision_ranges.py +++ b/tests/test_revision_ranges.py @@ -1,4 +1,5 @@ """Test the revision_ranges module.""" + from pathlib import Path import unittest from unittest import mock @@ -8,6 +9,7 @@ class TestRevisionRanges(unittest.TestCase): """Test the revision range classes.""" + def test_single_revision(self): revision_range = ranges.SingleRevision("1234abc") self.assertIn("1234abc", revision_range) @@ -29,8 +31,16 @@ def test_revision_range(self, get_all_revs_between_mock): @mock.patch("pygit2.Commit") @mock.patch("pygit2.Commit") @mock.patch("pygit2.Commit") - def test_find_blocked_commits(self, commit_head, commit_good, commit_bad, - commit_r, commit_s, commit_x, commit_y): + def test_find_blocked_commits( + self, + commit_head, + commit_good, + commit_bad, + commit_r, + commit_s, + commit_x, + commit_y, + ): """ X---R---G---HEAD \\ / / / @@ -46,8 +56,7 @@ def test_find_blocked_commits(self, commit_head, commit_good, commit_bad, commit_x.parents = [] commit_y.parents = [] - result = ranges._find_blocked_commits(commit_head, [commit_good], - [commit_bad]) + result = 
ranges._find_blocked_commits(commit_head, [commit_good], [commit_bad]) self.assertIn(commit_r, result) self.assertIn(commit_s, result) self.assertIn(commit_bad, result) diff --git a/tests/test_run.py b/tests/test_run.py index 00fb58f56..31b08ace6 100644 --- a/tests/test_run.py +++ b/tests/test_run.py @@ -1,15 +1,14 @@ """ This Test will run through benchbuild's execution pipeline. """ + import unittest from benchbuild.utils import cmd def shadow_commands(command): - def shadow_command_fun(func): - def shadow_command_wrapped_fun(self, *args, **kwargs): cmd.__override_all__ = command res = func(self, *args, **kwargs) @@ -22,30 +21,35 @@ def shadow_command_wrapped_fun(self, *args, **kwargs): class TestShadow(unittest.TestCase): - def test_shadow(self): inside = None true = cmd.true mkdir = cmd.mkdir class test_class(object): - @shadow_commands("true") def shadow_hook(self): return cmd.mkdir outside = cmd.mkdir inside = test_class().shadow_hook() - self.assertEqual(inside.formulate(), - true.formulate(), - msg="true (before) is not the same as true (inside)") + self.assertEqual( + inside.formulate(), + true.formulate(), + msg="true (before) is not the same as true (inside)", + ) self.assertNotEqual( mkdir.formulate(), inside.formulate(), - msg="mkdir (before) is not the same as mkdir (inside)") - self.assertNotEqual(inside.formulate(), - outside.formulate(), - msg="true (before) is not the same as true (after)") - self.assertEqual(mkdir.formulate(), - outside.formulate(), - msg="mkdir (before) is not the same as mkdir (after)") + msg="mkdir (before) is not the same as mkdir (inside)", + ) + self.assertNotEqual( + inside.formulate(), + outside.formulate(), + msg="true (before) is not the same as true (after)", + ) + self.assertEqual( + mkdir.formulate(), + outside.formulate(), + msg="mkdir (before) is not the same as mkdir (after)", + ) diff --git a/tests/test_schedule_tree.py b/tests/test_schedule_tree.py index 7587a3a37..93a6a3654 100644 --- 
a/tests/test_schedule_tree.py +++ b/tests/test_schedule_tree.py @@ -1,4 +1,5 @@ """Tests for the schedule_tree parser.""" + import benchbuild.utils.schedule_tree as st @@ -9,7 +10,8 @@ def test_domain_1(): def test_domain_2(): res = st.DOMAIN.parseString( - 'domain: "{ Stmt2[i1, i1] : 0 <= i0 <= 254 and 0 <= i1 <= 255 }"') + 'domain: "{ Stmt2[i1, i1] : 0 <= i0 <= 254 and 0 <= i1 <= 255 }"' + ) assert res != None @@ -34,22 +36,22 @@ def test_mark_1(): def test_permutable_1(): - res = st.PERMUTABLE.parseString('permutable: 1') + res = st.PERMUTABLE.parseString("permutable: 1") assert res != None def test_permutable_2(): - res = st.PERMUTABLE.parseString('permutable: 0') + res = st.PERMUTABLE.parseString("permutable: 0") assert res != None def test_coincident_1(): - res = st.COINCIDENT.parseString('coincident: [0]') + res = st.COINCIDENT.parseString("coincident: [0]") assert res != None def test_coincident_2(): - res = st.COINCIDENT.parseString('coincident: [0, 1]') + res = st.COINCIDENT.parseString("coincident: [0, 1]") assert res != None @@ -59,66 +61,63 @@ def test_sequence_1(): def test_sequence_2(): - res = st.SEQUENCE.parseString('sequence: [ { ' \ - 'filter: "", ' \ - 'child: { ' \ - 'permutable: 1 ' \ - '}' - '}' \ - ']') + res = st.SEQUENCE.parseString( + 'sequence: [ { filter: "", child: { permutable: 1 }}]' + ) assert res != None def test_schedule_tree_1(): - test_1 = \ -'{ domain: "{ Stmt2[i1, i1] : 0 <= i0 <= 254 and 0 <= i1 <= 255 }", ' \ - 'child: { ' \ - 'schedule: "[{ Stmt2[i0, i1] -> [(i0)] }]", ' \ - 'child: { ' \ - 'schedule: "[{ Stmt2[i0, i1] -> [(i1)] }]" ' \ - '} ' \ - '} ' \ -'}' + test_1 = ( + '{ domain: "{ Stmt2[i1, i1] : 0 <= i0 <= 254 and 0 <= i1 <= 255 }", ' + "child: { " + 'schedule: "[{ Stmt2[i0, i1] -> [(i0)] }]", ' + "child: { " + 'schedule: "[{ Stmt2[i0, i1] -> [(i1)] }]" ' + "} " + "} " + "}" + ) res = st.parse_schedule_tree(test_1) assert res != None def test_schedule_tree_2(): - test_2 = \ -'{ domain: " ", ' \ - 'child: { schedule: 
"", ' \ - 'child: { sequence: [ { filter: "", ' \ - 'child: { schedule: "" } } ] } } }' + test_2 = ( + '{ domain: " ", ' + 'child: { schedule: "", ' + 'child: { sequence: [ { filter: "", ' + 'child: { schedule: "" } } ] } } }' + ) res = st.parse_schedule_tree(test_2) assert res != None def test_schedule_tree_3(): - test_3 = \ -'{ domain: "", child: { schedule: "", child: { sequence: [ { filter: "", child: { schedule: "", permutable: 1, coincident: [ 1 ] } }, { filter: "", child: { mark: "", child: { schedule: "", permutable: 1, coincident: [ 1, 1 ], child: { mark: "", child: { schedule: "", permutable: 1, coincident: [ 1, 1 ] } } } } }, { filter: "", child: { mark: "", child: { schedule: "", permutable: 1, coincident: [ 1, 1 ], child: { mark: "", child: { schedule: "", permutable: 1, coincident: [ 1, 1 ] } } } } }, { filter: "", child: { mark: "", child: { schedule: "", permutable: 1, coincident: [ 1, 1 ], child: { mark: "", child: { schedule: "", permutable: 1, coincident: [ 1, 1 ] } } } } } ] } } }' + test_3 = '{ domain: "", child: { schedule: "", child: { sequence: [ { filter: "", child: { schedule: "", permutable: 1, coincident: [ 1 ] } }, { filter: "", child: { mark: "", child: { schedule: "", permutable: 1, coincident: [ 1, 1 ], child: { mark: "", child: { schedule: "", permutable: 1, coincident: [ 1, 1 ] } } } } }, { filter: "", child: { mark: "", child: { schedule: "", permutable: 1, coincident: [ 1, 1 ], child: { mark: "", child: { schedule: "", permutable: 1, coincident: [ 1, 1 ] } } } } }, { filter: "", child: { mark: "", child: { schedule: "", permutable: 1, coincident: [ 1, 1 ], child: { mark: "", child: { schedule: "", permutable: 1, coincident: [ 1, 1 ] } } } } } ] } } }' res = st.parse_schedule_tree(test_3) assert res != None def test_schedule_tree_4(): - test_4 = \ -'{ domain: "[p_0, p_1, p_2] -> { Stmt6[i0, i1] : 0 <= i0 < p_0 and 0 <= i1 < p_1; Stmt14[i0, i1, i2] : 0 <= i0 < p_0 and 0 <= i1 < p_2 and 0 <= i2 < p_1 }", ' \ - 'child: { sequence: [ 
{ filter: "[p_0, p_1, p_2] -> { Stmt6[i0, i1] }", ' \ - 'child: { mark: "1st level tiling - Tiles", child: { schedule: "[p_0, p_1, p_2] -> [{ Stmt6[i0, i1] -> [(floor((i0)/32))] }, { Stmt6[i0, i1] -> [(floor((i1)/32))] }]", ' \ - 'permutable: 1, ' \ - 'coincident: [ 1, 1 ], ' \ - 'child: { mark: "1st level tiling - Points", ' \ - 'child: { mark: "Register tiling - Tiles", ' \ - 'child: { schedule: "[p_0, p_1, p_2] -> [{ Stmt6[i0, i1] -> [(floor((i0)/2) - 16*floor((i0)/32))] }, { Stmt6[i0, i1] -> [(floor((i1)/2) - 16*floor((i1)/32))] }]", ' \ - 'permutable: 1, ' \ - 'coincident: [ 1, 1 ], ' \ - 'child: { mark: "Register tiling - Points", child: { schedule: "[p_0, p_1, p_2] -> [{ Stmt6[i0, i1] -> [(i0 - 2*floor((i0)/2))] }, { Stmt6[i0, i1] -> [(i1 - 2*floor((i1)/2))] }]", permutable: 1, coincident: [ 1, 1 ], options: "{ unroll[i0] : 0 <= i0 <= 1 }" } } } } } } } }, { filter: "[p_0, p_1, p_2] -> { Stmt14[i0, i1, i2] }", child: { mark: "Inter iteration alias-free", child: { mark: "1st level tiling - Tiles", child: { schedule: "[p_0, p_1, p_2] -> [{ Stmt14[i0, i1, i2] -> [(floor((i2)/1024))] }, { Stmt14[i0, i1, i2] -> [(floor((i1)/384))] }]", child: { extension: "[p_0, p_1, p_2] -> { [i0, i1] -> CopyStmt_0[0, o1, o2] : p_0 > 0 and o1 >= 384i1 and 0 <= o1 <= 383 + 384i1 and o1 < p_2 and o2 >= 1024i0 and 0 <= o2 <= 1023 + 1024i0 and o2 < p_1 }", child: { sequence: [ { filter: "[p_0, p_1, p_2] -> { CopyStmt_0[i0, i1, i2] }" }, { filter: "[p_0, p_1, p_2] -> { Stmt14[i0, i1, i2] }", child: { schedule: "[p_0, p_1, p_2] -> [{ Stmt14[i0, i1, i2] -> [(floor((i0)/64))] }]", child: { extension: "[p_0, p_1, p_2] -> { [0, i1, i2] -> CopyStmt_1[o0, o1, 0] : p_1 > 0 and o0 >= 64i2 and 0 <= o0 <= 63 + 64i2 and o0 < p_0 and o1 >= 384i1 and 0 <= o1 <= 383 + 384i1 and o1 < p_2 }", child: { sequence: [ { filter: "[p_0, p_1, p_2] -> { CopyStmt_1[i0, i1, i2] }" }, { filter: "[p_0, p_1, p_2] -> { Stmt14[i0, i1, i2] }", child: { mark: "1st level tiling - Points", child: { mark: "Register 
tiling - Tiles", child: { schedule: "[p_0, p_1, p_2] -> [{ Stmt14[i0, i1, i2] -> [(floor((i2)/4) - 256*floor((i2)/1024))] }, { Stmt14[i0, i1, i2] -> [(floor((i0)/4) - 16*floor((i0)/64))] }, { Stmt14[i0, i1, i2] -> [(i1 - 384*floor((i1)/384))] }]", options: "[p_0, p_1, p_2] -> { isolate[[i0, i1, i2] -> [i3, i4, i5]] : i3 >= 0 and -256i0 <= i3 <= 255 and 4i3 <= -4 + p_1 - 1024i0 and i4 >= 0 and -16i2 <= i4 <= 15 and 4i4 <= -4 + p_0 - 64i2 and i5 >= 0 and -384i1 <= i5 <= 383 and i5 < p_2 - 384i1; separate[i0] : 0 <= i0 <= 2 }", child: { mark: "Loop Vectorizer Disabled", child: { mark: "Register tiling - Points", child: { schedule: "[p_0, p_1, p_2] -> [{ Stmt14[i0, i1, i2] -> [(i0 - 4*floor((i0)/4))] }, { Stmt14[i0, i1, i2] -> [(i2 - 4*floor((i2)/4))] }, { Stmt14[i0, i1, i2] -> [(0)] }]", options: "[p_0, p_1, p_2] -> { isolate[[i0, i1, i2, i3, i4, i5] -> [i6, i7, i8]] : i3 >= 0 and -256i0 <= i3 <= 255 and 4i3 <= -4 + p_1 - 1024i0 and i4 >= 0 and -16i2 <= i4 <= 15 and 4i4 <= -4 + p_0 - 64i2 and i5 >= 0 and -384i1 <= i5 <= 383 and i5 < p_2 - 384i1; unroll[i0] : 0 <= i0 <= 2; [isolate[] -> unroll[i0]] : 0 <= i0 <= 2 }" } } } } } } } ] } } } } ] } } } } } } ] } }' + test_4 = ( + '{ domain: "[p_0, p_1, p_2] -> { Stmt6[i0, i1] : 0 <= i0 < p_0 and 0 <= i1 < p_1; Stmt14[i0, i1, i2] : 0 <= i0 < p_0 and 0 <= i1 < p_2 and 0 <= i2 < p_1 }", ' + 'child: { sequence: [ { filter: "[p_0, p_1, p_2] -> { Stmt6[i0, i1] }", ' + 'child: { mark: "1st level tiling - Tiles", child: { schedule: "[p_0, p_1, p_2] -> [{ Stmt6[i0, i1] -> [(floor((i0)/32))] }, { Stmt6[i0, i1] -> [(floor((i1)/32))] }]", ' + "permutable: 1, " + "coincident: [ 1, 1 ], " + 'child: { mark: "1st level tiling - Points", ' + 'child: { mark: "Register tiling - Tiles", ' + 'child: { schedule: "[p_0, p_1, p_2] -> [{ Stmt6[i0, i1] -> [(floor((i0)/2) - 16*floor((i0)/32))] }, { Stmt6[i0, i1] -> [(floor((i1)/2) - 16*floor((i1)/32))] }]", ' + "permutable: 1, " + "coincident: [ 1, 1 ], " + 'child: { mark: "Register tiling - Points", 
child: { schedule: "[p_0, p_1, p_2] -> [{ Stmt6[i0, i1] -> [(i0 - 2*floor((i0)/2))] }, { Stmt6[i0, i1] -> [(i1 - 2*floor((i1)/2))] }]", permutable: 1, coincident: [ 1, 1 ], options: "{ unroll[i0] : 0 <= i0 <= 1 }" } } } } } } } }, { filter: "[p_0, p_1, p_2] -> { Stmt14[i0, i1, i2] }", child: { mark: "Inter iteration alias-free", child: { mark: "1st level tiling - Tiles", child: { schedule: "[p_0, p_1, p_2] -> [{ Stmt14[i0, i1, i2] -> [(floor((i2)/1024))] }, { Stmt14[i0, i1, i2] -> [(floor((i1)/384))] }]", child: { extension: "[p_0, p_1, p_2] -> { [i0, i1] -> CopyStmt_0[0, o1, o2] : p_0 > 0 and o1 >= 384i1 and 0 <= o1 <= 383 + 384i1 and o1 < p_2 and o2 >= 1024i0 and 0 <= o2 <= 1023 + 1024i0 and o2 < p_1 }", child: { sequence: [ { filter: "[p_0, p_1, p_2] -> { CopyStmt_0[i0, i1, i2] }" }, { filter: "[p_0, p_1, p_2] -> { Stmt14[i0, i1, i2] }", child: { schedule: "[p_0, p_1, p_2] -> [{ Stmt14[i0, i1, i2] -> [(floor((i0)/64))] }]", child: { extension: "[p_0, p_1, p_2] -> { [0, i1, i2] -> CopyStmt_1[o0, o1, 0] : p_1 > 0 and o0 >= 64i2 and 0 <= o0 <= 63 + 64i2 and o0 < p_0 and o1 >= 384i1 and 0 <= o1 <= 383 + 384i1 and o1 < p_2 }", child: { sequence: [ { filter: "[p_0, p_1, p_2] -> { CopyStmt_1[i0, i1, i2] }" }, { filter: "[p_0, p_1, p_2] -> { Stmt14[i0, i1, i2] }", child: { mark: "1st level tiling - Points", child: { mark: "Register tiling - Tiles", child: { schedule: "[p_0, p_1, p_2] -> [{ Stmt14[i0, i1, i2] -> [(floor((i2)/4) - 256*floor((i2)/1024))] }, { Stmt14[i0, i1, i2] -> [(floor((i0)/4) - 16*floor((i0)/64))] }, { Stmt14[i0, i1, i2] -> [(i1 - 384*floor((i1)/384))] }]", options: "[p_0, p_1, p_2] -> { isolate[[i0, i1, i2] -> [i3, i4, i5]] : i3 >= 0 and -256i0 <= i3 <= 255 and 4i3 <= -4 + p_1 - 1024i0 and i4 >= 0 and -16i2 <= i4 <= 15 and 4i4 <= -4 + p_0 - 64i2 and i5 >= 0 and -384i1 <= i5 <= 383 and i5 < p_2 - 384i1; separate[i0] : 0 <= i0 <= 2 }", child: { mark: "Loop Vectorizer Disabled", child: { mark: "Register tiling - Points", child: { schedule: "[p_0, p_1, 
p_2] -> [{ Stmt14[i0, i1, i2] -> [(i0 - 4*floor((i0)/4))] }, { Stmt14[i0, i1, i2] -> [(i2 - 4*floor((i2)/4))] }, { Stmt14[i0, i1, i2] -> [(0)] }]", options: "[p_0, p_1, p_2] -> { isolate[[i0, i1, i2, i3, i4, i5] -> [i6, i7, i8]] : i3 >= 0 and -256i0 <= i3 <= 255 and 4i3 <= -4 + p_1 - 1024i0 and i4 >= 0 and -16i2 <= i4 <= 15 and 4i4 <= -4 + p_0 - 64i2 and i5 >= 0 and -384i1 <= i5 <= 383 and i5 < p_2 - 384i1; unroll[i0] : 0 <= i0 <= 2; [isolate[] -> unroll[i0]] : 0 <= i0 <= 2 }" } } } } } } } ] } } } } ] } } } } } } ] } }' + ) res = st.parse_schedule_tree(test_4) assert res != None def test_schedule_tree_5(): - test_5 = \ -'{ domain: "{ Stmt14[i0, i1, i2] : 0 <= i0 <= 1999 and 0 <= i1 <= 2599 and 0 <= i2 <= 2299; Stmt6[i0, i1] : 0 <= i0 <= 1999 and 0 <= i1 <= 2299 }", child: { sequence: [ { filter: "{ Stmt6[i0, i1] }", child: { mark: "1st level tiling - Tiles", child: { schedule: "[{ Stmt6[i0, i1] -> [(floor((i0)/32))] }, { Stmt6[i0, i1] -> [(floor((i1)/32))] }]", permutable: 1, coincident: [ 1, 1 ], child: { mark: "1st level tiling - Points", child: { mark: "Register tiling - Tiles", child: { schedule: "[{ Stmt6[i0, i1] -> [(floor((i0)/2) - 16*floor((i0)/32))] }, { Stmt6[i0, i1] -> [(floor((i1)/2) - 16*floor((i1)/32))] }]", permutable: 1, coincident: [ 1, 1 ], child: { mark: "Register tiling - Points", child: { schedule: "[{ Stmt6[i0, i1] -> [(i0 - 2*floor((i0)/2))] }, { Stmt6[i0, i1] -> [(i1 - 2*floor((i1)/2))] }]", permutable: 1, coincident: [ 1, 1 ], options: "{ unroll[i0] : 0 <= i0 <= 1 }" } } } } } } } }, { filter: "{ Stmt14[i0, i1, i2] }", child: { mark: "1st level tiling - Tiles", child: { schedule: "[{ Stmt14[i0, i1, i2] -> [(floor((i0)/32))] }, { Stmt14[i0, i1, i2] -> [(floor((i2)/32))] }, { Stmt14[i0, i1, i2] -> [(floor((i1)/32))] }]", permutable: 1, coincident: [ 1, 1, 0 ], child: { mark: "1st level tiling - Points", child: { mark: "Register tiling - Tiles", child: { schedule: "[{ Stmt14[i0, i1, i2] -> [(floor((i0)/2) - 16*floor((i0)/32))] }, { Stmt14[i0, 
i1, i2] -> [(floor((i2)/2) - 16*floor((i2)/32))] }, { Stmt14[i0, i1, i2] -> [(floor((i1)/2) - 16*floor((i1)/32))] }]", permutable: 1, coincident: [ 1, 1, 0 ], child: { mark: "Register tiling - Points", child: { schedule: "[{ Stmt14[i0, i1, i2] -> [(i0 - 2*floor((i0)/2))] }, { Stmt14[i0, i1, i2] -> [(i2 - 2*floor((i2)/2))] }, { Stmt14[i0, i1, i2] -> [(i1 - 2*floor((i1)/2))] }]", permutable: 1, coincident: [ 1, 1, 0 ], options: "{ unroll[i0] : 0 <= i0 <= 2 }" } } } } } } } } ] } }' + test_5 = '{ domain: "{ Stmt14[i0, i1, i2] : 0 <= i0 <= 1999 and 0 <= i1 <= 2599 and 0 <= i2 <= 2299; Stmt6[i0, i1] : 0 <= i0 <= 1999 and 0 <= i1 <= 2299 }", child: { sequence: [ { filter: "{ Stmt6[i0, i1] }", child: { mark: "1st level tiling - Tiles", child: { schedule: "[{ Stmt6[i0, i1] -> [(floor((i0)/32))] }, { Stmt6[i0, i1] -> [(floor((i1)/32))] }]", permutable: 1, coincident: [ 1, 1 ], child: { mark: "1st level tiling - Points", child: { mark: "Register tiling - Tiles", child: { schedule: "[{ Stmt6[i0, i1] -> [(floor((i0)/2) - 16*floor((i0)/32))] }, { Stmt6[i0, i1] -> [(floor((i1)/2) - 16*floor((i1)/32))] }]", permutable: 1, coincident: [ 1, 1 ], child: { mark: "Register tiling - Points", child: { schedule: "[{ Stmt6[i0, i1] -> [(i0 - 2*floor((i0)/2))] }, { Stmt6[i0, i1] -> [(i1 - 2*floor((i1)/2))] }]", permutable: 1, coincident: [ 1, 1 ], options: "{ unroll[i0] : 0 <= i0 <= 1 }" } } } } } } } }, { filter: "{ Stmt14[i0, i1, i2] }", child: { mark: "1st level tiling - Tiles", child: { schedule: "[{ Stmt14[i0, i1, i2] -> [(floor((i0)/32))] }, { Stmt14[i0, i1, i2] -> [(floor((i2)/32))] }, { Stmt14[i0, i1, i2] -> [(floor((i1)/32))] }]", permutable: 1, coincident: [ 1, 1, 0 ], child: { mark: "1st level tiling - Points", child: { mark: "Register tiling - Tiles", child: { schedule: "[{ Stmt14[i0, i1, i2] -> [(floor((i0)/2) - 16*floor((i0)/32))] }, { Stmt14[i0, i1, i2] -> [(floor((i2)/2) - 16*floor((i2)/32))] }, { Stmt14[i0, i1, i2] -> [(floor((i1)/2) - 16*floor((i1)/32))] }]", permutable: 
1, coincident: [ 1, 1, 0 ], child: { mark: "Register tiling - Points", child: { schedule: "[{ Stmt14[i0, i1, i2] -> [(i0 - 2*floor((i0)/2))] }, { Stmt14[i0, i1, i2] -> [(i2 - 2*floor((i2)/2))] }, { Stmt14[i0, i1, i2] -> [(i1 - 2*floor((i1)/2))] }]", permutable: 1, coincident: [ 1, 1, 0 ], options: "{ unroll[i0] : 0 <= i0 <= 2 }" } } } } } } } } ] } }' res = st.parse_schedule_tree(test_5) assert res != None diff --git a/tests/test_slurm.py b/tests/test_slurm.py index 43c49e364..7479a1b31 100644 --- a/tests/test_slurm.py +++ b/tests/test_slurm.py @@ -1,6 +1,7 @@ """ Test the SLURM script generator. """ + import tempfile import unittest import unittest.mock @@ -14,7 +15,6 @@ class TestSLURM(unittest.TestCase): - def setUp(self): # Disable database interaction. test.TestProject.__attrs_post_init__ = unittest.mock.MagicMock() @@ -28,9 +28,11 @@ def tearDown(self): def test_script(self): script_path = local.path( - slurm.__save__(self.tmp_file, true, self.exp, [self.prj])) - - self.assertTrue(self.tmp_file == script_path, - msg="Generated file does not match temporary file.") - self.assertTrue(slurm.__verify__(self.tmp_file), - msg="Syntax check failed.") + slurm.__save__(self.tmp_file, true, self.exp, [self.prj]) + ) + + self.assertTrue( + self.tmp_file == script_path, + msg="Generated file does not match temporary file.", + ) + self.assertTrue(slurm.__verify__(self.tmp_file), msg="Syntax check failed.") diff --git a/tests/test_strategy_polyjit.py b/tests/test_strategy_polyjit.py index 84eaa4c10..79b892608 100644 --- a/tests/test_strategy_polyjit.py +++ b/tests/test_strategy_polyjit.py @@ -4,7 +4,6 @@ class TestPolyJITPackages(unittest.TestCase): - def test_package_defaults(self): packages = CFG["container"]["strategy"]["polyjit"]["packages"].value self.assertIsInstance(packages, list, msg="Not a list!") @@ -12,27 +11,21 @@ def test_package_defaults(self): def test_package_atoms_name(self): packages = CFG["container"]["strategy"]["polyjit"]["packages"].value for pkg in 
packages: - self.assertIn("name", - pkg, - msg="{0} lacks 'name' attribute".format(str(pkg))) + self.assertIn( + "name", pkg, msg="{0} lacks 'name' attribute".format(str(pkg)) + ) def test_package_atoms_env(self): packages = CFG["container"]["strategy"]["polyjit"]["packages"].value for pkg in packages: - self.assertIn("env", - pkg, - msg="{0} lacks 'env' attribute".format(str(pkg))) + self.assertIn("env", pkg, msg="{0} lacks 'env' attribute".format(str(pkg))) def test_package_atoms_use_is_list(self): packages = CFG["container"]["strategy"]["polyjit"]["packages"].value for pkg in packages: - self.assertIsInstance(pkg["env"], - dict, - msg='"env" attribute is not a dict') + self.assertIsInstance(pkg["env"], dict, msg='"env" attribute is not a dict') def test_package_atoms_name_is_str(self): packages = CFG["container"]["strategy"]["polyjit"]["packages"].value for pkg in packages: - self.assertIsInstance(pkg["name"], - str, - msg='"name" attribute is not a str') + self.assertIsInstance(pkg["name"], str, msg='"name" attribute is not a str') diff --git a/tests/test_unionfs.py b/tests/test_unionfs.py index 39f516c28..d92bf9b47 100644 --- a/tests/test_unionfs.py +++ b/tests/test_unionfs.py @@ -1,4 +1,4 @@ -""" Testing suite for the mounting process. """ +"""Testing suite for the mounting process.""" import tempfile import unittest @@ -7,7 +7,7 @@ from benchbuild.settings import CFG -__UNIONFS_ENABLED__ = bool(CFG['unionfs']['enable']) +__UNIONFS_ENABLED__ = bool(CFG["unionfs"]["enable"]) @unittest.skipIf(not __UNIONFS_ENABLED__, "Requires UnionFS to be enabled.") @@ -28,10 +28,10 @@ def tearDownClass(cls): cls.tmp_dir.delete() def test_build_dir(self): - """ Check if the needed build_dir exists. 
""" - build_dir = local.path(str(CFG['build_dir'])) + """Check if the needed build_dir exists.""" + build_dir = local.path(str(CFG["build_dir"])) self.assertTrue(build_dir.exists()) -if __name__ == 'main': +if __name__ == "main": unittest.main() diff --git a/tests/test_wrappers.py b/tests/test_wrappers.py index cf1586ebe..3ee532d41 100644 --- a/tests/test_wrappers.py +++ b/tests/test_wrappers.py @@ -1,4 +1,5 @@ """Test benchbuild's runtime wrappers.""" + import os import tempfile import unittest @@ -18,7 +19,7 @@ class EmptyProject(project.Project): DOMAIN = "debug" GROUP = "debug" SOURCE = [nosource()] - CONTAINER = declarative.ContainerImage().from_('benchbuild:alpine') + CONTAINER = declarative.ContainerImage().from_("benchbuild:alpine") def __attrs_post_init__(self): pass @@ -37,7 +38,6 @@ def run_tests(self): class WrapperTests(unittest.TestCase): - @classmethod def setUpClass(cls): cls.tmp_dir = tempfile.mkdtemp() @@ -54,7 +54,6 @@ def setUp(self): class RunCompiler(WrapperTests): - def test_create(self): with local.cwd(self.tmp_dir): cmd = compilers.cc(EmptyProject()) @@ -62,7 +61,6 @@ def test_create(self): class RunStatic(WrapperTests): - def test_create(self): with local.cwd(self.tmp_dir): cmd = wrappers.wrap(self.tmp_script, EmptyProject()) @@ -71,7 +69,6 @@ def test_create(self): class RunDynamic(WrapperTests): - def test_create(self): with local.cwd(self.tmp_dir): cmd = wrappers.wrap_dynamic(EmptyProject(), self.tmp_script) diff --git a/tests/ui/test_queries.py b/tests/ui/test_queries.py index d04813f32..7cbf2e4f8 100644 --- a/tests/ui/test_queries.py +++ b/tests/ui/test_queries.py @@ -1,4 +1,5 @@ """Test user interface utility methods.""" + import os import pytest diff --git a/uv.lock b/uv.lock new file mode 100644 index 000000000..c7bca0547 --- /dev/null +++ b/uv.lock @@ -0,0 +1,1577 @@ +version = 1 +revision = 2 +requires-python = ">=3.11" + +[[package]] +name = "alabaster" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist 
= { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210, upload-time = "2024-07-26T18:15:03.762Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929, upload-time = "2024-07-26T18:15:02.05Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "babel" +version = "2.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, +] + +[[package]] +name 
= "benchbuild" +source = { editable = "." } +dependencies = [ + { name = "attrs" }, + { name = "dill" }, + { name = "jinja2" }, + { name = "parse" }, + { name = "pathos" }, + { name = "plumbum" }, + { name = "psutil" }, + { name = "psycopg2-binary" }, + { name = "pygit2" }, + { name = "pygtrie" }, + { name = "pyparsing" }, + { name = "pyyaml" }, + { name = "result" }, + { name = "rich" }, + { name = "schema" }, + { name = "six" }, + { name = "sqlalchemy" }, + { name = "typing-extensions" }, + { name = "virtualenv" }, +] + +[package.dev-dependencies] +dev = [ + { name = "codecov" }, + { name = "faker" }, + { name = "gitpython" }, + { name = "mock" }, + { name = "pre-commit" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "pytest-describe" }, + { name = "pytest-git" }, +] +docs = [ + { name = "linkify" }, + { name = "mkdocs" }, + { name = "myst-parser", extra = ["linkify"] }, + { name = "pymdown-extensions" }, + { name = "sphinx" }, + { name = "sphinx-autodoc-typehints" }, + { name = "sphinx-markdown" }, + { name = "sphinx-press-theme" }, + { name = "sphinxcontrib-programoutput" }, +] + +[package.metadata] +requires-dist = [ + { name = "attrs", specifier = ">=22" }, + { name = "dill", specifier = ">=0" }, + { name = "jinja2", specifier = ">=3" }, + { name = "parse", specifier = ">=1" }, + { name = "pathos", specifier = ">=0" }, + { name = "plumbum", specifier = ">=1" }, + { name = "psutil", specifier = ">=5" }, + { name = "psycopg2-binary", specifier = ">=2" }, + { name = "pygit2", specifier = ">=1.19" }, + { name = "pygtrie", specifier = ">=2" }, + { name = "pyparsing", specifier = ">=3" }, + { name = "pyyaml", specifier = ">=6.0" }, + { name = "result", specifier = ">=0" }, + { name = "rich", specifier = ">=12" }, + { name = "schema", specifier = ">=0" }, + { name = "six", specifier = ">=1.17.0" }, + { name = "sqlalchemy", specifier = ">=2" }, + { name = "typing-extensions", specifier = ">=4" }, + { name = "virtualenv", specifier = ">=20" }, +] + 
+[package.metadata.requires-dev] +dev = [ + { name = "codecov", specifier = ">=2.1.13" }, + { name = "faker", specifier = ">=37.4.2" }, + { name = "gitpython", specifier = ">=3.1.45" }, + { name = "mock", specifier = ">=5.2.0" }, + { name = "pre-commit", specifier = ">=4.2.0" }, + { name = "pytest", specifier = ">=8.4.1" }, + { name = "pytest-cov", specifier = ">=6.2.1" }, + { name = "pytest-describe", specifier = ">=2.2.0" }, + { name = "pytest-git", specifier = ">=1.8.0" }, +] +docs = [ + { name = "linkify", specifier = ">=1.4" }, + { name = "mkdocs", specifier = ">=1.6" }, + { name = "myst-parser", extras = ["linkify"], specifier = ">=4" }, + { name = "pymdown-extensions", specifier = ">=10.16" }, + { name = "sphinx", specifier = ">=8" }, + { name = "sphinx-autodoc-typehints", specifier = ">=3" }, + { name = "sphinx-markdown", specifier = ">=1" }, + { name = "sphinx-press-theme", specifier = ">=0.9" }, + { name = "sphinxcontrib-programoutput", specifier = ">=0.18" }, +] + +[[package]] +name = "certifi" +version = "2025.7.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/76/52c535bcebe74590f296d6c77c86dabf761c41980e1347a2422e4aa2ae41/certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995", size = 163981, upload-time = "2025-07-14T03:29:28.449Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", size = 162722, upload-time = "2025-07-14T03:29:26.863Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, + { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = 
"2025-05-02T08:32:15.873Z" }, + { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, + { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, + { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, + { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, + { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, + { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, + { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, + { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, + { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, + { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, + { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, + { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, + { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, + { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, + { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, + { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, + { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, + { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, + { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, + { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, + { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, + { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, + { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", 
size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, + { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, + { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, +] + +[[package]] +name = "click" +version = "8.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, +] + +[[package]] +name = "codecov" +version = "2.1.13" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2c/bb/594b26d2c85616be6195a64289c578662678afa4910cef2d3ce8417cf73e/codecov-2.1.13.tar.gz", hash = "sha256:2362b685633caeaf45b9951a9b76ce359cd3581dd515b430c6c3f5dfb4d92a8c", size = 21416, upload-time = "2023-04-17T23:11:39.779Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/af/02/18785edcdf6266cdd6c6dc7635f1cbeefd9a5b4c3bb8aff8bd681e9dd095/codecov-2.1.13-py2.py3-none-any.whl", hash = "sha256:c2ca5e51bba9ebb43644c43d0690148a55086f7f5e6fd36170858fa4206744d5", size = 16512, upload-time = "2023-04-17T23:11:37.344Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/87/0e/66dbd4c6a7f0758a8d18044c048779ba21fb94856e1edcf764bd5403e710/coverage-7.10.1.tar.gz", hash = "sha256:ae2b4856f29ddfe827106794f3589949a57da6f0d38ab01e24ec35107979ba57", size = 819938, upload-time = "2025-07-27T14:13:39.045Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/8e/ef088112bd1b26e2aa931ee186992b3e42c222c64f33e381432c8ee52aae/coverage-7.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b45e2f9d5b0b5c1977cb4feb5f594be60eb121106f8900348e29331f553a726f", size = 214747, upload-time = "2025-07-27T14:11:18.217Z" }, + { url = "https://files.pythonhosted.org/packages/2d/76/a1e46f3c6e0897758eb43af88bb3c763cb005f4950769f7b553e22aa5f89/coverage-7.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a7a4d74cb0f5e3334f9aa26af7016ddb94fb4bfa11b4a573d8e98ecba8c34f1", size = 215128, upload-time = 
"2025-07-27T14:11:19.706Z" }, + { url = "https://files.pythonhosted.org/packages/78/4d/903bafb371a8c887826ecc30d3977b65dfad0e1e66aa61b7e173de0828b0/coverage-7.10.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d4b0aab55ad60ead26159ff12b538c85fbab731a5e3411c642b46c3525863437", size = 245140, upload-time = "2025-07-27T14:11:21.261Z" }, + { url = "https://files.pythonhosted.org/packages/55/f1/1f8f09536f38394a8698dd08a0e9608a512eacee1d3b771e2d06397f77bf/coverage-7.10.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dcc93488c9ebd229be6ee1f0d9aad90da97b33ad7e2912f5495804d78a3cd6b7", size = 246977, upload-time = "2025-07-27T14:11:23.15Z" }, + { url = "https://files.pythonhosted.org/packages/57/cc/ed6bbc5a3bdb36ae1bca900bbbfdcb23b260ef2767a7b2dab38b92f61adf/coverage-7.10.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa309df995d020f3438407081b51ff527171cca6772b33cf8f85344b8b4b8770", size = 249140, upload-time = "2025-07-27T14:11:24.743Z" }, + { url = "https://files.pythonhosted.org/packages/10/f5/e881ade2d8e291b60fa1d93d6d736107e940144d80d21a0d4999cff3642f/coverage-7.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cfb8b9d8855c8608f9747602a48ab525b1d320ecf0113994f6df23160af68262", size = 246869, upload-time = "2025-07-27T14:11:26.156Z" }, + { url = "https://files.pythonhosted.org/packages/53/b9/6a5665cb8996e3cd341d184bb11e2a8edf01d8dadcf44eb1e742186cf243/coverage-7.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:320d86da829b012982b414c7cdda65f5d358d63f764e0e4e54b33097646f39a3", size = 244899, upload-time = "2025-07-27T14:11:27.622Z" }, + { url = "https://files.pythonhosted.org/packages/27/11/24156776709c4e25bf8a33d6bb2ece9a9067186ddac19990f6560a7f8130/coverage-7.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dc60ddd483c556590da1d9482a4518292eec36dd0e1e8496966759a1f282bcd0", size = 245507, 
upload-time = "2025-07-27T14:11:29.544Z" }, + { url = "https://files.pythonhosted.org/packages/43/db/a6f0340b7d6802a79928659c9a32bc778ea420e87a61b568d68ac36d45a8/coverage-7.10.1-cp311-cp311-win32.whl", hash = "sha256:4fcfe294f95b44e4754da5b58be750396f2b1caca8f9a0e78588e3ef85f8b8be", size = 217167, upload-time = "2025-07-27T14:11:31.349Z" }, + { url = "https://files.pythonhosted.org/packages/f5/6f/1990eb4fd05cea4cfabdf1d587a997ac5f9a8bee883443a1d519a2a848c9/coverage-7.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:efa23166da3fe2915f8ab452dde40319ac84dc357f635737174a08dbd912980c", size = 218054, upload-time = "2025-07-27T14:11:33.202Z" }, + { url = "https://files.pythonhosted.org/packages/b4/4d/5e061d6020251b20e9b4303bb0b7900083a1a384ec4e5db326336c1c4abd/coverage-7.10.1-cp311-cp311-win_arm64.whl", hash = "sha256:d12b15a8c3759e2bb580ffa423ae54be4f184cf23beffcbd641f4fe6e1584293", size = 216483, upload-time = "2025-07-27T14:11:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3f/b051feeb292400bd22d071fdf933b3ad389a8cef5c80c7866ed0c7414b9e/coverage-7.10.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6b7dc7f0a75a7eaa4584e5843c873c561b12602439d2351ee28c7478186c4da4", size = 214934, upload-time = "2025-07-27T14:11:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e4/a61b27d5c4c2d185bdfb0bfe9d15ab4ac4f0073032665544507429ae60eb/coverage-7.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:607f82389f0ecafc565813aa201a5cade04f897603750028dd660fb01797265e", size = 215173, upload-time = "2025-07-27T14:11:38.005Z" }, + { url = "https://files.pythonhosted.org/packages/8a/01/40a6ee05b60d02d0bc53742ad4966e39dccd450aafb48c535a64390a3552/coverage-7.10.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f7da31a1ba31f1c1d4d5044b7c5813878adae1f3af8f4052d679cc493c7328f4", size = 246190, upload-time = "2025-07-27T14:11:39.887Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/ef/a28d64d702eb583c377255047281305dc5a5cfbfb0ee36e721f78255adb6/coverage-7.10.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:51fe93f3fe4f5d8483d51072fddc65e717a175490804e1942c975a68e04bf97a", size = 248618, upload-time = "2025-07-27T14:11:41.841Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ad/73d018bb0c8317725370c79d69b5c6e0257df84a3b9b781bda27a438a3be/coverage-7.10.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3e59d00830da411a1feef6ac828b90bbf74c9b6a8e87b8ca37964925bba76dbe", size = 250081, upload-time = "2025-07-27T14:11:43.705Z" }, + { url = "https://files.pythonhosted.org/packages/2d/dd/496adfbbb4503ebca5d5b2de8bed5ec00c0a76558ffc5b834fd404166bc9/coverage-7.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:924563481c27941229cb4e16eefacc35da28563e80791b3ddc5597b062a5c386", size = 247990, upload-time = "2025-07-27T14:11:45.244Z" }, + { url = "https://files.pythonhosted.org/packages/18/3c/a9331a7982facfac0d98a4a87b36ae666fe4257d0f00961a3a9ef73e015d/coverage-7.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ca79146ee421b259f8131f153102220b84d1a5e6fb9c8aed13b3badfd1796de6", size = 246191, upload-time = "2025-07-27T14:11:47.093Z" }, + { url = "https://files.pythonhosted.org/packages/62/0c/75345895013b83f7afe92ec595e15a9a525ede17491677ceebb2ba5c3d85/coverage-7.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2b225a06d227f23f386fdc0eab471506d9e644be699424814acc7d114595495f", size = 247400, upload-time = "2025-07-27T14:11:48.643Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a9/98b268cfc5619ef9df1d5d34fee408ecb1542d9fd43d467e5c2f28668cd4/coverage-7.10.1-cp312-cp312-win32.whl", hash = "sha256:5ba9a8770effec5baaaab1567be916c87d8eea0c9ad11253722d86874d885eca", size = 217338, upload-time = "2025-07-27T14:11:50.258Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/31/22a5440e4d1451f253c5cd69fdcead65e92ef08cd4ec237b8756dc0b20a7/coverage-7.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:9eb245a8d8dd0ad73b4062135a251ec55086fbc2c42e0eb9725a9b553fba18a3", size = 218125, upload-time = "2025-07-27T14:11:52.034Z" }, + { url = "https://files.pythonhosted.org/packages/d6/2b/40d9f0ce7ee839f08a43c5bfc9d05cec28aaa7c9785837247f96cbe490b9/coverage-7.10.1-cp312-cp312-win_arm64.whl", hash = "sha256:7718060dd4434cc719803a5e526838a5d66e4efa5dc46d2b25c21965a9c6fcc4", size = 216523, upload-time = "2025-07-27T14:11:53.965Z" }, + { url = "https://files.pythonhosted.org/packages/ef/72/135ff5fef09b1ffe78dbe6fcf1e16b2e564cd35faeacf3d63d60d887f12d/coverage-7.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ebb08d0867c5a25dffa4823377292a0ffd7aaafb218b5d4e2e106378b1061e39", size = 214960, upload-time = "2025-07-27T14:11:55.959Z" }, + { url = "https://files.pythonhosted.org/packages/b1/aa/73a5d1a6fc08ca709a8177825616aa95ee6bf34d522517c2595484a3e6c9/coverage-7.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f32a95a83c2e17422f67af922a89422cd24c6fa94041f083dd0bb4f6057d0bc7", size = 215220, upload-time = "2025-07-27T14:11:57.899Z" }, + { url = "https://files.pythonhosted.org/packages/8d/40/3124fdd45ed3772a42fc73ca41c091699b38a2c3bd4f9cb564162378e8b6/coverage-7.10.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c4c746d11c8aba4b9f58ca8bfc6fbfd0da4efe7960ae5540d1a1b13655ee8892", size = 245772, upload-time = "2025-07-27T14:12:00.422Z" }, + { url = "https://files.pythonhosted.org/packages/42/62/a77b254822efa8c12ad59e8039f2bc3df56dc162ebda55e1943e35ba31a5/coverage-7.10.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7f39edd52c23e5c7ed94e0e4bf088928029edf86ef10b95413e5ea670c5e92d7", size = 248116, upload-time = "2025-07-27T14:12:03.099Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/01/8101f062f472a3a6205b458d18ef0444a63ae5d36a8a5ed5dd0f6167f4db/coverage-7.10.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab6e19b684981d0cd968906e293d5628e89faacb27977c92f3600b201926b994", size = 249554, upload-time = "2025-07-27T14:12:04.668Z" }, + { url = "https://files.pythonhosted.org/packages/8f/7b/e51bc61573e71ff7275a4f167aecbd16cb010aefdf54bcd8b0a133391263/coverage-7.10.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5121d8cf0eacb16133501455d216bb5f99899ae2f52d394fe45d59229e6611d0", size = 247766, upload-time = "2025-07-27T14:12:06.234Z" }, + { url = "https://files.pythonhosted.org/packages/4b/71/1c96d66a51d4204a9d6d12df53c4071d87e110941a2a1fe94693192262f5/coverage-7.10.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df1c742ca6f46a6f6cbcaef9ac694dc2cb1260d30a6a2f5c68c5f5bcfee1cfd7", size = 245735, upload-time = "2025-07-27T14:12:08.305Z" }, + { url = "https://files.pythonhosted.org/packages/13/d5/efbc2ac4d35ae2f22ef6df2ca084c60e13bd9378be68655e3268c80349ab/coverage-7.10.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:40f9a38676f9c073bf4b9194707aa1eb97dca0e22cc3766d83879d72500132c7", size = 247118, upload-time = "2025-07-27T14:12:09.903Z" }, + { url = "https://files.pythonhosted.org/packages/d1/22/073848352bec28ca65f2b6816b892fcf9a31abbef07b868487ad15dd55f1/coverage-7.10.1-cp313-cp313-win32.whl", hash = "sha256:2348631f049e884839553b9974f0821d39241c6ffb01a418efce434f7eba0fe7", size = 217381, upload-time = "2025-07-27T14:12:11.535Z" }, + { url = "https://files.pythonhosted.org/packages/b7/df/df6a0ff33b042f000089bd11b6bb034bab073e2ab64a56e78ed882cba55d/coverage-7.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:4072b31361b0d6d23f750c524f694e1a417c1220a30d3ef02741eed28520c48e", size = 218152, upload-time = "2025-07-27T14:12:13.182Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/e3/5085ca849a40ed6b47cdb8f65471c2f754e19390b5a12fa8abd25cbfaa8f/coverage-7.10.1-cp313-cp313-win_arm64.whl", hash = "sha256:3e31dfb8271937cab9425f19259b1b1d1f556790e98eb266009e7a61d337b6d4", size = 216559, upload-time = "2025-07-27T14:12:14.807Z" }, + { url = "https://files.pythonhosted.org/packages/cc/93/58714efbfdeb547909feaabe1d67b2bdd59f0597060271b9c548d5efb529/coverage-7.10.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1c4f679c6b573a5257af6012f167a45be4c749c9925fd44d5178fd641ad8bf72", size = 215677, upload-time = "2025-07-27T14:12:16.68Z" }, + { url = "https://files.pythonhosted.org/packages/c0/0c/18eaa5897e7e8cb3f8c45e563e23e8a85686b4585e29d53cacb6bc9cb340/coverage-7.10.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:871ebe8143da284bd77b84a9136200bd638be253618765d21a1fce71006d94af", size = 215899, upload-time = "2025-07-27T14:12:18.758Z" }, + { url = "https://files.pythonhosted.org/packages/84/c1/9d1affacc3c75b5a184c140377701bbf14fc94619367f07a269cd9e4fed6/coverage-7.10.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:998c4751dabf7d29b30594af416e4bf5091f11f92a8d88eb1512c7ba136d1ed7", size = 257140, upload-time = "2025-07-27T14:12:20.357Z" }, + { url = "https://files.pythonhosted.org/packages/3d/0f/339bc6b8fa968c346df346068cca1f24bdea2ddfa93bb3dc2e7749730962/coverage-7.10.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:780f750a25e7749d0af6b3631759c2c14f45de209f3faaa2398312d1c7a22759", size = 259005, upload-time = "2025-07-27T14:12:22.007Z" }, + { url = "https://files.pythonhosted.org/packages/c8/22/89390864b92ea7c909079939b71baba7e5b42a76bf327c1d615bd829ba57/coverage-7.10.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:590bdba9445df4763bdbebc928d8182f094c1f3947a8dc0fc82ef014dbdd8324", size = 261143, upload-time = "2025-07-27T14:12:23.746Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/56/3d04d89017c0c41c7a71bd69b29699d919b6bbf2649b8b2091240b97dd6a/coverage-7.10.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b2df80cb6a2af86d300e70acb82e9b79dab2c1e6971e44b78dbfc1a1e736b53", size = 258735, upload-time = "2025-07-27T14:12:25.73Z" }, + { url = "https://files.pythonhosted.org/packages/cb/40/312252c8afa5ca781063a09d931f4b9409dc91526cd0b5a2b84143ffafa2/coverage-7.10.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d6a558c2725bfb6337bf57c1cd366c13798bfd3bfc9e3dd1f4a6f6fc95a4605f", size = 256871, upload-time = "2025-07-27T14:12:27.767Z" }, + { url = "https://files.pythonhosted.org/packages/1f/2b/564947d5dede068215aaddb9e05638aeac079685101462218229ddea9113/coverage-7.10.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e6150d167f32f2a54690e572e0a4c90296fb000a18e9b26ab81a6489e24e78dd", size = 257692, upload-time = "2025-07-27T14:12:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/93/1b/c8a867ade85cb26d802aea2209b9c2c80613b9c122baa8c8ecea6799648f/coverage-7.10.1-cp313-cp313t-win32.whl", hash = "sha256:d946a0c067aa88be4a593aad1236493313bafaa27e2a2080bfe88db827972f3c", size = 218059, upload-time = "2025-07-27T14:12:31.076Z" }, + { url = "https://files.pythonhosted.org/packages/a1/fe/cd4ab40570ae83a516bf5e754ea4388aeedd48e660e40c50b7713ed4f930/coverage-7.10.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e37c72eaccdd5ed1130c67a92ad38f5b2af66eeff7b0abe29534225db2ef7b18", size = 219150, upload-time = "2025-07-27T14:12:32.746Z" }, + { url = "https://files.pythonhosted.org/packages/8d/16/6e5ed5854be6d70d0c39e9cb9dd2449f2c8c34455534c32c1a508c7dbdb5/coverage-7.10.1-cp313-cp313t-win_arm64.whl", hash = "sha256:89ec0ffc215c590c732918c95cd02b55c7d0f569d76b90bb1a5e78aa340618e4", size = 217014, upload-time = "2025-07-27T14:12:34.406Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/8e/6d0bfe9c3d7121cf936c5f8b03e8c3da1484fb801703127dba20fb8bd3c7/coverage-7.10.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:166d89c57e877e93d8827dac32cedae6b0277ca684c6511497311249f35a280c", size = 214951, upload-time = "2025-07-27T14:12:36.069Z" }, + { url = "https://files.pythonhosted.org/packages/f2/29/e3e51a8c653cf2174c60532aafeb5065cea0911403fa144c9abe39790308/coverage-7.10.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:bed4a2341b33cd1a7d9ffc47df4a78ee61d3416d43b4adc9e18b7d266650b83e", size = 215229, upload-time = "2025-07-27T14:12:37.759Z" }, + { url = "https://files.pythonhosted.org/packages/e0/59/3c972080b2fa18b6c4510201f6d4dc87159d450627d062cd9ad051134062/coverage-7.10.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ddca1e4f5f4c67980533df01430184c19b5359900e080248bbf4ed6789584d8b", size = 245738, upload-time = "2025-07-27T14:12:39.453Z" }, + { url = "https://files.pythonhosted.org/packages/2e/04/fc0d99d3f809452654e958e1788454f6e27b34e43f8f8598191c8ad13537/coverage-7.10.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:37b69226001d8b7de7126cad7366b0778d36777e4d788c66991455ba817c5b41", size = 248045, upload-time = "2025-07-27T14:12:41.387Z" }, + { url = "https://files.pythonhosted.org/packages/5e/2e/afcbf599e77e0dfbf4c97197747250d13d397d27e185b93987d9eaac053d/coverage-7.10.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2f22102197bcb1722691296f9e589f02b616f874e54a209284dd7b9294b0b7f", size = 249666, upload-time = "2025-07-27T14:12:43.056Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ae/bc47f7f8ecb7a06cbae2bf86a6fa20f479dd902bc80f57cff7730438059d/coverage-7.10.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1e0c768b0f9ac5839dac5cf88992a4bb459e488ee8a1f8489af4cb33b1af00f1", size = 247692, upload-time = "2025-07-27T14:12:44.83Z" }, + { url 
= "https://files.pythonhosted.org/packages/b6/26/cbfa3092d31ccba8ba7647e4d25753263e818b4547eba446b113d7d1efdf/coverage-7.10.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:991196702d5e0b120a8fef2664e1b9c333a81d36d5f6bcf6b225c0cf8b0451a2", size = 245536, upload-time = "2025-07-27T14:12:46.527Z" }, + { url = "https://files.pythonhosted.org/packages/56/77/9c68e92500e6a1c83d024a70eadcc9a173f21aadd73c4675fe64c9c43fdf/coverage-7.10.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ae8e59e5f4fd85d6ad34c2bb9d74037b5b11be072b8b7e9986beb11f957573d4", size = 246954, upload-time = "2025-07-27T14:12:49.279Z" }, + { url = "https://files.pythonhosted.org/packages/7f/a5/ba96671c5a669672aacd9877a5987c8551501b602827b4e84256da2a30a7/coverage-7.10.1-cp314-cp314-win32.whl", hash = "sha256:042125c89cf74a074984002e165d61fe0e31c7bd40ebb4bbebf07939b5924613", size = 217616, upload-time = "2025-07-27T14:12:51.214Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3c/e1e1eb95fc1585f15a410208c4795db24a948e04d9bde818fe4eb893bc85/coverage-7.10.1-cp314-cp314-win_amd64.whl", hash = "sha256:a22c3bfe09f7a530e2c94c87ff7af867259c91bef87ed2089cd69b783af7b84e", size = 218412, upload-time = "2025-07-27T14:12:53.429Z" }, + { url = "https://files.pythonhosted.org/packages/b0/85/7e1e5be2cb966cba95566ba702b13a572ca744fbb3779df9888213762d67/coverage-7.10.1-cp314-cp314-win_arm64.whl", hash = "sha256:ee6be07af68d9c4fca4027c70cea0c31a0f1bc9cb464ff3c84a1f916bf82e652", size = 216776, upload-time = "2025-07-27T14:12:55.482Z" }, + { url = "https://files.pythonhosted.org/packages/62/0f/5bb8f29923141cca8560fe2217679caf4e0db643872c1945ac7d8748c2a7/coverage-7.10.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d24fb3c0c8ff0d517c5ca5de7cf3994a4cd559cde0315201511dbfa7ab528894", size = 215698, upload-time = "2025-07-27T14:12:57.225Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/29/547038ffa4e8e4d9e82f7dfc6d152f75fcdc0af146913f0ba03875211f03/coverage-7.10.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1217a54cfd79be20512a67ca81c7da3f2163f51bbfd188aab91054df012154f5", size = 215902, upload-time = "2025-07-27T14:12:59.071Z" }, + { url = "https://files.pythonhosted.org/packages/e1/8a/7aaa8fbfaed900147987a424e112af2e7790e1ac9cd92601e5bd4e1ba60a/coverage-7.10.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:51f30da7a52c009667e02f125737229d7d8044ad84b79db454308033a7808ab2", size = 257230, upload-time = "2025-07-27T14:13:01.248Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1d/c252b5ffac44294e23a0d79dd5acf51749b39795ccc898faeabf7bee903f/coverage-7.10.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ed3718c757c82d920f1c94089066225ca2ad7f00bb904cb72b1c39ebdd906ccb", size = 259194, upload-time = "2025-07-27T14:13:03.247Z" }, + { url = "https://files.pythonhosted.org/packages/16/ad/6c8d9f83d08f3bac2e7507534d0c48d1a4f52c18e6f94919d364edbdfa8f/coverage-7.10.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc452481e124a819ced0c25412ea2e144269ef2f2534b862d9f6a9dae4bda17b", size = 261316, upload-time = "2025-07-27T14:13:04.957Z" }, + { url = "https://files.pythonhosted.org/packages/d6/4e/f9bbf3a36c061e2e0e0f78369c006d66416561a33d2bee63345aee8ee65e/coverage-7.10.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9d6f494c307e5cb9b1e052ec1a471060f1dea092c8116e642e7a23e79d9388ea", size = 258794, upload-time = "2025-07-27T14:13:06.715Z" }, + { url = "https://files.pythonhosted.org/packages/87/82/e600bbe78eb2cb0541751d03cef9314bcd0897e8eea156219c39b685f869/coverage-7.10.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:fc0e46d86905ddd16b85991f1f4919028092b4e511689bbdaff0876bd8aab3dd", size = 256869, upload-time = "2025-07-27T14:13:08.933Z" }, + 
{ url = "https://files.pythonhosted.org/packages/ce/5d/2fc9a9236c5268f68ac011d97cd3a5ad16cc420535369bedbda659fdd9b7/coverage-7.10.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:80b9ccd82e30038b61fc9a692a8dc4801504689651b281ed9109f10cc9fe8b4d", size = 257765, upload-time = "2025-07-27T14:13:10.778Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/b4e00b2bd48a2dc8e1c7d2aea7455f40af2e36484ab2ef06deb85883e9fe/coverage-7.10.1-cp314-cp314t-win32.whl", hash = "sha256:e58991a2b213417285ec866d3cd32db17a6a88061a985dbb7e8e8f13af429c47", size = 218420, upload-time = "2025-07-27T14:13:12.882Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/d21d05f33ea27ece327422240e69654b5932b0b29e7fbc40fbab3cf199bf/coverage-7.10.1-cp314-cp314t-win_amd64.whl", hash = "sha256:e88dd71e4ecbc49d9d57d064117462c43f40a21a1383507811cf834a4a620651", size = 219536, upload-time = "2025-07-27T14:13:14.718Z" }, + { url = "https://files.pythonhosted.org/packages/a6/68/7fea94b141281ed8be3d1d5c4319a97f2befc3e487ce33657fc64db2c45e/coverage-7.10.1-cp314-cp314t-win_arm64.whl", hash = "sha256:1aadfb06a30c62c2eb82322171fe1f7c288c80ca4156d46af0ca039052814bab", size = 217190, upload-time = "2025-07-27T14:13:16.85Z" }, + { url = "https://files.pythonhosted.org/packages/0f/64/922899cff2c0fd3496be83fa8b81230f5a8d82a2ad30f98370b133c2c83b/coverage-7.10.1-py3-none-any.whl", hash = "sha256:fa2a258aa6bf188eb9a8948f7102a83da7c430a0dce918dbd8b60ef8fcb772d7", size = 206597, upload-time = "2025-07-27T14:13:37.221Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "dill" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/12/80/630b4b88364e9a8c8c5797f4602d0f76ef820909ee32f0bacb9f90654042/dill-0.4.0.tar.gz", hash = "sha256:0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0", size = 186976, upload-time = 
"2025-04-16T00:41:48.867Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/3d/9373ad9c56321fdab5b41197068e1d8c25883b3fea29dd361f9b55116869/dill-0.4.0-py3-none-any.whl", hash = "sha256:44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049", size = 119668, upload-time = "2025-04-16T00:41:47.671Z" }, +] + +[[package]] +name = "distlib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +] + +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, +] + +[[package]] +name = "execnet" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/ff/b4c0dc78fbe20c3e59c0c7334de0c27eb4001a2b2017999af398bf730817/execnet-2.1.1.tar.gz", hash = 
"sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3", size = 166524, upload-time = "2024-04-08T09:04:19.245Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/09/2aea36ff60d16dd8879bdb2f5b3ee0ba8d08cbbdcdfe870e695ce3784385/execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", size = 40612, upload-time = "2024-04-08T09:04:17.414Z" }, +] + +[[package]] +name = "faker" +version = "37.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/65/95/da573e055608e180086e2ac3208f8c15d8b44220912f565a9821b9bff33a/faker-37.4.2.tar.gz", hash = "sha256:8e281bbaea30e5658895b8bea21cc50d27aaf3a43db3f2694409ca5701c56b0a", size = 1902890, upload-time = "2025-07-15T16:38:24.803Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/1c/b909a055be556c11f13cf058cfa0e152f9754d803ff3694a937efe300709/faker-37.4.2-py3-none-any.whl", hash = "sha256:b70ed1af57bfe988cbcd0afd95f4768c51eaf4e1ce8a30962e127ac5c139c93f", size = 1943179, upload-time = "2025-07-15T16:38:23.053Z" }, +] + +[[package]] +name = "filelock" +version = "3.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, +] + +[[package]] +name = "ghp-import" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "python-dateutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d9/29/d40217cbe2f6b1359e00c6c307bb3fc876ba74068cbab3dde77f03ca0dc4/ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343", size = 10943, upload-time = "2022-05-02T15:47:16.11Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034, upload-time = "2022-05-02T15:47:14.552Z" }, +] + +[[package]] +name = "gitdb" +version = "4.0.12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "smmap" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684, upload-time = "2025-01-02T07:20:46.413Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794, upload-time = "2025-01-02T07:20:43.624Z" }, +] + +[[package]] +name = "gitpython" +version = "3.1.45" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "gitdb" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9a/c8/dd58967d119baab745caec2f9d853297cec1989ec1d63f677d3880632b88/gitpython-3.1.45.tar.gz", hash = "sha256:85b0ee964ceddf211c41b9f27a49086010a190fd8132a24e21f362a4b36a791c", size = 215076, upload-time = "2025-07-24T03:45:54.871Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/61/d4b89fec821f72385526e1b9d9a3a0385dda4a72b206d28049e2c7cd39b8/gitpython-3.1.45-py3-none-any.whl", hash 
= "sha256:8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77", size = 208168, upload-time = "2025-07-24T03:45:52.517Z" }, +] + +[[package]] +name = "greenlet" +version = "3.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/92/bb85bd6e80148a4d2e0c59f7c0c2891029f8fd510183afc7d8d2feeed9b6/greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365", size = 185752, upload-time = "2025-06-05T16:16:09.955Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/2e/d4fcb2978f826358b673f779f78fa8a32ee37df11920dc2bb5589cbeecef/greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822", size = 270219, upload-time = "2025-06-05T16:10:10.414Z" }, + { url = "https://files.pythonhosted.org/packages/16/24/929f853e0202130e4fe163bc1d05a671ce8dcd604f790e14896adac43a52/greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83", size = 630383, upload-time = "2025-06-05T16:38:51.785Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b2/0320715eb61ae70c25ceca2f1d5ae620477d246692d9cc284c13242ec31c/greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf", size = 642422, upload-time = "2025-06-05T16:41:35.259Z" }, + { url = "https://files.pythonhosted.org/packages/bd/49/445fd1a210f4747fedf77615d941444349c6a3a4a1135bba9701337cd966/greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b", size = 638375, upload-time = "2025-06-05T16:48:18.235Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/c8/ca19760cf6eae75fa8dc32b487e963d863b3ee04a7637da77b616703bc37/greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147", size = 637627, upload-time = "2025-06-05T16:13:02.858Z" }, + { url = "https://files.pythonhosted.org/packages/65/89/77acf9e3da38e9bcfca881e43b02ed467c1dedc387021fc4d9bd9928afb8/greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5", size = 585502, upload-time = "2025-06-05T16:12:49.642Z" }, + { url = "https://files.pythonhosted.org/packages/97/c6/ae244d7c95b23b7130136e07a9cc5aadd60d59b5951180dc7dc7e8edaba7/greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc", size = 1114498, upload-time = "2025-06-05T16:36:46.598Z" }, + { url = "https://files.pythonhosted.org/packages/89/5f/b16dec0cbfd3070658e0d744487919740c6d45eb90946f6787689a7efbce/greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba", size = 1139977, upload-time = "2025-06-05T16:12:38.262Z" }, + { url = "https://files.pythonhosted.org/packages/66/77/d48fb441b5a71125bcac042fc5b1494c806ccb9a1432ecaa421e72157f77/greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34", size = 297017, upload-time = "2025-06-05T16:25:05.225Z" }, + { url = "https://files.pythonhosted.org/packages/f3/94/ad0d435f7c48debe960c53b8f60fb41c2026b1d0fa4a99a1cb17c3461e09/greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d", size = 271992, upload-time = "2025-06-05T16:11:23.467Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/5d/7c27cf4d003d6e77749d299c7c8f5fd50b4f251647b5c2e97e1f20da0ab5/greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b", size = 638820, upload-time = "2025-06-05T16:38:52.882Z" }, + { url = "https://files.pythonhosted.org/packages/c6/7e/807e1e9be07a125bb4c169144937910bf59b9d2f6d931578e57f0bce0ae2/greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d", size = 653046, upload-time = "2025-06-05T16:41:36.343Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ab/158c1a4ea1068bdbc78dba5a3de57e4c7aeb4e7fa034320ea94c688bfb61/greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264", size = 647701, upload-time = "2025-06-05T16:48:19.604Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0d/93729068259b550d6a0288da4ff72b86ed05626eaf1eb7c0d3466a2571de/greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688", size = 649747, upload-time = "2025-06-05T16:13:04.628Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f6/c82ac1851c60851302d8581680573245c8fc300253fc1ff741ae74a6c24d/greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb", size = 605461, upload-time = "2025-06-05T16:12:50.792Z" }, + { url = "https://files.pythonhosted.org/packages/98/82/d022cf25ca39cf1200650fc58c52af32c90f80479c25d1cbf57980ec3065/greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c", size = 1121190, upload-time = "2025-06-05T16:36:48.59Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/e1/25297f70717abe8104c20ecf7af0a5b82d2f5a980eb1ac79f65654799f9f/greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163", size = 1149055, upload-time = "2025-06-05T16:12:40.457Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8f/8f9e56c5e82eb2c26e8cde787962e66494312dc8cb261c460e1f3a9c88bc/greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849", size = 297817, upload-time = "2025-06-05T16:29:49.244Z" }, + { url = "https://files.pythonhosted.org/packages/b1/cf/f5c0b23309070ae93de75c90d29300751a5aacefc0a3ed1b1d8edb28f08b/greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad", size = 270732, upload-time = "2025-06-05T16:10:08.26Z" }, + { url = "https://files.pythonhosted.org/packages/48/ae/91a957ba60482d3fecf9be49bc3948f341d706b52ddb9d83a70d42abd498/greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef", size = 639033, upload-time = "2025-06-05T16:38:53.983Z" }, + { url = "https://files.pythonhosted.org/packages/6f/df/20ffa66dd5a7a7beffa6451bdb7400d66251374ab40b99981478c69a67a8/greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3", size = 652999, upload-time = "2025-06-05T16:41:37.89Z" }, + { url = "https://files.pythonhosted.org/packages/51/b4/ebb2c8cb41e521f1d72bf0465f2f9a2fd803f674a88db228887e6847077e/greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95", size = 647368, upload-time = "2025-06-05T16:48:21.467Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/6a/1e1b5aa10dced4ae876a322155705257748108b7fd2e4fae3f2a091fe81a/greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb", size = 650037, upload-time = "2025-06-05T16:13:06.402Z" }, + { url = "https://files.pythonhosted.org/packages/26/f2/ad51331a157c7015c675702e2d5230c243695c788f8f75feba1af32b3617/greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b", size = 608402, upload-time = "2025-06-05T16:12:51.91Z" }, + { url = "https://files.pythonhosted.org/packages/26/bc/862bd2083e6b3aff23300900a956f4ea9a4059de337f5c8734346b9b34fc/greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0", size = 1119577, upload-time = "2025-06-05T16:36:49.787Z" }, + { url = "https://files.pythonhosted.org/packages/86/94/1fc0cc068cfde885170e01de40a619b00eaa8f2916bf3541744730ffb4c3/greenlet-3.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:024571bbce5f2c1cfff08bf3fbaa43bbc7444f580ae13b0099e95d0e6e67ed36", size = 1147121, upload-time = "2025-06-05T16:12:42.527Z" }, + { url = "https://files.pythonhosted.org/packages/27/1a/199f9587e8cb08a0658f9c30f3799244307614148ffe8b1e3aa22f324dea/greenlet-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5195fb1e75e592dd04ce79881c8a22becdfa3e6f500e7feb059b1e6fdd54d3e3", size = 297603, upload-time = "2025-06-05T16:20:12.651Z" }, + { url = "https://files.pythonhosted.org/packages/d8/ca/accd7aa5280eb92b70ed9e8f7fd79dc50a2c21d8c73b9a0856f5b564e222/greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86", size = 271479, upload-time = "2025-06-05T16:10:47.525Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/71/01ed9895d9eb49223280ecc98a557585edfa56b3d0e965b9fa9f7f06b6d9/greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97", size = 683952, upload-time = "2025-06-05T16:38:55.125Z" }, + { url = "https://files.pythonhosted.org/packages/ea/61/638c4bdf460c3c678a0a1ef4c200f347dff80719597e53b5edb2fb27ab54/greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728", size = 696917, upload-time = "2025-06-05T16:41:38.959Z" }, + { url = "https://files.pythonhosted.org/packages/22/cc/0bd1a7eb759d1f3e3cc2d1bc0f0b487ad3cc9f34d74da4b80f226fde4ec3/greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a", size = 692443, upload-time = "2025-06-05T16:48:23.113Z" }, + { url = "https://files.pythonhosted.org/packages/67/10/b2a4b63d3f08362662e89c103f7fe28894a51ae0bc890fabf37d1d780e52/greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892", size = 692995, upload-time = "2025-06-05T16:13:07.972Z" }, + { url = "https://files.pythonhosted.org/packages/5a/c6/ad82f148a4e3ce9564056453a71529732baf5448ad53fc323e37efe34f66/greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141", size = 655320, upload-time = "2025-06-05T16:12:53.453Z" }, + { url = "https://files.pythonhosted.org/packages/5c/4f/aab73ecaa6b3086a4c89863d94cf26fa84cbff63f52ce9bc4342b3087a06/greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a", size = 301236, upload-time = "2025-06-05T16:15:20.111Z" }, +] + +[[package]] +name = 
"identify" +version = "2.6.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6", size = 99254, upload-time = "2025-05-23T20:37:53.3Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2", size = 99145, upload-time = "2025-05-23T20:37:51.495Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = 
"sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "linkify" +version = "1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/c6/246100fa3967074d9725b3716913bd495823547bde5047050d4c3462f994/linkify-1.4.tar.gz", hash = "sha256:9ba276ba179525f7262820d90f009604e51cd4f1466c1112b882ef7eda243d5e", size = 1749, upload-time = "2009-11-12T21:42:00.934Z" } + +[[package]] +name = 
"linkify-it-py" +version = "2.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "uc-micro-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2a/ae/bb56c6828e4797ba5a4821eec7c43b8bf40f69cda4d4f5f8c8a2810ec96a/linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048", size = 27946, upload-time = "2024-02-04T14:48:04.179Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/1e/b832de447dee8b582cac175871d2f6c3d5077cc56d5575cadba1fd1cccfa/linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79", size = 19820, upload-time = "2024-02-04T14:48:02.496Z" }, +] + +[[package]] +name = "markdown" +version = "3.8.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/c2/4ab49206c17f75cb08d6311171f2d65798988db4360c4d1485bd0eedd67c/markdown-3.8.2.tar.gz", hash = "sha256:247b9a70dd12e27f67431ce62523e675b866d254f900c4fe75ce3dda62237c45", size = 362071, upload-time = "2025-06-19T17:12:44.483Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/2b/34cc11786bc00d0f04d0f5fdc3a2b1ae0b6239eef72d3d345805f9ad92a1/markdown-3.8.2-py3-none-any.whl", hash = "sha256:5c83764dbd4e00bdd94d85a19b8d55ccca20fe35b2e678a1422b380324dd5f24", size = 106827, upload-time = "2025-06-19T17:12:42.994Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, 
+ { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.4.2" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/03/a2ecab526543b152300717cf232bb4bb8605b6edb946c845016fa9c9c9fd/mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5", size = 43542, upload-time = "2024-09-09T20:27:49.564Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/f7/7782a043553ee469c1ff49cfa1cdace2d6bf99a1f333cf38676b3ddf30da/mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636", size = 55316, upload-time = "2024-09-09T20:27:48.397Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "mergedeep" +version = "1.3.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/41/580bb4006e3ed0361b8151a01d324fb03f420815446c7def45d02f74c270/mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", size = 4661, upload-time = "2021-02-05T18:55:30.623Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = 
"sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354, upload-time = "2021-02-05T18:55:29.583Z" }, +] + +[[package]] +name = "mkdocs" +version = "1.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "ghp-import" }, + { name = "jinja2" }, + { name = "markdown" }, + { name = "markupsafe" }, + { name = "mergedeep" }, + { name = "mkdocs-get-deps" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "pyyaml" }, + { name = "pyyaml-env-tag" }, + { name = "watchdog" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bc/c6/bbd4f061bd16b378247f12953ffcb04786a618ce5e904b8c5a01a0309061/mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2", size = 3889159, upload-time = "2024-08-30T12:24:06.899Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/5b/dbc6a8cddc9cfa9c4971d59fb12bb8d42e161b7e7f8cc89e49137c5b279c/mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e", size = 3864451, upload-time = "2024-08-30T12:24:05.054Z" }, +] + +[[package]] +name = "mkdocs-get-deps" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mergedeep" }, + { name = "platformdirs" }, + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/f5/ed29cd50067784976f25ed0ed6fcd3c2ce9eb90650aa3b2796ddf7b6870b/mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c", size = 10239, upload-time = "2023-11-20T17:51:09.981Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/d4/029f984e8d3f3b6b726bd33cafc473b75e9e44c0f7e80a5b29abc466bdea/mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134", size = 9521, 
upload-time = "2023-11-20T17:51:08.587Z" }, +] + +[[package]] +name = "mock" +version = "5.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/07/8c/14c2ae915e5f9dca5a22edd68b35be94400719ccfa068a03e0fb63d0f6f6/mock-5.2.0.tar.gz", hash = "sha256:4e460e818629b4b173f32d08bf30d3af8123afbb8e04bb5707a1fd4799e503f0", size = 92796, upload-time = "2025-03-03T12:31:42.911Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/d9/617e6af809bf3a1d468e0d58c3997b1dc219a9a9202e650d30c2fc85d481/mock-5.2.0-py3-none-any.whl", hash = "sha256:7ba87f72ca0e915175596069dbbcc7c75af7b5e9b9bc107ad6349ede0819982f", size = 31617, upload-time = "2025-03-03T12:31:41.518Z" }, +] + +[[package]] +name = "multiprocess" +version = "0.70.18" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dill" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/fd/2ae3826f5be24c6ed87266bc4e59c46ea5b059a103f3d7e7eb76a52aeecb/multiprocess-0.70.18.tar.gz", hash = "sha256:f9597128e6b3e67b23956da07cf3d2e5cba79e2f4e0fba8d7903636663ec6d0d", size = 1798503, upload-time = "2025-04-17T03:11:27.742Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/4d/9af0d1279c84618bcd35bf5fd7e371657358c7b0a523e54a9cffb87461f8/multiprocess-0.70.18-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8b8940ae30139e04b076da6c5b83e9398585ebdf0f2ad3250673fef5b2ff06d6", size = 144695, upload-time = "2025-04-17T03:11:09.161Z" }, + { url = "https://files.pythonhosted.org/packages/17/bf/87323e79dd0562474fad3373c21c66bc6c3c9963b68eb2a209deb4c8575e/multiprocess-0.70.18-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0929ba95831adb938edbd5fb801ac45e705ecad9d100b3e653946b7716cb6bd3", size = 144742, upload-time = "2025-04-17T03:11:10.072Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/74/cb8c831e58dc6d5cf450b17c7db87f14294a1df52eb391da948b5e0a0b94/multiprocess-0.70.18-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4d77f8e4bfe6c6e2e661925bbf9aed4d5ade9a1c6502d5dfc10129b9d1141797", size = 144745, upload-time = "2025-04-17T03:11:11.453Z" }, + { url = "https://files.pythonhosted.org/packages/ba/d8/0cba6cf51a1a31f20471fbc823a716170c73012ddc4fb85d706630ed6e8f/multiprocess-0.70.18-py310-none-any.whl", hash = "sha256:60c194974c31784019c1f459d984e8f33ee48f10fcf42c309ba97b30d9bd53ea", size = 134948, upload-time = "2025-04-17T03:11:20.223Z" }, + { url = "https://files.pythonhosted.org/packages/4b/88/9039f2fed1012ef584751d4ceff9ab4a51e5ae264898f0b7cbf44340a859/multiprocess-0.70.18-py311-none-any.whl", hash = "sha256:5aa6eef98e691281b3ad923be2832bf1c55dd2c859acd73e5ec53a66aae06a1d", size = 144462, upload-time = "2025-04-17T03:11:21.657Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b6/5f922792be93b82ec6b5f270bbb1ef031fd0622847070bbcf9da816502cc/multiprocess-0.70.18-py312-none-any.whl", hash = "sha256:9b78f8e5024b573730bfb654783a13800c2c0f2dfc0c25e70b40d184d64adaa2", size = 150287, upload-time = "2025-04-17T03:11:22.69Z" }, + { url = "https://files.pythonhosted.org/packages/ee/25/7d7e78e750bc1aecfaf0efbf826c69a791d2eeaf29cf20cba93ff4cced78/multiprocess-0.70.18-py313-none-any.whl", hash = "sha256:871743755f43ef57d7910a38433cfe41319e72be1bbd90b79c7a5ac523eb9334", size = 151917, upload-time = "2025-04-17T03:11:24.044Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c3/ca84c19bd14cdfc21c388fdcebf08b86a7a470ebc9f5c3c084fc2dbc50f7/multiprocess-0.70.18-py38-none-any.whl", hash = "sha256:dbf705e52a154fe5e90fb17b38f02556169557c2dd8bb084f2e06c2784d8279b", size = 132636, upload-time = "2025-04-17T03:11:24.936Z" }, + { url = "https://files.pythonhosted.org/packages/6c/28/dd72947e59a6a8c856448a5e74da6201cb5502ddff644fbc790e4bd40b9a/multiprocess-0.70.18-py39-none-any.whl", hash = 
"sha256:e78ca805a72b1b810c690b6b4cc32579eba34f403094bbbae962b7b5bf9dfcb8", size = 133478, upload-time = "2025-04-17T03:11:26.253Z" }, +] + +[[package]] +name = "myst-parser" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "jinja2" }, + { name = "markdown-it-py" }, + { name = "mdit-py-plugins" }, + { name = "pyyaml" }, + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/a5/9626ba4f73555b3735ad86247a8077d4603aa8628537687c839ab08bfe44/myst_parser-4.0.1.tar.gz", hash = "sha256:5cfea715e4f3574138aecbf7d54132296bfd72bb614d31168f48c477a830a7c4", size = 93985, upload-time = "2025-02-12T10:53:03.833Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/df/76d0321c3797b54b60fef9ec3bd6f4cfd124b9e422182156a1dd418722cf/myst_parser-4.0.1-py3-none-any.whl", hash = "sha256:9134e88959ec3b5780aedf8a99680ea242869d012e8821db3126d427edc9c95d", size = 84579, upload-time = "2025-02-12T10:53:02.078Z" }, +] + +[package.optional-dependencies] +linkify = [ + { name = "linkify-it-py" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "parse" +version = "1.20.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4f/78/d9b09ba24bb36ef8b83b71be547e118d46214735b6dfb39e4bfde0e9b9dd/parse-1.20.2.tar.gz", hash = "sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce", size = 29391, upload-time = "2024-06-11T04:41:57.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/31/ba45bf0b2aa7898d81cbbfac0e88c267befb59ad91a19e36e1bc5578ddb1/parse-1.20.2-py2.py3-none-any.whl", hash = "sha256:967095588cb802add9177d0c0b6133b5ba33b1ea9007ca800e526f42a85af558", size = 20126, upload-time = "2024-06-11T04:41:55.057Z" }, +] + +[[package]] +name = "pathos" +version = "0.3.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dill" }, + { name = "multiprocess" }, + { name = "pox" }, + { name = "ppft" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/be/90/fdbe3bbfe79933db439e1844083cb6e9d5a9d3b686738549b3d22d06eae7/pathos-0.3.4.tar.gz", hash = "sha256:bad4912d0ef865654a7cc478da65f2e1d5b69f3d92c4a7d9c9845657783c0754", size = 167076, upload-time = "2025-04-17T03:37:08.234Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/0a/daece46e65c821d153746566a1604ac90338f0279b1fb858a3617eb60472/pathos-0.3.4-py3-none-any.whl", hash = 
"sha256:fe44883448c05c80d518b61df491b496f6190bb6860253f3254d8c9afb53c340", size = 82261, upload-time = "2025-04-17T03:37:06.936Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.3.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "plumbum" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f0/5d/49ba324ad4ae5b1a4caefafbce7a1648540129344481f2ed4ef6bb68d451/plumbum-1.9.0.tar.gz", hash = "sha256:e640062b72642c3873bd5bdc3effed75ba4d3c70ef6b6a7b907357a84d909219", size = 319083, upload-time = "2024-10-05T05:59:27.059Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4f/9d/d03542c93bb3d448406731b80f39c3d5601282f778328c22c77d270f4ed4/plumbum-1.9.0-py3-none-any.whl", hash = "sha256:9fd0d3b0e8d86e4b581af36edf3f3bbe9d1ae15b45b8caab28de1bcb27aaa7f5", size = 127970, upload-time = "2024-10-05T05:59:25.102Z" }, +] + +[[package]] +name = "pox" +version = "0.3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/99/42670d273fd598a6fe98c8b2f593ee425b29e44f2d1a61ff622031204ccd/pox-0.3.6.tar.gz", hash = "sha256:84eeed39600159a62804aacfc00e353edeaae67d8c647ccaaab73a6efed3f605", size = 119393, upload-time = "2025-04-16T00:05:49.811Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/c2/6435789c26661bef699868ee54d2763aea636a1ed21ec8e350b1f9f65888/pox-0.3.6-py3-none-any.whl", hash = "sha256:d48654d0a3dca0c9c02dccae54a53c3870286a5217ad306b2bd94f84e008bc1b", size = 29495, upload-time = "2025-04-16T00:05:48.319Z" }, +] + +[[package]] +name = "ppft" +version = "1.7.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/1d/46/9e9f2ae7e8e284acbde6ab36f7f4a35b273519a60c0ed419af2da780d49f/ppft-1.7.7.tar.gz", hash = "sha256:f3f77448cfe24c2b8d2296b6d8732280b25041a3f3e1f551856c6451d3e01b96", size = 136272, upload-time = "2025-04-16T01:47:40.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5b/23/6aef7c24f4ee6f765aeaaaa3bf24cfdb0730a20336a02b1a061d227d84be/ppft-1.7.7-py3-none-any.whl", hash = "sha256:fb7524db110682de886b4bb5b08f7bf6a38940566074ef2f62521cbbd3864676", size = 56764, upload-time = "2025-04-16T01:47:39.453Z" }, +] + +[[package]] +name = "pre-commit" +version = "4.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/39/679ca9b26c7bb2999ff122d50faa301e49af82ca9c066ec061cfbc0c6784/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146", size = 193424, upload-time = "2025-03-18T21:35:20.987Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = "2025-03-18T21:35:19.343Z" }, +] + +[[package]] +name = "psutil" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003, upload-time = "2025-02-13T21:54:07.946Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051, upload-time = "2025-02-13T21:54:12.36Z" }, + { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535, upload-time = "2025-02-13T21:54:16.07Z" }, + { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004, upload-time = "2025-02-13T21:54:18.662Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986, upload-time = "2025-02-13T21:54:21.811Z" }, + { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544, upload-time = "2025-02-13T21:54:24.68Z" }, + { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053, upload-time = "2025-02-13T21:54:34.31Z" }, + { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = 
"sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" }, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764, upload-time = "2024-10-16T11:24:58.126Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/8f/9feb01291d0d7a0a4c6a6bab24094135c2b59c6a81943752f632c75896d6/psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff", size = 3043397, upload-time = "2024-10-16T11:19:40.033Z" }, + { url = "https://files.pythonhosted.org/packages/15/30/346e4683532011561cd9c8dfeac6a8153dd96452fee0b12666058ab7893c/psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c", size = 3274806, upload-time = "2024-10-16T11:19:43.5Z" }, + { url = "https://files.pythonhosted.org/packages/66/6e/4efebe76f76aee7ec99166b6c023ff8abdc4e183f7b70913d7c047701b79/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c", size = 2851370, upload-time = "2024-10-16T11:19:46.986Z" }, + { url = "https://files.pythonhosted.org/packages/7f/fd/ff83313f86b50f7ca089b161b8e0a22bb3c319974096093cd50680433fdb/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb", size = 3080780, upload-time = "2024-10-16T11:19:50.242Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/c4/bfadd202dcda8333a7ccafdc51c541dbdfce7c2c7cda89fa2374455d795f/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341", size = 3264583, upload-time = "2024-10-16T11:19:54.424Z" }, + { url = "https://files.pythonhosted.org/packages/5d/f1/09f45ac25e704ac954862581f9f9ae21303cc5ded3d0b775532b407f0e90/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a", size = 3019831, upload-time = "2024-10-16T11:19:57.762Z" }, + { url = "https://files.pythonhosted.org/packages/9e/2e/9beaea078095cc558f215e38f647c7114987d9febfc25cb2beed7c3582a5/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b", size = 2871822, upload-time = "2024-10-16T11:20:04.693Z" }, + { url = "https://files.pythonhosted.org/packages/01/9e/ef93c5d93f3dc9fc92786ffab39e323b9aed066ba59fdc34cf85e2722271/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7", size = 2820975, upload-time = "2024-10-16T11:20:11.401Z" }, + { url = "https://files.pythonhosted.org/packages/a5/f0/049e9631e3268fe4c5a387f6fc27e267ebe199acf1bc1bc9cbde4bd6916c/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e", size = 2919320, upload-time = "2024-10-16T11:20:17.959Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9a/bcb8773b88e45fb5a5ea8339e2104d82c863a3b8558fbb2aadfe66df86b3/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68", size = 2957617, upload-time = "2024-10-16T11:20:24.711Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/6b/144336a9bf08a67d217b3af3246abb1d027095dab726f0687f01f43e8c03/psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392", size = 1024618, upload-time = "2024-10-16T11:20:27.718Z" }, + { url = "https://files.pythonhosted.org/packages/61/69/3b3d7bd583c6d3cbe5100802efa5beacaacc86e37b653fc708bf3d6853b8/psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4", size = 1163816, upload-time = "2024-10-16T11:20:30.777Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771, upload-time = "2024-10-16T11:20:35.234Z" }, + { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336, upload-time = "2024-10-16T11:20:38.742Z" }, + { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637, upload-time = "2024-10-16T11:20:42.145Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097, upload-time = "2024-10-16T11:20:46.185Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776, upload-time = "2024-10-16T11:20:50.879Z" }, + { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968, upload-time = "2024-10-16T11:20:56.819Z" }, + { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334, upload-time = "2024-10-16T11:21:02.411Z" }, + { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722, upload-time = "2024-10-16T11:21:09.01Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132, upload-time = "2024-10-16T11:21:16.339Z" }, + { url = "https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312, upload-time = "2024-10-16T11:21:25.584Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191, upload-time = "2024-10-16T11:21:29.912Z" }, + { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031, upload-time = "2024-10-16T11:21:34.211Z" }, + { url = "https://files.pythonhosted.org/packages/3e/30/d41d3ba765609c0763505d565c4d12d8f3c79793f0d0f044ff5a28bf395b/psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d", size = 3044699, upload-time = "2024-10-16T11:21:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/35/44/257ddadec7ef04536ba71af6bc6a75ec05c5343004a7ec93006bee66c0bc/psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb", size = 3275245, upload-time = "2024-10-16T11:21:51.989Z" }, + { url = "https://files.pythonhosted.org/packages/1b/11/48ea1cd11de67f9efd7262085588790a95d9dfcd9b8a687d46caf7305c1a/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7", size = 2851631, upload-time = "2024-10-16T11:21:57.584Z" }, + { url = "https://files.pythonhosted.org/packages/62/e0/62ce5ee650e6c86719d621a761fe4bc846ab9eff8c1f12b1ed5741bf1c9b/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d", size = 3082140, upload-time = "2024-10-16T11:22:02.005Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/ce/63f946c098611f7be234c0dd7cb1ad68b0b5744d34f68062bb3c5aa510c8/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73", size = 3264762, upload-time = "2024-10-16T11:22:06.412Z" }, + { url = "https://files.pythonhosted.org/packages/43/25/c603cd81402e69edf7daa59b1602bd41eb9859e2824b8c0855d748366ac9/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673", size = 3020967, upload-time = "2024-10-16T11:22:11.583Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d6/8708d8c6fca531057fa170cdde8df870e8b6a9b136e82b361c65e42b841e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f", size = 2872326, upload-time = "2024-10-16T11:22:16.406Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ac/5b1ea50fc08a9df82de7e1771537557f07c2632231bbab652c7e22597908/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909", size = 2822712, upload-time = "2024-10-16T11:22:21.366Z" }, + { url = "https://files.pythonhosted.org/packages/c4/fc/504d4503b2abc4570fac3ca56eb8fed5e437bf9c9ef13f36b6621db8ef00/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1", size = 2920155, upload-time = "2024-10-16T11:22:25.684Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d1/323581e9273ad2c0dbd1902f3fb50c441da86e894b6e25a73c3fda32c57e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567", size = 2959356, upload-time = "2024-10-16T11:22:30.562Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/50/d13ea0a054189ae1bc21af1d85b6f8bb9bbc5572991055d70ad9006fe2d6/psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142", size = 2569224, upload-time = "2025-01-04T20:09:19.234Z" }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, +] + +[[package]] +name = "pygit2" +version = "1.19.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6c/4b/da6f1a1a48a4095e3c12c5fa1f2784f4423db3e16159e08740cc5c6ee639/pygit2-1.19.0.tar.gz", hash = "sha256:ca5db6f395a74166a019d777895f96bcb211ee60ce0be4132b139603e0066d83", size = 799757, upload-time = "2025-10-23T12:34:35.783Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/c6/302e84b2326050e19ec1617363d4d40d8bba784d28875ee800104b4ab82a/pygit2-1.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:12ee8dc6d14573811ba52a18b37d4f6f42e66e0fcef7ed7d4b5e799bc66455f5", size = 5516323, upload-time = "2025-10-23T12:33:23.458Z" }, + { url = "https://files.pythonhosted.org/packages/c8/0a/381ff360e4e65c6227a673ae34067c2a6f347a68b991e3cb9ab7b6d499f5/pygit2-1.19.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:ae8fe208d11bbfb2dd2716ac8871764c847d1db29f40a1bb20b469df652ca0e3", size = 5782847, upload-time = "2025-10-23T12:33:26.027Z" }, + { url = "https://files.pythonhosted.org/packages/a4/19/94584f45b1f6e73549270ee8693725f379945eb1b6b0b9bde7572392ef50/pygit2-1.19.0-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:18c13b8c49402d007b58744379c455d56526eae16bb9059b2a578d43dd8dfc40", size = 4621296, upload-time = "2025-10-23T12:33:27.751Z" }, + { url = "https://files.pythonhosted.org/packages/70/07/b6f56301aae5418ec284dac523b9126f6f619e999b5fb09b75a1b838a663/pygit2-1.19.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f23387f555f6a7224657c7cec618b374c7b01fc617b38ca551859bb69548ed53", size = 5513796, upload-time = "2025-10-23T12:33:28.948Z" }, + { url = "https://files.pythonhosted.org/packages/03/bb/8609d2135652955521ef3a4f30462b247c88c6299e7d9169d0b37dd85b55/pygit2-1.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a29bd5db67631ac526dbab327f6e254e33e44aa14169055621da6c94636e7e2c", size = 5781787, upload-time = "2025-10-23T12:33:30.623Z" }, + { url = "https://files.pythonhosted.org/packages/64/54/55de062e2402e15480f042e8283f437a996b4b6c22afcdbec1f7e2ca0866/pygit2-1.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6d7a90a3cfa55b828c912c1817a1c8c4d5a0988df0958371c122774e997a6c3e", size = 5480456, upload-time = "2025-10-23T12:33:32.616Z" }, + { url = "https://files.pythonhosted.org/packages/00/6b/677c6aaf6d569e2151a631a0202ac9f0853c208fb5e23b0a8b01eed9ea3b/pygit2-1.19.0-cp311-cp311-win32.whl", hash = "sha256:30266cf1e679a24f689d753931d465bedc7e1270a8aa10abe9065a78439a5558", size = 941540, upload-time = "2025-10-23T12:33:33.802Z" }, + { url = "https://files.pythonhosted.org/packages/84/c2/d9f6cb6c3b516cb59f10237768d62487734d6ab429cf53165f77ca90230f/pygit2-1.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:c6f519790957936dcc54849b901af0cc1cec7aef3be1eb336184b1790a41ebf2", size = 1159185, 
upload-time = "2025-10-23T12:33:35.182Z" }, + { url = "https://files.pythonhosted.org/packages/31/ee/7cd716be9d698cbb3fb645164454126df02670935eb63466673f1e45c9ef/pygit2-1.19.0-cp311-cp311-win_arm64.whl", hash = "sha256:8a7e7ef28a457643bc6a4d17ca4b437db5f5e400926efda4d269a597a6350e4e", size = 966767, upload-time = "2025-10-23T12:33:36.553Z" }, + { url = "https://files.pythonhosted.org/packages/88/d1/e50790d985932f31d329ab177bc714703a10fe9779dc3f76c101ff1a18ba/pygit2-1.19.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:770e1d506ceb08cc65ac0de4e8a9911a169603361c23c3386556a8aab7ee1f7e", size = 5510740, upload-time = "2025-10-23T12:33:38.12Z" }, + { url = "https://files.pythonhosted.org/packages/89/e2/56eb9de348221aa7b76d4fd07a663a1be2ccaad1571f68cc9bd83fc7a4ef/pygit2-1.19.0-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db2a75f4a2e896a8b09bcfdeb1d52c0fc72b403e0e9910c03307d5e577e3fb40", size = 5783741, upload-time = "2025-10-23T12:33:40.253Z" }, + { url = "https://files.pythonhosted.org/packages/c6/91/274f7ea354d9029a27133f3c1a222a0fb468b4bc40f535e389cda25a17c7/pygit2-1.19.0-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2600ed9acc3b6f52e49a65c2200e5eadd70ef6d022fd8e029adbfa6e6d9cbf50", size = 4620504, upload-time = "2025-10-23T12:33:41.48Z" }, + { url = "https://files.pythonhosted.org/packages/bf/8b/e1a4728ef6f7c1522394a16b614f52a91295602126b81150acc944d2b858/pygit2-1.19.0-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:795dce83170f4b82fa275a6233ee3f70673a06a3e22a1c57221e16b9a140ef98", size = 5518086, upload-time = "2025-10-23T12:33:42.926Z" }, + { url = "https://files.pythonhosted.org/packages/63/ff/04d91f40caae04bf03394b181ed5e614e82928a99ea3659d38d12a124604/pygit2-1.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b26875600a8720196d0cdaacdb485761ac07334512a44da79d7b2398672549f7", size = 5782825, upload-time = "2025-10-23T12:33:44.135Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/c7/2dea8ce2d0c6185911334f5f84eeb9e941a030e13596b43ee5fab00f550c/pygit2-1.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b5dd263e0d3820892e0751b344eab30f5fb068f20a456e2b2fc2082160d263fe", size = 5484890, upload-time = "2025-10-23T12:33:48.171Z" }, + { url = "https://files.pythonhosted.org/packages/97/bc/2df6780be170187baeb2de4e4743c66cca8c35e838a8e66a9e53c0d53282/pygit2-1.19.0-cp312-cp312-win32.whl", hash = "sha256:1314c81d3608201be032ff1631392f92c767b65d3c81f7efb4e83a551b65290d", size = 942311, upload-time = "2025-10-23T12:33:49.257Z" }, + { url = "https://files.pythonhosted.org/packages/48/bd/aba9902ee25e7a6aaf43c10b979dbbabe64f920217e8817f4df942fb0b68/pygit2-1.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:f7efa3fe6d818b48561bc5b72bd991eb57c0baaafc864b64b27f9d064761e557", size = 1159573, upload-time = "2025-10-23T12:33:50.397Z" }, + { url = "https://files.pythonhosted.org/packages/2c/de/0063114b8140f0aa52bdc63d49405bc4a81268968319ce922ce374264554/pygit2-1.19.0-cp312-cp312-win_arm64.whl", hash = "sha256:c433b9d448912ba7237cb26149b43252b6187acebfa205edf53cfde9e0e441bb", size = 966822, upload-time = "2025-10-23T12:33:51.764Z" }, + { url = "https://files.pythonhosted.org/packages/3b/f7/c0f72a5a87537b5622931a59be33654fa6e7cce25497c42f54de45a54622/pygit2-1.19.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e84d4a1c8bcd462524bf9d0c5778c2219042c8aeeea82735e8f415d8f8519797", size = 5510727, upload-time = "2025-10-23T12:33:53.223Z" }, + { url = "https://files.pythonhosted.org/packages/ba/40/3df9f3cc9397aae3c6befcea9f815393c8d0a53e9717b6ab4ca51f62ff72/pygit2-1.19.0-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5998943514b89fd4bf8ab11320872bc4c0b6d3517b27beaf79ff9591d3142f1c", size = 5784559, upload-time = "2025-10-23T12:33:54.531Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/71/bcbf4e144e843329f4167df2c54ec67dfffa8b71ee736508cada602f1894/pygit2-1.19.0-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:38ea299bd02584df017f44a0673157bb7b28a2ae69826cfbb456f7429e781d58", size = 4621054, upload-time = "2025-10-23T12:33:55.783Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b3/f0c69382492f1f19030ee580fb85bd4cbae157ecf3b24ac89362a315539c/pygit2-1.19.0-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:46f5f287f19053eb1ba36d72a7458712bc5dcdb1b667ecf162a6cae7e643afe2", size = 5519203, upload-time = "2025-10-23T12:33:57.099Z" }, + { url = "https://files.pythonhosted.org/packages/06/0b/919be1b5a679391125e96acfa6f9930f07aa143854faf5d37e67950a150a/pygit2-1.19.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:41d9effdc1e54e7bcd14013ea5a86c9dbf9bbc16b7c932d6e0ed96773e0baa68", size = 5784159, upload-time = "2025-10-23T12:33:58.343Z" }, + { url = "https://files.pythonhosted.org/packages/13/8a/19944d4697e0cf52abe6721bfefa4a826c7ec33144cdeab263180ee1311f/pygit2-1.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e74312467a45de208dc157d1053b851b4181c2fabcacc5d9c578a8ef1b367e13", size = 5486342, upload-time = "2025-10-23T12:33:59.515Z" }, + { url = "https://files.pythonhosted.org/packages/ec/67/3e2d8d68b857466bea178236dafbcac09476907ee5dc7e9fc6e178407890/pygit2-1.19.0-cp313-cp313-win32.whl", hash = "sha256:eb55020bf0bd36e9a4c167c88139a9e20e787b2c66b5c2f60a8a12f3e0334a82", size = 942284, upload-time = "2025-10-23T12:34:01.438Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ae/58ff127fa1177f63c6b56bf77882a357779090a70aaaadbdfe71bbd98a27/pygit2-1.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:37ba33c59b8b941f7a2fa1014c11bc847c315ebbaeb92341f7f39efeab75edb2", size = 1159559, upload-time = "2025-10-23T12:34:03.2Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/02/dabfd6880d948dd91f68a757f5a8c1280fe8e8e0f2786e9b85695c5627f7/pygit2-1.19.0-cp313-cp313-win_arm64.whl", hash = "sha256:f59b39b7f9583fd0e5dbd63b6f156b735293b3b4a1688534a5eb2c695975eb39", size = 966823, upload-time = "2025-10-23T12:34:04.432Z" }, + { url = "https://files.pythonhosted.org/packages/84/ee/6248d08f5a2b6a19e8cb706c64241158992996ddcfe93cd1bea7f87d258b/pygit2-1.19.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:db136a24a9b46327f334a960604f5ed5ca138cab57cf54906c82bae147b2f600", size = 5510739, upload-time = "2025-10-23T12:34:05.565Z" }, + { url = "https://files.pythonhosted.org/packages/de/b1/612a653abe3310efcb17a3efa9881c793dba8a563e781540b1a223c04e88/pygit2-1.19.0-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1487023a52d25d169fd3d5c146475dec0f39dd54586bd55aac810ae0d7809154", size = 5786665, upload-time = "2025-10-23T12:34:07.036Z" }, + { url = "https://files.pythonhosted.org/packages/66/7c/720c12f14ca0947c3d1d10832f6094c765bfed640b7ca160e45f4ec21e58/pygit2-1.19.0-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:14bd00877c5b2571d1e8e2e203df35291198a3a9a90602121a8419b540174b8a", size = 4624940, upload-time = "2025-10-23T12:34:08.229Z" }, + { url = "https://files.pythonhosted.org/packages/e4/72/ad3061fccddc9eb06ef56b6a80ab506b61b2606732a4176c84222958705e/pygit2-1.19.0-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cb47afb011ef6133e1b938018afa7d6435040d1ae1e84242bf0699dc6960a4cf", size = 5521370, upload-time = "2025-10-23T12:34:09.446Z" }, + { url = "https://files.pythonhosted.org/packages/85/d2/20c3436114325286f67e9fa4ae9a7eb44335a5fa6f9c72f8ea95f84a19ee/pygit2-1.19.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4d81f578aef1c04cd5169b96aa38f864c0ed1a37249e13bff69b49f12f6ae761", size = 5784686, upload-time = "2025-10-23T12:34:11.67Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/f7/5302831d9e8bdbe6643321e3a3499c1df12bb8d519ca0250bc4fddc10b8f/pygit2-1.19.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8c97c281d4e24d45b1d93ea844b52ea157a7c1c408e31e53bbb7b969c3168a96", size = 5487011, upload-time = "2025-10-23T12:34:13.314Z" }, + { url = "https://files.pythonhosted.org/packages/09/e4/d64ca0240e5eecfa541b31869b4d67f1237b16469ce99d0f67734aa71f6f/pygit2-1.19.0-cp314-cp314-win32.whl", hash = "sha256:e69576401664911633351ebbe2a896861a8c1ff531d0375796e61483db39ebd7", size = 963580, upload-time = "2025-10-23T12:34:14.823Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1a/4fd4b60ac6b17581e8c63565a6ae8d529a1c699e021297c3e1ce9f510ed2/pygit2-1.19.0-cp314-cp314-win_amd64.whl", hash = "sha256:e56a14fcbfb8e07e30d18f21cfb790a74506957fa3ce03c54c02745b5d0152e2", size = 1190416, upload-time = "2025-10-23T12:34:16.054Z" }, + { url = "https://files.pythonhosted.org/packages/9b/7b/814b5b86cb9735068d93ebc15d055445a82c0b3d9689d4095fdc2810890b/pygit2-1.19.0-cp314-cp314-win_arm64.whl", hash = "sha256:2e84ab99802d8de643c6f8aa5b689b033ee5d5dee70ae04432005299dec33ee4", size = 994715, upload-time = "2025-10-23T12:34:17.178Z" }, + { url = "https://files.pythonhosted.org/packages/a5/45/78b1928f2b7517ee792d72a7a5527862066e53aeac586cea9fa5bd0d21cb/pygit2-1.19.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:12c8f8672fb319ee4276fc028019de4d3e6b9cd94bffc74a1aaa81ffc6445dc7", size = 5517120, upload-time = "2025-10-23T12:34:18.442Z" }, + { url = "https://files.pythonhosted.org/packages/c1/92/df0f760b80b57bf878df11ce3212beaa1e18ec9ce622088776435a4d3ec1/pygit2-1.19.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ccddde0826101035ca31f9df42c6a57704285e2387ab15cd314afa18f0521d95", size = 5846886, upload-time = "2025-10-23T12:34:19.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/d1/b3af6681c698210fd9f80e8c0b8b05b3a4d7e9dfb9bd95741867adbd14c1/pygit2-1.19.0-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de1fe1a2dfd10a58d17e88c14b363aa809a480c181b56c47cbc4fa83b0b68918", size = 4682365, upload-time = "2025-10-23T12:34:21.606Z" }, + { url = "https://files.pythonhosted.org/packages/4d/92/aff356800b7189fb085fba9e82102666bbec207dbd48acbf869947f41200/pygit2-1.19.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9b775d93b7ea9b8ff676002a857eabbe07fbc838802fd76b9b1e17109f571557", size = 5574375, upload-time = "2025-10-23T12:34:23.003Z" }, + { url = "https://files.pythonhosted.org/packages/4d/a2/6beb46b3a04567182374273d0845f8460572b562ba28ea908b363f1eaf05/pygit2-1.19.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:21a9bf74720360fcb21a0e6ad750013ba3e0625cd484f1bb7ddfefdcd207c0f5", size = 5842375, upload-time = "2025-10-23T12:34:24.408Z" }, + { url = "https://files.pythonhosted.org/packages/1d/eb/3fffdb3e1aecba1c4124f5e73008bd63a2fd8d2b44f420d4612e6dcf5065/pygit2-1.19.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ba17b947b2166000aeb9b19135160bb32aec38a7da9892d9fb476cdb3f518aba", size = 5539028, upload-time = "2025-10-23T12:34:26.307Z" }, + { url = "https://files.pythonhosted.org/packages/be/6e/dfc7693474e908ce4be5833b4b3a73868d1f8b58540af68b8361589c3168/pygit2-1.19.0-cp314-cp314t-win32.whl", hash = "sha256:772bf01936eb306c6dfb3cc3b955e2f8d3271d0eef2c23e24203352519421b20", size = 968740, upload-time = "2025-10-23T12:34:27.606Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a8/5e926b7756efb97db3c52a7ad45b7c3eec135cc5bc19e1647313bb95abf4/pygit2-1.19.0-cp314-cp314t-win_amd64.whl", hash = "sha256:4516e87d35df9b2867b5f859a0d09cd86857f1e3ef4215d6c87850ce4869a793", size = 1196880, upload-time = "2025-10-23T12:34:28.875Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/56/28654e46e78743740252419a713edce22323dbcab8cc38e72e14490e5b27/pygit2-1.19.0-cp314-cp314t-win_arm64.whl", hash = "sha256:fceba6e279ab2be9ec762f2b3ff1b315cd922de879800d1f57a25eba4a90bc60", size = 995620, upload-time = "2025-10-23T12:34:30.635Z" }, + { url = "https://files.pythonhosted.org/packages/23/27/a240394fdfba09a4c25ef6838e525167229056a8c7910bd5451cc4b6ecfb/pygit2-1.19.0-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6163a94e60ccd574c76018fdd19316c3c0671858a83000ea7978c28f55c78fc", size = 5321434, upload-time = "2025-10-23T12:34:31.957Z" }, + { url = "https://files.pythonhosted.org/packages/46/cd/749ea7ee588ceedb27dac65833329c511458e428b5dbcc30fc3ce1cbb0fa/pygit2-1.19.0-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b9ebbd394ce40024499d0e5905d1d26cd2bbc4429a30c2ac13a98f990e84ab88", size = 5045524, upload-time = "2025-10-23T12:34:33.35Z" }, + { url = "https://files.pythonhosted.org/packages/c1/65/48a1f131bc69a87aa80a92a9ffc8d87515bb4acb95f9a49063087ae62ca6/pygit2-1.19.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:33a509b4df90e23e87b4118020cabecd1910ede436d90a43c31eec27e33f52ce", size = 1129759, upload-time = "2025-10-23T12:34:34.608Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + 
+[[package]] +name = "pygtrie" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/13/55deec25bf09383216fa7f1dfcdbfca40a04aa00b6d15a5cbf25af8fce5f/pygtrie-2.5.0.tar.gz", hash = "sha256:203514ad826eb403dab1d2e2ddd034e0d1534bbe4dbe0213bb0593f66beba4e2", size = 39266, upload-time = "2022-07-16T14:29:47.459Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/cd/bd196b2cf014afb1009de8b0f05ecd54011d881944e62763f3c1b1e8ef37/pygtrie-2.5.0-py3-none-any.whl", hash = "sha256:8795cda8105493d5ae159a5bef313ff13156c5d4d72feddefacaad59f8c8ce16", size = 25099, upload-time = "2022-09-23T20:30:05.12Z" }, +] + +[[package]] +name = "pymdown-extensions" +version = "10.16" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown" }, + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1a/0a/c06b542ac108bfc73200677309cd9188a3a01b127a63f20cadc18d873d88/pymdown_extensions-10.16.tar.gz", hash = "sha256:71dac4fca63fabeffd3eb9038b756161a33ec6e8d230853d3cecf562155ab3de", size = 853197, upload-time = "2025-06-21T17:56:36.974Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/d4/10bb14004d3c792811e05e21b5e5dcae805aacb739bd12a0540967b99592/pymdown_extensions-10.16-py3-none-any.whl", hash = "sha256:f5dd064a4db588cb2d95229fc4ee63a1b16cc8b4d0e6145c0899ed8723da1df2", size = 266143, upload-time = "2025-06-21T17:56:35.356Z" }, +] + +[[package]] +name = "pyparsing" +version = "3.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608, upload-time = "2025-03-25T05:01:28.114Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" }, +] + +[[package]] +name = "pytest-cov" +version = "6.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/18/99/668cade231f434aaa59bbfbf49469068d2ddd945000621d3d165d2e7dd7b/pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", size = 69432, upload-time = "2025-06-12T10:47:47.684Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644, upload-time = "2025-06-12T10:47:45.932Z" }, +] + +[[package]] 
+name = "pytest-describe" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/58/4079baf5a7937159aa59b2f696f8d61c55a6ae4df87bd9ed49e7e130df21/pytest-describe-2.2.0.tar.gz", hash = "sha256:39bb05eb90f2497d9ca342ef9a0b7fa5bada7e58505aec33f66d661d631955b7", size = 10907, upload-time = "2024-02-10T15:30:35.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/95/2990d6e3e777be36690700b524d7daf44c9562158d1af6a8f85a6236f22c/pytest_describe-2.2.0-py3-none-any.whl", hash = "sha256:bd9e2c73acb4b9522a8400823d98f5b6a081667d3bfd7243a8598336896b544d", size = 6940, upload-time = "2024-02-10T15:30:33.451Z" }, +] + +[[package]] +name = "pytest-git" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "gitpython" }, + { name = "pytest" }, + { name = "pytest-shutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/22/74/8a237956bebbad3b1c84de5939c9d19fa82894622fa4df62d75ffa978165/pytest-git-1.8.0.tar.gz", hash = "sha256:e828ad716241f6485efda805aade5a17a034c720070a7121668155e6e706b415", size = 8754, upload-time = "2024-10-17T15:50:48.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/2c/21172aba6939986589c56923fc5c274f76ec274b73e6a9b34799a3d6633e/pytest_git-1.8.0-py3-none-any.whl", hash = "sha256:39f0b7ac64a0d4043a7f5a2f23a6ed9304139ca150c53efa525151940cd49b48", size = 6958, upload-time = "2024-10-17T15:50:32.251Z" }, +] + +[[package]] +name = "pytest-shutil" +version = "1.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet" }, + { name = "pytest" }, + { name = "six" }, + { name = "termcolor" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a0/8d/5a60eeced74608fd2304dafc9e26815988714f8a2cfa72da99c8c3c85515/pytest-shutil-1.8.1.tar.gz", hash = 
"sha256:7dcc02e8a372098d51a98737e7f662e6edfd75cec7070a11e904141de49d866b", size = 37554, upload-time = "2024-11-29T19:34:17.066Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/17/1b161657385982134b723dcd463483d498e54e286c5632d42602407e606f/pytest_shutil-1.8.1-py3-none-any.whl", hash = "sha256:0793e347e07b9296d814ce33377ed348ce376ffac76c0e57b28bf84235499f51", size = 15948, upload-time = "2024-11-29T19:33:26.349Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = 
"sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 
172040, upload-time = "2024-08-06T20:32:04.926Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +] + +[[package]] +name = "pyyaml-env-tag" +version = "1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/2e/79c822141bfd05a853236b504869ebc6b70159afc570e1d5a20641782eaa/pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff", size = 5737, upload-time = "2025-05-13T15:24:01.64Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/11/432f32f8097b03e3cd5fe57e88efb685d964e2e5178a48ed61e841f7fdce/pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04", size = 4722, upload-time = "2025-05-13T15:23:59.629Z" }, +] + +[[package]] +name = "requests" +version = "2.32.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, +] + +[[package]] +name = "result" +version = "0.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/a3/47/2175be65744aa4d8419c27bd3a7a7d65af5bcad7a4dc6a812c00778754f0/result-0.17.0.tar.gz", hash = "sha256:b73da420c0cb1a3bf741dbd41ff96dedafaad6a1b3ef437a9e33e380bb0d91cf", size = 20180, upload-time = "2024-06-02T16:39:54.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/90/19110ce9374c3db619e2df0816f2c58e4ddc5cdad5f7284cd81d8b30b7cb/result-0.17.0-py3-none-any.whl", hash = "sha256:49fd668b4951ad15800b8ccefd98b6b94effc789607e19c65064b775570933e8", size = 11689, upload-time = "2024-06-02T16:39:52.715Z" }, +] + +[[package]] +name = "rich" +version = "14.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, +] + +[[package]] +name = "roman-numerals-py" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/76/48fd56d17c5bdbdf65609abbc67288728a98ed4c02919428d4f52d23b24b/roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d", size = 9017, upload-time = "2025-02-22T07:34:54.333Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = 
"sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742, upload-time = "2025-02-22T07:34:52.422Z" }, +] + +[[package]] +name = "schema" +version = "0.7.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/01/0ea2e66bad2f13271e93b729c653747614784d3ebde219679e41ccdceecd/schema-0.7.7.tar.gz", hash = "sha256:7da553abd2958a19dc2547c388cde53398b39196175a9be59ea1caf5ab0a1807", size = 44245, upload-time = "2024-05-04T10:56:17.318Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ad/1b/81855a88c6db2b114d5b2e9f96339190d5ee4d1b981d217fa32127bb00e0/schema-0.7.7-py2.py3-none-any.whl", hash = "sha256:5d976a5b50f36e74e2157b47097b60002bd4d42e65425fcc9c9befadb4255dde", size = 18632, upload-time = "2024-05-04T10:56:13.86Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "smmap" +version = "5.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329, upload-time = "2025-01-02T07:14:40.909Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303, upload-time = "2025-01-02T07:14:38.724Z" }, +] + +[[package]] +name = "snowballstemmer" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, +] + +[[package]] +name = "sphinx" +version = "8.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "alabaster" }, + { name = "babel" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "docutils" }, + { name = "imagesize" }, + { name = "jinja2" }, + { name = "packaging" }, + { name = "pygments" }, + { name = "requests" }, + { name = "roman-numerals-py" }, + { name = "snowballstemmer" }, + { name = "sphinxcontrib-applehelp" }, + { name = "sphinxcontrib-devhelp" }, + { name = "sphinxcontrib-htmlhelp" }, + { name = "sphinxcontrib-jsmath" }, + { name = "sphinxcontrib-qthelp" }, + { name = "sphinxcontrib-serializinghtml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/ad/4360e50ed56cb483667b8e6dadf2d3fda62359593faabbe749a27c4eaca6/sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348", size = 8321876, upload-time = "2025-03-02T22:31:59.658Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741, upload-time = "2025-03-02T22:31:56.836Z" }, +] + +[[package]] +name = "sphinx-autodoc-typehints" +version = "3.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/93/68/a388a9b8f066cd865d9daa65af589d097efbfab9a8c302d2cb2daa43b52e/sphinx_autodoc_typehints-3.2.0.tar.gz", hash = "sha256:107ac98bc8b4837202c88c0736d59d6da44076e65a0d7d7d543a78631f662a9b", size = 36724, upload-time = "2025-04-25T16:53:25.872Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/c7/8aab362e86cbf887e58be749a78d20ad743e1eb2c73c2b13d4761f39a104/sphinx_autodoc_typehints-3.2.0-py3-none-any.whl", hash = "sha256:884b39be23b1d884dcc825d4680c9c6357a476936e3b381a67ae80091984eb49", size = 20563, upload-time = "2025-04-25T16:53:24.492Z" }, +] + +[[package]] +name = "sphinx-markdown" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown" }, + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/54/8eebe3ebea47c16645a7a7888cc4015618bcee93019b54f2269c6fcca35d/sphinx-markdown-1.0.2.tar.gz", hash = "sha256:f21b5b064eb6a45ea5cc3c48d82674407ef9c868486afb49668e1f503dbf50d5", size = 3394, upload-time = "2019-04-15T11:29:08.23Z" } + +[[package]] +name = "sphinx-press-theme" +version = "0.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e7/25/8769ef130d57ea449309a4ee2d76eed653063b5de27d34100822e34e7e93/sphinx_press_theme-0.9.1.tar.gz", hash = "sha256:1643dee7365f7831d1d3971b389b7c255641a7aced75f0681f71574e380046cf", size = 254696, upload-time = 
"2024-03-23T01:39:02.384Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/28/fbd928e5dcde4f8acdc7b2b730685dafb35d83437cc0b955332349e12b89/sphinx_press_theme-0.9.1-py3-none-any.whl", hash = "sha256:eed3fdd8df249b67136b507dfc6a84d1a2c5feca5376960c6d4d28ada4f6cdf7", size = 83558, upload-time = "2024-03-23T01:39:00.525Z" }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, +] + +[[package]] +name = "sphinxcontrib-programoutput" +version = "0.18" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3f/c0/834af2290f8477213ec0dd60e90104f5644aa0c37b1a0d6f0a2b5efe03c4/sphinxcontrib_programoutput-0.18.tar.gz", hash = "sha256:09e68b6411d937a80b6085f4fdeaa42e0dc5555480385938465f410589d2eed8", size = 26333, upload-time = "2024-12-06T20:38:36.959Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/2c/7aec6e0580f666d4f61474a50c4995a98abfff27d827f0e7bc8c4fa528f5/sphinxcontrib_programoutput-0.18-py3-none-any.whl", 
hash = "sha256:8a651bc85de69a808a064ff0e48d06c12b9347da4fe5fdb1e94914b01e1b0c36", size = 20346, upload-time = "2024-12-06T20:38:22.406Z" }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.41" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or 
(python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/66/45b165c595ec89aa7dcc2c1cd222ab269bc753f1fc7a1e68f8481bd957bf/sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9", size = 9689424, upload-time = "2025-05-14T17:10:32.339Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/4e/b00e3ffae32b74b5180e15d2ab4040531ee1bef4c19755fe7926622dc958/sqlalchemy-2.0.41-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f", size = 2121232, upload-time = "2025-05-14T17:48:20.444Z" }, + { url = "https://files.pythonhosted.org/packages/ef/30/6547ebb10875302074a37e1970a5dce7985240665778cfdee2323709f749/sqlalchemy-2.0.41-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560", size = 2110897, upload-time = "2025-05-14T17:48:21.634Z" }, + { url = "https://files.pythonhosted.org/packages/9e/21/59df2b41b0f6c62da55cd64798232d7349a9378befa7f1bb18cf1dfd510a/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f", size = 3273313, upload-time = "2025-05-14T17:51:56.205Z" }, + { url = "https://files.pythonhosted.org/packages/62/e4/b9a7a0e5c6f79d49bcd6efb6e90d7536dc604dab64582a9dec220dab54b6/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6", size = 3273807, upload-time = "2025-05-14T17:55:26.928Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/d8/79f2427251b44ddee18676c04eab038d043cff0e764d2d8bb08261d6135d/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04", size = 3209632, upload-time = "2025-05-14T17:51:59.384Z" }, + { url = "https://files.pythonhosted.org/packages/d4/16/730a82dda30765f63e0454918c982fb7193f6b398b31d63c7c3bd3652ae5/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582", size = 3233642, upload-time = "2025-05-14T17:55:29.901Z" }, + { url = "https://files.pythonhosted.org/packages/04/61/c0d4607f7799efa8b8ea3c49b4621e861c8f5c41fd4b5b636c534fcb7d73/sqlalchemy-2.0.41-cp311-cp311-win32.whl", hash = "sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8", size = 2086475, upload-time = "2025-05-14T17:56:02.095Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8e/8344f8ae1cb6a479d0741c02cd4f666925b2bf02e2468ddaf5ce44111f30/sqlalchemy-2.0.41-cp311-cp311-win_amd64.whl", hash = "sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504", size = 2110903, upload-time = "2025-05-14T17:56:03.499Z" }, + { url = "https://files.pythonhosted.org/packages/3e/2a/f1f4e068b371154740dd10fb81afb5240d5af4aa0087b88d8b308b5429c2/sqlalchemy-2.0.41-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9", size = 2119645, upload-time = "2025-05-14T17:55:24.854Z" }, + { url = "https://files.pythonhosted.org/packages/9b/e8/c664a7e73d36fbfc4730f8cf2bf930444ea87270f2825efbe17bf808b998/sqlalchemy-2.0.41-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1", size = 2107399, upload-time = "2025-05-14T17:55:28.097Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/78/8a9cf6c5e7135540cb682128d091d6afa1b9e48bd049b0d691bf54114f70/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70", size = 3293269, upload-time = "2025-05-14T17:50:38.227Z" }, + { url = "https://files.pythonhosted.org/packages/3c/35/f74add3978c20de6323fb11cb5162702670cc7a9420033befb43d8d5b7a4/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e", size = 3303364, upload-time = "2025-05-14T17:51:49.829Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d4/c990f37f52c3f7748ebe98883e2a0f7d038108c2c5a82468d1ff3eec50b7/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078", size = 3229072, upload-time = "2025-05-14T17:50:39.774Z" }, + { url = "https://files.pythonhosted.org/packages/15/69/cab11fecc7eb64bc561011be2bd03d065b762d87add52a4ca0aca2e12904/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae", size = 3268074, upload-time = "2025-05-14T17:51:51.736Z" }, + { url = "https://files.pythonhosted.org/packages/5c/ca/0c19ec16858585d37767b167fc9602593f98998a68a798450558239fb04a/sqlalchemy-2.0.41-cp312-cp312-win32.whl", hash = "sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6", size = 2084514, upload-time = "2025-05-14T17:55:49.915Z" }, + { url = "https://files.pythonhosted.org/packages/7f/23/4c2833d78ff3010a4e17f984c734f52b531a8c9060a50429c9d4b0211be6/sqlalchemy-2.0.41-cp312-cp312-win_amd64.whl", hash = "sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0", size = 2111557, upload-time = "2025-05-14T17:55:51.349Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/ad/2e1c6d4f235a97eeef52d0200d8ddda16f6c4dd70ae5ad88c46963440480/sqlalchemy-2.0.41-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443", size = 2115491, upload-time = "2025-05-14T17:55:31.177Z" }, + { url = "https://files.pythonhosted.org/packages/cf/8d/be490e5db8400dacc89056f78a52d44b04fbf75e8439569d5b879623a53b/sqlalchemy-2.0.41-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc", size = 2102827, upload-time = "2025-05-14T17:55:34.921Z" }, + { url = "https://files.pythonhosted.org/packages/a0/72/c97ad430f0b0e78efaf2791342e13ffeafcbb3c06242f01a3bb8fe44f65d/sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1", size = 3225224, upload-time = "2025-05-14T17:50:41.418Z" }, + { url = "https://files.pythonhosted.org/packages/5e/51/5ba9ea3246ea068630acf35a6ba0d181e99f1af1afd17e159eac7e8bc2b8/sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a", size = 3230045, upload-time = "2025-05-14T17:51:54.722Z" }, + { url = "https://files.pythonhosted.org/packages/78/2f/8c14443b2acea700c62f9b4a8bad9e49fc1b65cfb260edead71fd38e9f19/sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d", size = 3159357, upload-time = "2025-05-14T17:50:43.483Z" }, + { url = "https://files.pythonhosted.org/packages/fc/b2/43eacbf6ccc5276d76cea18cb7c3d73e294d6fb21f9ff8b4eef9b42bbfd5/sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23", size = 3197511, upload-time = "2025-05-14T17:51:57.308Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/2e/677c17c5d6a004c3c45334ab1dbe7b7deb834430b282b8a0f75ae220c8eb/sqlalchemy-2.0.41-cp313-cp313-win32.whl", hash = "sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f", size = 2082420, upload-time = "2025-05-14T17:55:52.69Z" }, + { url = "https://files.pythonhosted.org/packages/e9/61/e8c1b9b6307c57157d328dd8b8348ddc4c47ffdf1279365a13b2b98b8049/sqlalchemy-2.0.41-cp313-cp313-win_amd64.whl", hash = "sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df", size = 2108329, upload-time = "2025-05-14T17:55:54.495Z" }, + { url = "https://files.pythonhosted.org/packages/1c/fc/9ba22f01b5cdacc8f5ed0d22304718d2c758fce3fd49a5372b886a86f37c/sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", size = 1911224, upload-time = "2025-05-14T17:39:42.154Z" }, +] + +[[package]] +name = "termcolor" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/6c/3d75c196ac07ac8749600b60b03f4f6094d54e132c4d94ebac6ee0e0add0/termcolor-3.1.0.tar.gz", hash = "sha256:6a6dd7fbee581909eeec6a756cff1d7f7c376063b14e4a298dc4980309e55970", size = 14324, upload-time = "2025-04-30T11:37:53.791Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4f/bd/de8d508070629b6d84a30d01d57e4a65c69aa7f5abe7560b8fad3b50ea59/termcolor-3.1.0-py3-none-any.whl", hash = "sha256:591dd26b5c2ce03b9e43f391264626557873ce1d379019786f99b0c2bee140aa", size = 7684, upload-time = "2025-04-30T11:37:52.382Z" }, +] + +[[package]] +name = "tomli" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, + { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, + { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, + { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, + { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, + { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, + { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, + { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, + { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, + { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, + { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, + { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, + { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, + { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, + { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, + { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, + { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, +] + +[[package]] +name = "tzdata" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, +] + +[[package]] +name = "uc-micro-py" +version = "1.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/91/7a/146a99696aee0609e3712f2b44c6274566bc368dfe8375191278045186b8/uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a", size = 6043, upload-time = "2024-02-09T16:52:01.654Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/87/1f677586e8ac487e29672e4b17455758fce261de06a0d086167bb760361a/uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5", size = 6229, upload-time = "2024-02-09T16:52:00.371Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = 
"sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "virtualenv" +version = "20.32.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a9/96/0834f30fa08dca3738614e6a9d42752b6420ee94e58971d702118f7cfd30/virtualenv-20.32.0.tar.gz", hash = "sha256:886bf75cadfdc964674e6e33eb74d787dff31ca314ceace03ca5810620f4ecf0", size = 6076970, upload-time = "2025-07-21T04:09:50.985Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/c6/f8f28009920a736d0df434b52e9feebfb4d702ba942f15338cb4a83eafc1/virtualenv-20.32.0-py3-none-any.whl", hash = "sha256:2c310aecb62e5aa1b06103ed7c2977b81e042695de2697d01017ff0f1034af56", size = 6057761, upload-time = "2025-07-21T04:09:48.059Z" }, +] + +[[package]] +name = "watchdog" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/24/d9be5cd6642a6aa68352ded4b4b10fb0d7889cb7f45814fb92cecd35f101/watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c", size = 96393, upload-time = "2024-11-01T14:06:31.756Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/7a/6013b0d8dbc56adca7fdd4f0beed381c59f6752341b12fa0886fa7afc78b/watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2", size = 88392, upload-time = "2024-11-01T14:06:32.99Z" }, + { url = "https://files.pythonhosted.org/packages/d1/40/b75381494851556de56281e053700e46bff5b37bf4c7267e858640af5a7f/watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c", size = 89019, upload-time = "2024-11-01T14:06:34.963Z" }, + { url = "https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" }, + { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" }, + { url = "https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" }, + { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" }, + { url = 
"https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = "2024-11-01T14:06:45.084Z" }, + { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" }, + { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" }, + { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" }, + { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" }, + { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" }, + { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" }, + { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" }, + { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, +]