From 17212b8590767b46c55b6b0029f75e47404e421b Mon Sep 17 00:00:00 2001 From: Test User Date: Sat, 14 Mar 2026 14:49:55 -0400 Subject: [PATCH 1/3] Drop 3.8, fix mypy, add ty, bump test req versions --- .github/workflows/tests.yml | 171 ++++++++++------------- .readthedocs.yml | 15 ++ build_wheels.sh | 12 +- docs/source/conf.py | 16 ++- line_profiler/_logger.py | 6 +- line_profiler/autoprofile/autoprofile.py | 2 +- line_profiler/cli_utils.py | 4 +- pyproject.toml | 13 +- requirements/ipython.txt | 4 +- requirements/optional.txt | 2 +- requirements/tests.txt | 17 +-- 11 files changed, 132 insertions(+), 130 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index b44389ff..8c779fe0 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -21,7 +21,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout source - uses: actions/checkout@v4.2.2 + uses: actions/checkout@v6.0.2 - name: Set up Python 3.13 for linting uses: actions/setup-python@v5.6.0 with: @@ -34,12 +34,14 @@ jobs: run: |- # stop the build if there are Python syntax errors or undefined names flake8 ./line_profiler --count --select=E9,F63,F7,F82 --show-source --statistics - - name: Typecheck with mypy + - name: Typecheck run: |- python -m pip install mypy pip install -r requirements/runtime.txt - mypy --install-types --non-interactive ./line_profiler mypy ./line_profiler + python -m pip install ty + pip install -r requirements/runtime.txt + ty check ./line_profiler build_and_test_sdist: ## # Build the binary package from source and test it in the same @@ -49,7 +51,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout source - uses: actions/checkout@v4.2.2 + uses: actions/checkout@v6.0.2 - name: Set up Python 3.13 uses: actions/setup-python@v5.6.0 with: @@ -57,8 +59,8 @@ jobs: - name: Upgrade pip run: |- python -m pip install pip uv -U - python -m uv pip install -r requirements/tests.txt - python -m uv pip install -r requirements/runtime.txt + python 
-m pip install --prefer-binary -r requirements/tests.txt + python -m pip install --prefer-binary -r requirements/runtime.txt - name: Build sdist shell: bash run: |- @@ -69,7 +71,7 @@ jobs: - name: Install sdist run: |- ls -al wheelhouse - python -m uv pip install wheelhouse/line_profiler*.tar.gz -v + python -m pip install --prefer-binary wheelhouse/line_profiler*.tar.gz -v - name: Test minimal loose sdist env: COVERAGE_CORE: ctrace @@ -103,7 +105,7 @@ jobs: echo "MOD_DPATH = $MOD_DPATH" python -m pytest --verbose --cov=line_profiler $MOD_DPATH ../tests cd .. - - uses: actions/upload-artifact@v4.4.0 + - uses: actions/upload-artifact@v6.0.0 name: Upload sdist artifact with: name: sdist_wheels @@ -126,41 +128,35 @@ jobs: # explicitly here. os: - ubuntu-latest - # Overhead of building ARM wheels on Intel Linux nodes is - # unreasonably high (20s build time per wheel vs 3m); - # it's better to just spin another runner up to build them - # natively - - ubuntu-24.04-arm - macOS-latest - windows-latest + - ubuntu-24.04-arm cibw_skip: - '*-win32 cp3{9,10}-win_arm64 cp313-musllinux_i686' arch: - auto steps: - name: Checkout source - uses: actions/checkout@v4.2.2 + uses: actions/checkout@v6.0.2 - name: Enable MSVC 64bit uses: ilammy/msvc-dev-cmd@v1 if: ${{ startsWith(matrix.os, 'windows-') }} && ${{ contains(matrix.cibw_skip, '*-win32') }} with: arch: ${{ contains(matrix.os, 'arm') && 'arm64' || 'x64' }} - # Note: Since we're building Linux wheels on their native - # architectures, we don't need QEMU - name: Set up QEMU - uses: docker/setup-qemu-action@v3.0.0 + uses: docker/setup-qemu-action@v3.7.0 if: runner.os == 'Linux' && matrix.arch != 'auto' with: platforms: all - name: Build binary wheels - uses: pypa/cibuildwheel@v3.1.2 + uses: pypa/cibuildwheel@v3.3.1 with: output-dir: wheelhouse config-file: pyproject.toml env: CIBW_SKIP: ${{ matrix.cibw_skip }} CIBW_TEST_SKIP: '*-win_arm64' - CIBW_ENVIRONMENT: PYTHONUTF8=1 + CIBW_ARCHS_LINUX: ${{ matrix.arch }} PYTHONUTF8: '1' 
VSCMD_ARG_TGT_ARCH: '' - name: Show built files @@ -186,7 +182,7 @@ jobs: echo '### The cwd should now have a coverage.xml' ls -altr pwd - - uses: codecov/codecov-action@v5.4.3 + - uses: codecov/codecov-action@v5.5.2 name: Codecov Upload env: HAVE_CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN != '' }} @@ -195,12 +191,12 @@ jobs: with: file: ./coverage.xml token: ${{ secrets.CODECOV_TOKEN }} - - uses: codecov/codecov-action@v5.4.3 + - uses: codecov/codecov-action@v5.5.2 name: Codecov Upload with: file: ./coverage.xml token: ${{ secrets.CODECOV_TOKEN }} - - uses: actions/upload-artifact@v4.4.0 + - uses: actions/upload-artifact@v6.0.0 name: Upload wheels artifact with: name: wheels-${{ matrix.os }}-${{ matrix.arch }} @@ -222,29 +218,25 @@ jobs: # Xcookie generates an explicit list of environments that will be used # for testing instead of using the more concise matrix notation. include: - - python-version: '3.8' + - python-version: '3.9' install-extras: tests-strict,runtime-strict os: ubuntu-latest arch: auto - - python-version: '3.8' - install-extras: tests-strict,runtime-strict - os: ubuntu-24.04-arm - arch: auto - - python-version: '3.8' + - python-version: '3.9' install-extras: tests-strict,runtime-strict os: macOS-latest arch: auto - - python-version: '3.8' + - python-version: '3.9' install-extras: tests-strict,runtime-strict os: windows-latest arch: auto - - python-version: '3.13' - install-extras: tests-strict,runtime-strict,optional-strict - os: ubuntu-latest + - python-version: '3.9' + install-extras: tests-strict,runtime-strict + os: ubuntu-24.04-arm arch: auto - python-version: '3.13' install-extras: tests-strict,runtime-strict,optional-strict - os: ubuntu-24.04-arm + os: ubuntu-latest arch: auto - python-version: '3.13' install-extras: tests-strict,runtime-strict,optional-strict @@ -256,32 +248,28 @@ jobs: arch: auto - python-version: '3.13' install-extras: tests-strict,runtime-strict,optional-strict - os: windows-11-arm + os: ubuntu-24.04-arm arch: auto - 
python-version: '3.13' - install-extras: tests - os: ubuntu-latest + install-extras: tests-strict,runtime-strict,optional-strict + os: windows-11-arm arch: auto - python-version: '3.13' install-extras: tests - os: ubuntu-24.04-arm + os: macOS-latest arch: auto - python-version: '3.13' install-extras: tests - os: macOS-latest + os: windows-latest arch: auto - python-version: '3.13' install-extras: tests - os: windows-latest + os: ubuntu-24.04-arm arch: auto - python-version: '3.13' install-extras: tests os: windows-11-arm arch: auto - - python-version: '3.8' - install-extras: tests,optional - os: ubuntu-latest - arch: auto - python-version: '3.9' install-extras: tests,optional os: ubuntu-latest @@ -306,89 +294,77 @@ jobs: install-extras: tests,optional os: ubuntu-latest arch: auto - - python-version: '3.8' - install-extras: tests,optional - os: ubuntu-24.04-arm - arch: auto - python-version: '3.9' install-extras: tests,optional - os: ubuntu-24.04-arm + os: macOS-latest arch: auto - python-version: '3.10' install-extras: tests,optional - os: ubuntu-24.04-arm + os: macOS-latest arch: auto - python-version: '3.11' install-extras: tests,optional - os: ubuntu-24.04-arm + os: macOS-latest arch: auto - python-version: '3.12' install-extras: tests,optional - os: ubuntu-24.04-arm + os: macOS-latest arch: auto - python-version: '3.13' install-extras: tests,optional - os: ubuntu-24.04-arm + os: macOS-latest arch: auto - python-version: '3.14' - install-extras: tests,optional - os: ubuntu-24.04-arm - arch: auto - - python-version: '3.8' install-extras: tests,optional os: macOS-latest arch: auto - python-version: '3.9' install-extras: tests,optional - os: macOS-latest + os: windows-latest arch: auto - python-version: '3.10' install-extras: tests,optional - os: macOS-latest + os: windows-latest arch: auto - python-version: '3.11' install-extras: tests,optional - os: macOS-latest + os: windows-latest arch: auto - python-version: '3.12' install-extras: tests,optional - os: 
macOS-latest + os: windows-latest arch: auto - python-version: '3.13' install-extras: tests,optional - os: macOS-latest + os: windows-latest arch: auto - python-version: '3.14' - install-extras: tests,optional - os: macOS-latest - arch: auto - - python-version: '3.8' install-extras: tests,optional os: windows-latest arch: auto - python-version: '3.9' install-extras: tests,optional - os: windows-latest + os: ubuntu-24.04-arm arch: auto - python-version: '3.10' install-extras: tests,optional - os: windows-latest + os: ubuntu-24.04-arm arch: auto - python-version: '3.11' install-extras: tests,optional - os: windows-latest + os: ubuntu-24.04-arm arch: auto - python-version: '3.12' install-extras: tests,optional - os: windows-latest + os: ubuntu-24.04-arm arch: auto - python-version: '3.13' install-extras: tests,optional - os: windows-latest + os: ubuntu-24.04-arm arch: auto - python-version: '3.14' install-extras: tests,optional - os: windows-latest + os: ubuntu-24.04-arm arch: auto - python-version: '3.11' install-extras: tests,optional @@ -408,16 +384,14 @@ jobs: arch: auto steps: - name: Checkout source - uses: actions/checkout@v4.2.2 + uses: actions/checkout@v6.0.2 - name: Enable MSVC 64bit uses: ilammy/msvc-dev-cmd@v1 if: ${{ startsWith(matrix.os, 'windows-') }} with: arch: ${{ contains(matrix.os, 'arm') && 'arm64' || 'x64' }} - # Note: Since we're testing Linux wheels on their native - # architectures, we don't need QEMU - name: Set up QEMU - uses: docker/setup-qemu-action@v3.0.0 + uses: docker/setup-qemu-action@v3.7.0 if: runner.os == 'Linux' && matrix.arch != 'auto' with: platforms: all @@ -443,28 +417,35 @@ jobs: echo "Installing helpers: setuptools" python -m uv pip install setuptools>=0.8 setuptools_scm wheel build -U echo "Installing helpers: tomli and pkginfo" - python -m uv pip install tomli pkginfo + python -m uv pip install tomli pkginfo packaging export WHEEL_FPATH=$(python -c "if 1: import pathlib + from packaging import tags + from packaging.utils 
import parse_wheel_filename dist_dpath = pathlib.Path('wheelhouse') - candidates = list(dist_dpath.glob('line_profiler*.whl')) - candidates += list(dist_dpath.glob('line_profiler*.tar.gz')) - fpath = sorted(candidates)[-1] + wheels = sorted(dist_dpath.glob('line_profiler*.whl')) + if wheels: + sys_tags = set(tags.sys_tags()) + matching = [] + for w in wheels: + try: + _, _, _, wheel_tags = parse_wheel_filename(w.name) + except Exception: + continue + if any(t in sys_tags for t in wheel_tags): + matching.append(w) + fpath = sorted(matching or wheels)[-1] + else: + sdists = sorted(dist_dpath.glob('line_profiler*.tar.gz')) + if not sdists: + raise SystemExit('No wheel artifacts found in wheelhouse') + fpath = sdists[-1] print(str(fpath).replace(chr(92), chr(47))) ") - export MOD_VERSION=$(python -c "if 1: - from pkginfo import Wheel, SDist - import pathlib - fpath = '$WHEEL_FPATH' - cls = Wheel if fpath.endswith('.whl') else SDist - item = cls(fpath) - print(item.version) - ") echo "WHEEL_FPATH=$WHEEL_FPATH" echo "INSTALL_EXTRAS=$INSTALL_EXTRAS" echo "UV_RESOLUTION=$UV_RESOLUTION" - echo "MOD_VERSION=$MOD_VERSION" - python -m uv pip install "line_profiler[$INSTALL_EXTRAS]==$MOD_VERSION" -f wheelhouse + python -m pip install --prefer-binary "${WHEEL_FPATH}[${INSTALL_EXTRAS}]" echo "Install finished." 
- name: Test wheel ${{ matrix.install-extras }} shell: bash @@ -513,7 +494,7 @@ jobs: echo '### The cwd should now have a coverage.xml' ls -altr pwd - - uses: codecov/codecov-action@v5.4.3 + - uses: codecov/codecov-action@v5.5.2 name: Codecov Upload with: file: ./coverage.xml @@ -527,7 +508,7 @@ jobs: - build_binpy_wheels steps: - name: Checkout source - uses: actions/checkout@v4.2.2 + uses: actions/checkout@v6.0.2 - uses: actions/download-artifact@v4.1.8 name: Download wheels with: @@ -584,7 +565,7 @@ jobs: ots stamp wheelhouse/*.whl wheelhouse/*.tar.gz wheelhouse/*.asc ls -la wheelhouse twine upload --username __token__ --password "$TWINE_PASSWORD" --repository-url "$TWINE_REPOSITORY_URL" wheelhouse/*.whl wheelhouse/*.tar.gz --skip-existing --verbose || { echo "failed to twine upload" ; exit 1; } - - uses: actions/upload-artifact@v4.4.0 + - uses: actions/upload-artifact@v6.0.0 name: Upload deploy artifacts with: name: deploy_artifacts @@ -603,7 +584,7 @@ jobs: - build_binpy_wheels steps: - name: Checkout source - uses: actions/checkout@v4.2.2 + uses: actions/checkout@v6.0.2 - uses: actions/download-artifact@v4.1.8 name: Download wheels with: @@ -660,7 +641,7 @@ jobs: ots stamp wheelhouse/*.whl wheelhouse/*.tar.gz wheelhouse/*.asc ls -la wheelhouse twine upload --username __token__ --password "$TWINE_PASSWORD" --repository-url "$TWINE_REPOSITORY_URL" wheelhouse/*.whl wheelhouse/*.tar.gz --skip-existing --verbose || { echo "failed to twine upload" ; exit 1; } - - uses: actions/upload-artifact@v4.4.0 + - uses: actions/upload-artifact@v6.0.0 name: Upload deploy artifacts with: name: deploy_artifacts @@ -680,7 +661,7 @@ jobs: - live_deploy steps: - name: Checkout source - uses: actions/checkout@v4.2.2 + uses: actions/checkout@v6.0.2 - uses: actions/download-artifact@v4.1.8 name: Download artifacts with: diff --git a/.readthedocs.yml b/.readthedocs.yml index 1c728dac..79335f21 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -7,15 +7,30 @@ # Required version: 2 
+ build: os: "ubuntu-24.04" tools: python: "3.13" + +# Build documentation in the docs/ directory with Sphinx sphinx: configuration: docs/source/conf.py + +# Build documentation with MkDocs +#mkdocs: +# configuration: mkdocs.yml + +# Optionally build your docs in additional formats such as PDF and ePub formats: all + python: install: - requirements: requirements/docs.txt - method: pip path: . + #extra_requirements: + # - docs + +#conda: +# environment: environment.yml diff --git a/build_wheels.sh b/build_wheels.sh index 6a6daecb..b8e7d2bb 100755 --- a/build_wheels.sh +++ b/build_wheels.sh @@ -18,5 +18,13 @@ if ! which cibuildwheel ; then exit 1 fi -# Build version-pinned wheels -cibuildwheel --config-file pyproject.toml --platform linux --archs x86_64 +LOCAL_CP_VERSION=$(python3 -c "import sys; print('cp' + ''.join(list(map(str, sys.version_info[0:2]))))") +echo "LOCAL_CP_VERSION = $LOCAL_CP_VERSION" + +# Build for only the current version of Python +export CIBW_BUILD="${LOCAL_CP_VERSION}-*" + + +#pip wheel -w wheelhouse . +# python -m build --wheel -o wheelhouse # line_profiler: +COMMENT_IF(binpy) +cibuildwheel --config-file pyproject.toml --platform linux --archs x86_64 # line_profiler: +UNCOMMENT_IF(binpy) diff --git a/docs/source/conf.py b/docs/source/conf.py index b81e1786..0604cedd 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -141,7 +141,7 @@ def visit_Assign(self, node): project = 'line_profiler' -copyright = '2025, Robert Kern' +copyright = '2026, Robert Kern' author = 'Robert Kern' modname = 'line_profiler' @@ -435,7 +435,7 @@ class PatchedPythonDomain(PythonDomain): """ def resolve_xref( - self, env, fromdocname, builder, typ, target, node, contnode + self, env, fromdocname, builder, type, target, node, contnode ): """ Helps to resolves cross-references @@ -445,7 +445,7 @@ def resolve_xref( if target.startswith('xdoc.'): target = 'xdoctest.' 
+ target[3] return_value = super(PatchedPythonDomain, self).resolve_xref( - env, fromdocname, builder, typ, target, node, contnode + env, fromdocname, builder, type, target, node, contnode ) return return_value @@ -838,10 +838,12 @@ def create_doctest_figure(app, obj, name, lines): The idea is that each doctest that produces a figure should generate that and then that figure should be part of the docs. """ - import xdoctest import sys import types + import xdoctest + import xdoctest.core + if isinstance(obj, types.ModuleType): module = obj else: @@ -1035,9 +1037,10 @@ def postprocess_hyperlinks(app, doctree, docname): "autodoc-process-docstring" event. """ # Your hyperlink postprocessing logic here - from docutils import nodes import pathlib + from docutils import nodes + for node in doctree.traverse(nodes.reference): if 'refuri' in node.attributes: refuri = node.attributes['refuri'] @@ -1054,7 +1057,7 @@ def postprocess_hyperlinks(app, doctree, docname): def fix_rst_todo_section(lines): - new_lines = [] + # new_lines = [] for line in lines: ... ... 
@@ -1062,6 +1065,7 @@ def fix_rst_todo_section(lines): def setup(app): import sphinx + import sphinx.application app: sphinx.application.Sphinx = app app.add_domain(PatchedPythonDomain, override=True) diff --git a/line_profiler/_logger.py b/line_profiler/_logger.py index c632e925..bb4629a7 100644 --- a/line_profiler/_logger.py +++ b/line_profiler/_logger.py @@ -6,7 +6,7 @@ import logging from abc import ABC, abstractmethod import sys -from typing import ClassVar +from typing import ClassVar, cast from logging import INFO, DEBUG, ERROR, WARNING, CRITICAL # NOQA @@ -168,7 +168,7 @@ def configure( 'path': None, 'format': '%(asctime)s : [file] %(levelname)s : %(message)s', } - streaminfo = { + streaminfo: dict[str, bool | None | str] = { '__enable__': None, # will be determined below 'format': '%(levelname)s: %(message)s', } @@ -195,7 +195,7 @@ def configure( # Add a stream handler if enabled if streaminfo['__enable__']: - streamformat = streaminfo.get('format') + streamformat = cast(str, streaminfo.get('format')) sh = logging.StreamHandler(sys.stdout) sh.setFormatter(logging.Formatter(streamformat)) self.logger.addHandler(sh) diff --git a/line_profiler/autoprofile/autoprofile.py b/line_profiler/autoprofile/autoprofile.py index 1e20fbb7..e287d35c 100644 --- a/line_profiler/autoprofile/autoprofile.py +++ b/line_profiler/autoprofile/autoprofile.py @@ -153,4 +153,4 @@ def __exit__(self, *_, **__): # then restore it via the context manager, so that the executed # code is run as `__main__` sys.modules['__main__'] = module_obj - exec(code_obj, cast(Dict[str, Any], namespace), namespace) + exec(code_obj, cast(Dict[str, Any], namespace), namespace) # type: ignore[redundant-cast] diff --git a/line_profiler/cli_utils.py b/line_profiler/cli_utils.py index bc9b6fa1..470a06d3 100644 --- a/line_profiler/cli_utils.py +++ b/line_profiler/cli_utils.py @@ -12,11 +12,11 @@ import shutil import sys from os import PathLike -from typing import Protocol, Sequence, TypeVar, cast +from typing 
import cast from .toml_config import ConfigSource -_BOOLEAN_VALUES = { +_BOOLEAN_VALUES: dict[str, bool] = { **{k.casefold(): False for k in ('', '0', 'off', 'False', 'F', 'no', 'N')}, **{k.casefold(): True for k in ('1', 'on', 'True', 'T', 'yes', 'Y')}, } diff --git a/pyproject.toml b/pyproject.toml index 4ac613b8..ff66661d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,7 @@ omit =[ ] [tool.cibuildwheel] -build = "cp38-* cp39-* cp310-* cp311-* cp312-* cp313-* cp314-*" +build = "cp39-* cp310-* cp311-* cp312-* cp313-* cp314-*" # XXX: since `tests.yml` already defines `matrix.cibw_skip` for # `build_binpy_wheels`, can we deduplicate and just use that? # Or do we need these when building wheels for release, which may run on @@ -59,13 +59,13 @@ archs = ['AMD64', 'ARM64'] ignore_missing_imports = true [tool.xcookie] -tags = [ "pyutils", "binpy", "github",] +tags = [ "pyutils", "binpy", "github", "mypy", "binpy-ubuntu-arm"] mod_name = "line_profiler" repo_name = "line_profiler" rel_mod_parent_dpath = "." os = [ "all", "linux", "osx", "win",] main_python = '3.13' -min_python = '3.8' +min_python = '3.9' max_python = '3.14' author = "Robert Kern" author_email = "robert.kern@enthought.com" @@ -102,7 +102,7 @@ rules = { unused-type-ignore-comment = "ignore" } [tool.ruff] line-length = 80 -target-version = "py38" +target-version = "py39" [tool.ruff.lint] # Enable Flake8 (E, F) and isort (I) rules. 
@@ -119,3 +119,8 @@ indent-style = "space" skip-magic-trailing-comma = false line-ending = "auto" docstring-code-format = false + +[tool.ty.rules] +unused-ignore-comment = "ignore" +unused-type-ignore-comment = "ignore" +unresolved-import = "ignore" diff --git a/requirements/ipython.txt b/requirements/ipython.txt index c439de11..2c1b22c4 100644 --- a/requirements/ipython.txt +++ b/requirements/ipython.txt @@ -1,2 +1,2 @@ -IPython >=8.14.0 ; python_version < '4.0.0' and python_version >= '3.9.0' # Python 3.9+ -IPython >=8.12.2 ; python_version < '3.9.0' and python_version >= '3.8.0' # Python 3.8 +IPython >=8.14.0 ; python_version < '3.10' and python_version >= '3.9' +IPython >=8.28.0 ; python_version < '4.0' and python_version >= '3.10' diff --git a/requirements/optional.txt b/requirements/optional.txt index 24b6ba08..91637884 100644 --- a/requirements/optional.txt +++ b/requirements/optional.txt @@ -1,4 +1,4 @@ # Add requirements here, use the script for help # xdev availpkg rich -rich>=12.3.0 +rich>=13.9.0 -r ipython.txt diff --git a/requirements/tests.txt b/requirements/tests.txt index 74c2abc3..dae10d28 100644 --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -1,16 +1,5 @@ -pytest>=7.4.4 ; python_version < '4.0' and python_version >= '3.13' # Python 3.13+ -pytest>=7.4.4 ; python_version < '3.13' and python_version >= '3.12' # Python 3.12 -pytest>=7.4.4 ; python_version < '3.12' and python_version >= '3.11' # Python 3.11 -pytest>=7.4.4 ; python_version < '3.11' and python_version >= '3.10' # Python 3.10 -pytest>=7.4.4 ; python_version < '3.10' and python_version >= '3.9' # Python 3.9 -pytest>=7.4.4 ; python_version < '3.9' and python_version >= '3.8' # Python 3.8 - -pytest-cov>=3.0.0 - -coverage[toml]>=7.3.0 ; python_version < '4.0' and python_version >= '3.12' # Python 3.12 -coverage[toml]>=6.5.0 ; python_version < '3.12' and python_version >= '3.10' # Python 3.10-3.11 -coverage[toml]>=6.5.0 ; python_version < '3.10' and python_version >= '3.9' # 
Python 3.9 -coverage[toml]>=6.5.0 ; python_version < '3.9' and python_version >= '3.8' # Python 3.8 - +pytest>=7.4.4 +pytest-cov>=7.1.0 +coverage[toml]>=7.10.7 ubelt >= 1.3.4 xdoctest >= 1.1.3 From 17f2097b2c0f84b0f8f95a3cf3d55fed39f55cd5 Mon Sep 17 00:00:00 2001 From: Test User Date: Thu, 2 Apr 2026 20:26:32 -0400 Subject: [PATCH 2/3] update xcookie --- .github/workflows/release.yml | 297 ++++++++++++++++++++++++++++++++++ .github/workflows/tests.yml | 231 ++------------------------ dev/setup_secrets.sh | 27 +++- pyproject.toml | 8 +- 4 files changed, 332 insertions(+), 231 deletions(-) create mode 100644 .github/workflows/release.yml diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..67c3d191 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,297 @@ +# This workflow is autogenerated by xcookie. +# File kind: release +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions +# Based on ~/code/xcookie/xcookie/builders/github_actions.py +# See: https://github.com/Erotemic/xcookie + +name: BinPyRelease + +on: + push: + workflow_dispatch: + +jobs: + build_sdist: + ## + # Build the sdist artifact used by the release workflow. + # This workflow intentionally builds artifacts but does not run the + # full test matrix. 
+ ## + name: Build sdist + runs-on: ubuntu-latest + steps: + - name: Checkout source + uses: actions/checkout@v6.0.2 + - name: Set up Python 3.13 + uses: actions/setup-python@v5.6.0 + with: + python-version: '3.13' + - name: Build sdist + shell: bash + run: |- + python -m pip install pip uv -U + python -m uv pip install setuptools>=0.8 wheel build twine + python -m build --sdist --outdir wheelhouse + python -m twine check ./wheelhouse/line_profiler*.tar.gz + - name: Show built files + shell: bash + run: ls -la wheelhouse + - uses: actions/upload-artifact@v6.0.0 + name: Upload sdist artifact + with: + name: sdist_wheels + path: ./wheelhouse/line_profiler*.tar.gz + build_binpy_wheels: + ## + # Build binary wheels used by the release workflow. + ## + name: ${{ matrix.os }}, arch=${{ matrix.arch }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + # Normally, xcookie generates explicit lists of platforms to build / test + # on, but in this case cibuildwheel does that for us, so we need to just + # set the environment variables for cibuildwheel. These are parsed out of + # the standard [tool.cibuildwheel] section in pyproject.toml and set + # explicitly here. 
+ os: + - ubuntu-latest + - macOS-latest + - windows-latest + - ubuntu-24.04-arm + cibw_skip: + - '*-win32 cp3{10}-win_arm64 cp313-musllinux_i686' + arch: + - auto + steps: + - name: Checkout source + uses: actions/checkout@v6.0.2 + - name: Enable MSVC 64bit + uses: ilammy/msvc-dev-cmd@v1 + if: ${{ startsWith(matrix.os, 'windows-') }} + with: + arch: ${{ contains(matrix.os, 'arm') && 'arm64' || 'x64' }} + - name: Set up QEMU + uses: docker/setup-qemu-action@v3.7.0 + if: runner.os == 'Linux' && matrix.arch != 'auto' + with: + platforms: all + - name: Build binary wheels + uses: pypa/cibuildwheel@v3.3.1 + with: + output-dir: wheelhouse + config-file: pyproject.toml + env: + CIBW_SKIP: ${{ matrix.cibw_skip }} + CIBW_TEST_SKIP: '*-win_arm64' + CIBW_ARCHS_LINUX: ${{ matrix.arch }} + PYTHONUTF8: '1' + VSCMD_ARG_TGT_ARCH: '' + - name: Show built files + shell: bash + run: ls -la wheelhouse + - uses: actions/upload-artifact@v6.0.0 + name: Upload wheels artifact + with: + name: wheels-${{ matrix.os }}-${{ matrix.arch }} + path: ./wheelhouse/line_profiler*.whl + test_deploy: + name: Deploy Test + runs-on: ubuntu-latest + if: github.event_name == 'push' && ! startsWith(github.event.ref, 'refs/tags') && ! 
startsWith(github.event.ref, 'refs/heads/release') + needs: + - build_binpy_wheels + - build_sdist + steps: + - name: Checkout source + uses: actions/checkout@v6.0.2 + - uses: actions/download-artifact@v4.1.8 + name: Download wheels + with: + pattern: wheels-* + merge-multiple: true + path: wheelhouse + - uses: actions/download-artifact@v4.1.8 + name: Download sdist + with: + name: sdist_wheels + path: wheelhouse + - name: Show files to upload + shell: bash + run: ls -la wheelhouse + - name: Sign and Publish + env: + TWINE_REPOSITORY_URL: https://test.pypi.org/legacy/ + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.TEST_TWINE_PASSWORD }} + CI_SECRET: ${{ secrets.CI_SECRET }} + run: |- + GPG_EXECUTABLE=gpg + $GPG_EXECUTABLE --version + openssl version + $GPG_EXECUTABLE --list-keys + echo "Decrypting Keys" + openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/ci_public_gpg_key.pgp.enc | $GPG_EXECUTABLE --import + openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/gpg_owner_trust.enc | $GPG_EXECUTABLE --import-ownertrust + openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/ci_secret_gpg_subkeys.pgp.enc | $GPG_EXECUTABLE --import + echo "Finish Decrypt Keys" + $GPG_EXECUTABLE --list-keys || true + $GPG_EXECUTABLE --list-keys || echo "first invocation of gpg creates directories and returns 1" + $GPG_EXECUTABLE --list-keys + VERSION=$(python -c "import setup; print(setup.VERSION)") + python -m pip install pip uv -U + python -m pip install packaging twine -U + python -m pip install urllib3 requests[security] + GPG_KEYID=$(cat dev/public_gpg_key) + echo "GPG_KEYID = '$GPG_KEYID'" + GPG_SIGN_CMD="$GPG_EXECUTABLE --batch --yes --detach-sign --armor --local-user $GPG_KEYID" + WHEEL_PATHS=(wheelhouse/*.whl wheelhouse/*.tar.gz) + WHEEL_PATHS_STR=$(printf '"%s" ' "${WHEEL_PATHS[@]}") + echo "$WHEEL_PATHS_STR" + for WHEEL_PATH in "${WHEEL_PATHS[@]}" + do + echo "------" + echo "WHEEL_PATH = 
$WHEEL_PATH" + $GPG_SIGN_CMD --output $WHEEL_PATH.asc $WHEEL_PATH + $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH || echo "hack, the first run of gpg very fails" + $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH + done + ls -la wheelhouse + python -m pip install opentimestamps-client + ots stamp wheelhouse/*.whl wheelhouse/*.tar.gz wheelhouse/*.asc + ls -la wheelhouse + twine upload --username __token__ --password "$TWINE_PASSWORD" --repository-url "$TWINE_REPOSITORY_URL" wheelhouse/*.whl wheelhouse/*.tar.gz --skip-existing --verbose || { echo "failed to twine upload" ; exit 1; } + - uses: actions/upload-artifact@v6.0.0 + name: Upload deploy artifacts + with: + name: deploy_artifacts + path: |- + wheelhouse/*.whl + wheelhouse/*.zip + wheelhouse/*.tar.gz + wheelhouse/*.asc + wheelhouse/*.ots + live_deploy: + name: Deploy Live + runs-on: ubuntu-latest + if: github.event_name == 'push' && (startsWith(github.event.ref, 'refs/tags') || startsWith(github.event.ref, 'refs/heads/release')) + needs: + - build_binpy_wheels + - build_sdist + steps: + - name: Checkout source + uses: actions/checkout@v6.0.2 + - uses: actions/download-artifact@v4.1.8 + name: Download wheels + with: + pattern: wheels-* + merge-multiple: true + path: wheelhouse + - uses: actions/download-artifact@v4.1.8 + name: Download sdist + with: + name: sdist_wheels + path: wheelhouse + - name: Show files to upload + shell: bash + run: ls -la wheelhouse + - name: Sign and Publish + env: + TWINE_REPOSITORY_URL: https://upload.pypi.org/legacy/ + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} + CI_SECRET: ${{ secrets.CI_SECRET }} + run: |- + GPG_EXECUTABLE=gpg + $GPG_EXECUTABLE --version + openssl version + $GPG_EXECUTABLE --list-keys + echo "Decrypting Keys" + openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/ci_public_gpg_key.pgp.enc | $GPG_EXECUTABLE --import + openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in 
dev/gpg_owner_trust.enc | $GPG_EXECUTABLE --import-ownertrust + openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/ci_secret_gpg_subkeys.pgp.enc | $GPG_EXECUTABLE --import + echo "Finish Decrypt Keys" + $GPG_EXECUTABLE --list-keys || true + $GPG_EXECUTABLE --list-keys || echo "first invocation of gpg creates directories and returns 1" + $GPG_EXECUTABLE --list-keys + VERSION=$(python -c "import setup; print(setup.VERSION)") + python -m pip install pip uv -U + python -m pip install packaging twine -U + python -m pip install urllib3 requests[security] + GPG_KEYID=$(cat dev/public_gpg_key) + echo "GPG_KEYID = '$GPG_KEYID'" + GPG_SIGN_CMD="$GPG_EXECUTABLE --batch --yes --detach-sign --armor --local-user $GPG_KEYID" + WHEEL_PATHS=(wheelhouse/*.whl wheelhouse/*.tar.gz) + WHEEL_PATHS_STR=$(printf '"%s" ' "${WHEEL_PATHS[@]}") + echo "$WHEEL_PATHS_STR" + for WHEEL_PATH in "${WHEEL_PATHS[@]}" + do + echo "------" + echo "WHEEL_PATH = $WHEEL_PATH" + $GPG_SIGN_CMD --output $WHEEL_PATH.asc $WHEEL_PATH + $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH || echo "hack, the first run of gpg very fails" + $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH + done + ls -la wheelhouse + python -m pip install opentimestamps-client + ots stamp wheelhouse/*.whl wheelhouse/*.tar.gz wheelhouse/*.asc + ls -la wheelhouse + twine upload --username __token__ --password "$TWINE_PASSWORD" --repository-url "$TWINE_REPOSITORY_URL" wheelhouse/*.whl wheelhouse/*.tar.gz --skip-existing --verbose || { echo "failed to twine upload" ; exit 1; } + - uses: actions/upload-artifact@v6.0.0 + name: Upload deploy artifacts + with: + name: deploy_artifacts + path: |- + wheelhouse/*.whl + wheelhouse/*.zip + wheelhouse/*.tar.gz + wheelhouse/*.asc + wheelhouse/*.ots + release: + name: Create Github Release + if: github.event_name == 'push' && (startsWith(github.event.ref, 'refs/tags') || startsWith(github.event.ref, 'refs/heads/release')) + runs-on: ubuntu-latest + permissions: + 
contents: write + needs: + - live_deploy + steps: + - name: Checkout source + uses: actions/checkout@v6.0.2 + - uses: actions/download-artifact@v4.1.8 + name: Download artifacts + with: + name: deploy_artifacts + path: wheelhouse + - name: Show files to release + shell: bash + run: ls -la wheelhouse + - run: 'echo "Automatic Release Notes. TODO: improve" > ${{ github.workspace }}-CHANGELOG.txt' + - name: Tag Release Commit + if: (startsWith(github.event.ref, 'refs/heads/release')) + run: |- + export VERSION=$(python -c "import setup; print(setup.VERSION)") + git tag "v$VERSION" + git push origin "v$VERSION" + - uses: softprops/action-gh-release@v1 + name: Create Release + id: create_release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + body_path: ${{ github.workspace }}-CHANGELOG.txt + tag_name: ${{ github.ref }} + name: Release ${{ github.ref }} + body: Automatic Release + generate_release_notes: true + draft: true + prerelease: false + files: |- + wheelhouse/*.whl + wheelhouse/*.asc + wheelhouse/*.ots + wheelhouse/*.zip + wheelhouse/*.tar.gz + + diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 8c779fe0..9120f751 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,7 +1,7 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions +# This workflow is autogenerated by xcookie. 
+# File kind: tests # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions -# Based on ~/code/xcookie/xcookie/rc/tests.yml.in -# Now based on ~/code/xcookie/xcookie/builders/github_actions.py +# Based on ~/code/xcookie/xcookie/builders/github_actions.py # See: https://github.com/Erotemic/xcookie name: BinPyCI @@ -132,7 +132,7 @@ jobs: - windows-latest - ubuntu-24.04-arm cibw_skip: - '*-win32 cp3{9,10}-win_arm64 cp313-musllinux_i686' + '*-win32 cp310-win_arm64 cp313-musllinux_i686' arch: - auto steps: @@ -203,7 +203,7 @@ jobs: path: ./wheelhouse/line_profiler*.whl test_binpy_wheels: ## - # Download the previously build binary wheels from the + # Download the previously built binary wheels from the # build_binpy_wheels step, and test them in an independent # environment. ## @@ -218,19 +218,19 @@ jobs: # Xcookie generates an explicit list of environments that will be used # for testing instead of using the more concise matrix notation. 
include: - - python-version: '3.9' + - python-version: '3.10' install-extras: tests-strict,runtime-strict os: ubuntu-latest arch: auto - - python-version: '3.9' + - python-version: '3.10' install-extras: tests-strict,runtime-strict os: macOS-latest arch: auto - - python-version: '3.9' + - python-version: '3.10' install-extras: tests-strict,runtime-strict os: windows-latest arch: auto - - python-version: '3.9' + - python-version: '3.10' install-extras: tests-strict,runtime-strict os: ubuntu-24.04-arm arch: auto @@ -270,10 +270,6 @@ jobs: install-extras: tests os: windows-11-arm arch: auto - - python-version: '3.9' - install-extras: tests,optional - os: ubuntu-latest - arch: auto - python-version: '3.10' install-extras: tests,optional os: ubuntu-latest @@ -294,10 +290,6 @@ jobs: install-extras: tests,optional os: ubuntu-latest arch: auto - - python-version: '3.9' - install-extras: tests,optional - os: macOS-latest - arch: auto - python-version: '3.10' install-extras: tests,optional os: macOS-latest @@ -318,10 +310,6 @@ jobs: install-extras: tests,optional os: macOS-latest arch: auto - - python-version: '3.9' - install-extras: tests,optional - os: windows-latest - arch: auto - python-version: '3.10' install-extras: tests,optional os: windows-latest @@ -342,10 +330,6 @@ jobs: install-extras: tests,optional os: windows-latest arch: auto - - python-version: '3.9' - install-extras: tests,optional - os: ubuntu-24.04-arm - arch: auto - python-version: '3.10' install-extras: tests,optional os: ubuntu-24.04-arm @@ -499,202 +483,5 @@ jobs: with: file: ./coverage.xml token: ${{ secrets.CODECOV_TOKEN }} - test_deploy: - name: Deploy Test - runs-on: ubuntu-latest - if: github.event_name == 'push' && ! startsWith(github.event.ref, 'refs/tags') && ! 
startsWith(github.event.ref, 'refs/heads/release') - needs: - - build_and_test_sdist - - build_binpy_wheels - steps: - - name: Checkout source - uses: actions/checkout@v6.0.2 - - uses: actions/download-artifact@v4.1.8 - name: Download wheels - with: - pattern: wheels-* - merge-multiple: true - path: wheelhouse - - uses: actions/download-artifact@v4.1.8 - name: Download sdist - with: - name: sdist_wheels - path: wheelhouse - - name: Show files to upload - shell: bash - run: ls -la wheelhouse - - name: Sign and Publish - env: - TWINE_REPOSITORY_URL: https://test.pypi.org/legacy/ - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.TEST_TWINE_PASSWORD }} - CI_SECRET: ${{ secrets.CI_SECRET }} - run: |- - GPG_EXECUTABLE=gpg - $GPG_EXECUTABLE --version - openssl version - $GPG_EXECUTABLE --list-keys - echo "Decrypting Keys" - openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/ci_public_gpg_key.pgp.enc | $GPG_EXECUTABLE --import - openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/gpg_owner_trust.enc | $GPG_EXECUTABLE --import-ownertrust - openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/ci_secret_gpg_subkeys.pgp.enc | $GPG_EXECUTABLE --import - echo "Finish Decrypt Keys" - $GPG_EXECUTABLE --list-keys || true - $GPG_EXECUTABLE --list-keys || echo "first invocation of gpg creates directories and returns 1" - $GPG_EXECUTABLE --list-keys - VERSION=$(python -c "import setup; print(setup.VERSION)") - python -m pip install pip uv -U - python -m pip install packaging twine -U - python -m pip install urllib3 requests[security] - GPG_KEYID=$(cat dev/public_gpg_key) - echo "GPG_KEYID = '$GPG_KEYID'" - GPG_SIGN_CMD="$GPG_EXECUTABLE --batch --yes --detach-sign --armor --local-user $GPG_KEYID" - WHEEL_PATHS=(wheelhouse/*.whl wheelhouse/*.tar.gz) - WHEEL_PATHS_STR=$(printf '"%s" ' "${WHEEL_PATHS[@]}") - echo "$WHEEL_PATHS_STR" - for WHEEL_PATH in "${WHEEL_PATHS[@]}" - do - echo "------" - echo 
"WHEEL_PATH = $WHEEL_PATH" - $GPG_SIGN_CMD --output $WHEEL_PATH.asc $WHEEL_PATH - $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH || echo "hack, the first run of gpg very fails" - $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH - done - ls -la wheelhouse - python -m pip install opentimestamps-client - ots stamp wheelhouse/*.whl wheelhouse/*.tar.gz wheelhouse/*.asc - ls -la wheelhouse - twine upload --username __token__ --password "$TWINE_PASSWORD" --repository-url "$TWINE_REPOSITORY_URL" wheelhouse/*.whl wheelhouse/*.tar.gz --skip-existing --verbose || { echo "failed to twine upload" ; exit 1; } - - uses: actions/upload-artifact@v6.0.0 - name: Upload deploy artifacts - with: - name: deploy_artifacts - path: |- - wheelhouse/*.whl - wheelhouse/*.zip - wheelhouse/*.tar.gz - wheelhouse/*.asc - wheelhouse/*.ots - live_deploy: - name: Deploy Live - runs-on: ubuntu-latest - if: github.event_name == 'push' && (startsWith(github.event.ref, 'refs/tags') || startsWith(github.event.ref, 'refs/heads/release')) - needs: - - build_and_test_sdist - - build_binpy_wheels - steps: - - name: Checkout source - uses: actions/checkout@v6.0.2 - - uses: actions/download-artifact@v4.1.8 - name: Download wheels - with: - pattern: wheels-* - merge-multiple: true - path: wheelhouse - - uses: actions/download-artifact@v4.1.8 - name: Download sdist - with: - name: sdist_wheels - path: wheelhouse - - name: Show files to upload - shell: bash - run: ls -la wheelhouse - - name: Sign and Publish - env: - TWINE_REPOSITORY_URL: https://upload.pypi.org/legacy/ - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} - CI_SECRET: ${{ secrets.CI_SECRET }} - run: |- - GPG_EXECUTABLE=gpg - $GPG_EXECUTABLE --version - openssl version - $GPG_EXECUTABLE --list-keys - echo "Decrypting Keys" - openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/ci_public_gpg_key.pgp.enc | $GPG_EXECUTABLE --import - openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass 
env:CI_SECRET -d -a -in dev/gpg_owner_trust.enc | $GPG_EXECUTABLE --import-ownertrust - openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:CI_SECRET -d -a -in dev/ci_secret_gpg_subkeys.pgp.enc | $GPG_EXECUTABLE --import - echo "Finish Decrypt Keys" - $GPG_EXECUTABLE --list-keys || true - $GPG_EXECUTABLE --list-keys || echo "first invocation of gpg creates directories and returns 1" - $GPG_EXECUTABLE --list-keys - VERSION=$(python -c "import setup; print(setup.VERSION)") - python -m pip install pip uv -U - python -m pip install packaging twine -U - python -m pip install urllib3 requests[security] - GPG_KEYID=$(cat dev/public_gpg_key) - echo "GPG_KEYID = '$GPG_KEYID'" - GPG_SIGN_CMD="$GPG_EXECUTABLE --batch --yes --detach-sign --armor --local-user $GPG_KEYID" - WHEEL_PATHS=(wheelhouse/*.whl wheelhouse/*.tar.gz) - WHEEL_PATHS_STR=$(printf '"%s" ' "${WHEEL_PATHS[@]}") - echo "$WHEEL_PATHS_STR" - for WHEEL_PATH in "${WHEEL_PATHS[@]}" - do - echo "------" - echo "WHEEL_PATH = $WHEEL_PATH" - $GPG_SIGN_CMD --output $WHEEL_PATH.asc $WHEEL_PATH - $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH || echo "hack, the first run of gpg very fails" - $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH - done - ls -la wheelhouse - python -m pip install opentimestamps-client - ots stamp wheelhouse/*.whl wheelhouse/*.tar.gz wheelhouse/*.asc - ls -la wheelhouse - twine upload --username __token__ --password "$TWINE_PASSWORD" --repository-url "$TWINE_REPOSITORY_URL" wheelhouse/*.whl wheelhouse/*.tar.gz --skip-existing --verbose || { echo "failed to twine upload" ; exit 1; } - - uses: actions/upload-artifact@v6.0.0 - name: Upload deploy artifacts - with: - name: deploy_artifacts - path: |- - wheelhouse/*.whl - wheelhouse/*.zip - wheelhouse/*.tar.gz - wheelhouse/*.asc - wheelhouse/*.ots - release: - name: Create Github Release - if: github.event_name == 'push' && (startsWith(github.event.ref, 'refs/tags') || startsWith(github.event.ref, 'refs/heads/release')) - runs-on: 
ubuntu-latest - permissions: - contents: write - needs: - - live_deploy - steps: - - name: Checkout source - uses: actions/checkout@v6.0.2 - - uses: actions/download-artifact@v4.1.8 - name: Download artifacts - with: - name: deploy_artifacts - path: wheelhouse - - name: Show files to release - shell: bash - run: ls -la wheelhouse - - run: 'echo "Automatic Release Notes. TODO: improve" > ${{ github.workspace }}-CHANGELOG.txt' - - name: Tag Release Commit - if: (startsWith(github.event.ref, 'refs/heads/release')) - run: |- - export VERSION=$(python -c "import setup; print(setup.VERSION)") - git tag "v$VERSION" - git push origin "v$VERSION" - - uses: softprops/action-gh-release@v1 - name: Create Release - id: create_release - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - body_path: ${{ github.workspace }}-CHANGELOG.txt - tag_name: ${{ github.ref }} - name: Release ${{ github.ref }} - body: Automatic Release - generate_release_notes: true - draft: true - prerelease: false - files: |- - wheelhouse/*.whl - wheelhouse/*.asc - wheelhouse/*.ots - wheelhouse/*.zip - wheelhouse/*.tar.gz diff --git a/dev/setup_secrets.sh b/dev/setup_secrets.sh index 0a8efc9e..81f12011 100644 --- a/dev/setup_secrets.sh +++ b/dev/setup_secrets.sh @@ -162,6 +162,22 @@ setup_package_environs_github_pyutils(){ #' | python -c "import sys; from textwrap import dedent; print(dedent(sys.stdin.read()).strip(chr(10)))" > dev/secrets_configuration.sh } +resolve_secret_value_from_varname_ptr(){ + local secret_varname_ptr="$1" + local secret_name="$2" + local secret_varname="${!secret_varname_ptr}" + if [[ "$secret_varname" == "" ]]; then + echo "Skipping $secret_name because $secret_varname_ptr is unset" >&2 + return 1 + fi + local secret_value="${!secret_varname}" + if [[ "$secret_value" == "" ]]; then + echo "Skipping $secret_name because $secret_varname is unset or empty" >&2 + return 1 + fi + printf '%s' "$secret_value" +} + upload_github_secrets(){ load_secrets unset GITHUB_TOKEN @@ 
-169,13 +185,14 @@ upload_github_secrets(){ if ! gh auth status ; then gh auth login fi + local secret_value source dev/secrets_configuration.sh - gh secret set "TWINE_USERNAME" -b"${!VARNAME_TWINE_USERNAME}" - gh secret set "TEST_TWINE_USERNAME" -b"${!VARNAME_TEST_TWINE_USERNAME}" + secret_value=$(resolve_secret_value_from_varname_ptr VARNAME_TWINE_USERNAME TWINE_USERNAME) && gh secret set "TWINE_USERNAME" -b"$secret_value" + secret_value=$(resolve_secret_value_from_varname_ptr VARNAME_TEST_TWINE_USERNAME TEST_TWINE_USERNAME) && gh secret set "TEST_TWINE_USERNAME" -b"$secret_value" toggle_setx_enter - gh secret set "CI_SECRET" -b"${!VARNAME_CI_SECRET}" - gh secret set "TWINE_PASSWORD" -b"${!VARNAME_TWINE_PASSWORD}" - gh secret set "TEST_TWINE_PASSWORD" -b"${!VARNAME_TEST_TWINE_PASSWORD}" + secret_value=$(resolve_secret_value_from_varname_ptr VARNAME_CI_SECRET CI_SECRET) && gh secret set "CI_SECRET" -b"$secret_value" + secret_value=$(resolve_secret_value_from_varname_ptr VARNAME_TWINE_PASSWORD TWINE_PASSWORD) && gh secret set "TWINE_PASSWORD" -b"$secret_value" + secret_value=$(resolve_secret_value_from_varname_ptr VARNAME_TEST_TWINE_PASSWORD TEST_TWINE_PASSWORD) && gh secret set "TEST_TWINE_PASSWORD" -b"$secret_value" toggle_setx_exit } diff --git a/pyproject.toml b/pyproject.toml index ff66661d..ca08ae6f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,12 +34,12 @@ omit =[ ] [tool.cibuildwheel] -build = "cp39-* cp310-* cp311-* cp312-* cp313-* cp314-*" +build = "cp310-* cp311-* cp312-* cp313-* cp314-*" # XXX: since `tests.yml` already defines `matrix.cibw_skip` for # `build_binpy_wheels`, can we deduplicate and just use that? # Or do we need these when building wheels for release, which may run on # separate pipelines? 
-skip = ["*-win32", "cp3{9,10}-win_arm64", "cp313-musllinux_i686"] +skip = ["*-win32", "cp310-win_arm64", "cp313-musllinux_i686"] build-frontend = "build" build-verbosity = 1 test-command = "python {project}/run_tests.py" @@ -65,7 +65,7 @@ repo_name = "line_profiler" rel_mod_parent_dpath = "." os = [ "all", "linux", "osx", "win",] main_python = '3.13' -min_python = '3.9' +min_python = '3.10' max_python = '3.14' author = "Robert Kern" author_email = "robert.kern@enthought.com" @@ -102,7 +102,7 @@ rules = { unused-type-ignore-comment = "ignore" } [tool.ruff] line-length = 80 -target-version = "py39" +target-version = "py310" [tool.ruff.lint] # Enable Flake8 (E, F) and isort (I) rules. From 714c414a92626ab3cb8fb74bdbdc2a83bd308077 Mon Sep 17 00:00:00 2001 From: Test User Date: Thu, 2 Apr 2026 20:27:09 -0400 Subject: [PATCH 3/3] fix setup --- setup.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/setup.py b/setup.py index 85eabaad..c5842e6a 100755 --- a/setup.py +++ b/setup.py @@ -334,8 +334,6 @@ def run_cythonize(force=False): 'Operating System :: OS Independent', 'Programming Language :: C', 'Programming Language :: Python', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', 'Programming Language :: Python :: 3.12',