diff --git a/.github/workflows/build-wheels-platforms.yml b/.github/workflows/build-wheels-platforms.yml index 6a48a5d..50965b1 100644 --- a/.github/workflows/build-wheels-platforms.yml +++ b/.github/workflows/build-wheels-platforms.yml @@ -181,6 +181,10 @@ jobs: if: matrix.os == 'Windows' run: python build_wheels.py + - name: Fix permissions on downloaded_wheels (ARMv7 Docker builds) + if: matrix.os == 'Linux ARMv7' || matrix.os == 'Linux ARMv7 Legacy' + run: sudo chown -R $USER:$USER ./downloaded_wheels + - name: Upload artifacts of downloaded_wheels directory uses: actions/upload-artifact@v4 with: diff --git a/.github/workflows/build-wheels-python-dependent.yml b/.github/workflows/build-wheels-python-dependent.yml index 7764707..89e0973 100644 --- a/.github/workflows/build-wheels-python-dependent.yml +++ b/.github/workflows/build-wheels-python-dependent.yml @@ -18,6 +18,8 @@ jobs: runs-on: ${{ matrix.runner }} env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # PyO3 (cryptography, etc.): allow building against CPython newer than PyO3's declared max when using stable ABI + PYO3_USE_ABI3_FORWARD_COMPATIBILITY: "1" strategy: fail-fast: false matrix: @@ -148,7 +150,7 @@ jobs: bash os_dependencies/linux_arm.sh # Source Rust environment after installation . \$HOME/.cargo/env - python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }} + python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }} " - name: Build Python dependent wheels - ARMv7 Legacy (in Docker) @@ -171,7 +173,7 @@ jobs: bash os_dependencies/linux_arm.sh # Source Rust environment after installation . 
\$HOME/.cargo/env - python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }} + python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }} " - name: Build Python dependent wheels - Linux/macOS @@ -184,12 +186,15 @@ jobs: export ARCHFLAGS="-arch x86_64" fi - python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }} + python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }} - name: Build Python dependent wheels for ${{ matrix.python-version }} - Windows if: matrix.os == 'Windows' - run: python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }} + run: python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }} + - name: Fix permissions on downloaded_wheels (ARMv7 Docker builds) + if: matrix.os == 'Linux ARMv7' || matrix.os == 'Linux ARMv7 Legacy' + run: sudo chown -R $USER:$USER ./downloaded_wheels - name: Upload artifacts uses: actions/upload-artifact@v4 diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index aee50aa..e79b428 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -32,7 +32,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install packaging pyyaml colorama requests + python -m pip install -r build_requirements.txt - name: Run unit tests run: python -m unittest discover -s . -v diff --git a/README.md b/README.md index 69dd8ba..8fc3678 100644 --- a/README.md +++ b/README.md @@ -105,6 +105,25 @@ This would mean: "From assembled **main requirements** exclude `pyserial` with v From the example above is clear that the `platform` could be left out (because all main platforms are specified) so the options `platform` or `version` or `python` are optional, one of them or both can be not specified and the key can be erased. 
When only `package_name` is given the package will be excluded from **main requirements**. +### PyPI `Requires-Python` preflight + +Before running `pip wheel`, the build scripts can query the [PyPI JSON API](https://docs.pypi.org/api/json/) so that **no release** matching the requirement’s **version specifier** (including `==`, `~=`, and ranges such as `>=x, str: return sys.platform +def wheel_archive_is_readable(path: Path) -> bool: + """True if the file is a zip with a readable central directory (valid wheel container). + + ``zipfile.is_zipfile()`` only checks the leading magic; truncated or corrupt wheels can + still fail with ``BadZipFile`` when reading the central directory (pip, delocate, etc.). + """ + if not zipfile.is_zipfile(path): + return False + try: + with zipfile.ZipFile(path, "r") as zf: + zf.namelist() + except zipfile.BadZipFile: + return False + return True + + +# PyPI JSON API: cache (project canonical name, version) -> requires_python or None if unset/unknown +_PYPI_REQUIRES_PYTHON_CACHE: Dict[Tuple[str, str], Optional[str]] = {} +# Full project JSON per canonical package name; None means fetch failed (cached) +_PYPI_PROJECT_JSON_CACHE: Dict[str, Optional[Dict[str, Any]]] = {} + + +def _pypi_user_agent() -> str: + return "idf-python-wheels (https://github.com/espressif/idf-python-wheels)" + + +def current_interpreter_satisfies_requires_python(requires_python: Optional[str]) -> bool: + """True if this interpreter satisfies PyPI ``Requires-Python`` (PEP 345 / PEP 566), or if unset.""" + if requires_python is None or not requires_python.strip(): + return True + try: + spec = SpecifierSet(requires_python) + except ValueError: + return True + py_ver = Version(f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}") + return bool(spec.contains(py_ver, prereleases=True)) + + +def fetch_pypi_release_requires_python(project_name: str, version: str, timeout: float = 20.0) -> Optional[str]: + """Return ``info.requires_python`` for a 
release, or None if unknown (missing, error, or no field).""" + key = (canonicalize_name(project_name), version) + if key in _PYPI_REQUIRES_PYTHON_CACHE: + return _PYPI_REQUIRES_PYTHON_CACHE[key] + pkg = canonicalize_name(project_name) + url = f"https://pypi.org/pypi/{quote(pkg)}/{quote(version)}/json" + try: + request = Request(url, headers={"User-Agent": _pypi_user_agent()}) + with urlopen(request, timeout=timeout) as response: + data = json.loads(response.read().decode()) + except HTTPError as e: + if e.code == 404: + _PYPI_REQUIRES_PYTHON_CACHE[key] = None + return None + _PYPI_REQUIRES_PYTHON_CACHE[key] = None + return None + except (URLError, OSError, TimeoutError, ValueError, json.JSONDecodeError): + _PYPI_REQUIRES_PYTHON_CACHE[key] = None + return None + rp = data.get("info", {}).get("requires_python") + if rp is None or (isinstance(rp, str) and not rp.strip()): + _PYPI_REQUIRES_PYTHON_CACHE[key] = None + return None + _PYPI_REQUIRES_PYTHON_CACHE[key] = str(rp).strip() + return _PYPI_REQUIRES_PYTHON_CACHE[key] + + +def fetch_pypi_project_json(project_name: str, timeout: float = 20.0) -> Optional[Dict[str, Any]]: + """Return PyPI ``/pypi/{name}/json`` payload, or None on error.""" + pkg = canonicalize_name(project_name) + if pkg in _PYPI_PROJECT_JSON_CACHE: + return _PYPI_PROJECT_JSON_CACHE[pkg] + url = f"https://pypi.org/pypi/{quote(pkg)}/json" + try: + request = Request(url, headers={"User-Agent": _pypi_user_agent()}) + with urlopen(request, timeout=timeout) as response: + # Use typing.Dict in cast(): dict[str, Any] is evaluated at runtime and breaks on Python 3.8. 
+ data = cast(Dict[str, Any], json.loads(response.read().decode())) + except (HTTPError, URLError, OSError, TimeoutError, ValueError, json.JSONDecodeError): + _PYPI_PROJECT_JSON_CACHE[pkg] = None + return None + _PYPI_PROJECT_JSON_CACHE[pkg] = data + return data + + +def matching_release_version_strings(req: Requirement) -> Optional[List[str]]: + """List PyPI release version strings that satisfy ``req.specifier``, newest first. + + Returns None if project metadata could not be fetched (caller should not skip the build). + Returns an empty list if no published release matches the specifier. + """ + data = fetch_pypi_project_json(req.name) + if data is None: + return None + releases = data.get("releases") or {} + candidates: List[Tuple[Version, str]] = [] + for ver_str in releases: + try: + parsed = parse_version(ver_str) + except InvalidVersion: + continue + if req.specifier.contains(parsed, prereleases=True): + candidates.append((parsed, ver_str)) + candidates.sort(key=lambda x: x[0], reverse=True) + return [pair[1] for pair in candidates] + + +def pypi_requires_python_preflight_skip(req: Requirement) -> Tuple[bool, str]: + """If True, skip ``pip wheel``: no PyPI release matches the specifier for this interpreter. + + Uses project index + per-release ``Requires-Python`` (covers ``==``, ``~=``, ranges, etc.). + Set ``SKIP_PYPI_REQUIRES_PYTHON_CHECK`` to disable. 
+ """ + if os.environ.get("SKIP_PYPI_REQUIRES_PYTHON_CHECK", "").strip().lower() in ("1", "true", "yes"): + return False, "" + candidates = matching_release_version_strings(req) + if candidates is None: + return False, "" + if not candidates: + return True, "no PyPI releases match this requirement specifier" + + for ver_str in candidates: + rp = fetch_pypi_release_requires_python(req.name, ver_str) + if current_interpreter_satisfies_requires_python(rp): + return False, "" + + newest = candidates[0] + newest_rp = fetch_pypi_release_requires_python(req.name, newest) + py_mm = f"{sys.version_info.major}.{sys.version_info.minor}" + if newest_rp: + return ( + True, + f"newest matching release {newest!r} requires Python {newest_rp!r}; " + f"no installable release for Python {py_mm} ({req.name})", + ) + return True, f"no installable release on PyPI for Python {py_mm} ({req})" + + +def filter_requirements_by_pypi_requires_python(requirements: Set) -> Set: + """Drop requirements with no PyPI release installable on this interpreter (``Requires-Python``).""" + if os.environ.get("SKIP_PYPI_REQUIRES_PYTHON_CHECK", "").strip().lower() in ("1", "true", "yes"): + return set(requirements) + kept: Set = set() + print_color("---------- PYPI Requires-Python PREFLIGHT ----------", Fore.CYAN) + for req in requirements: + if not isinstance(req, Requirement): + kept.add(req) + continue + skip, reason = pypi_requires_python_preflight_skip(req) + if skip: + print_color(f"-- skip {req} ({reason})", Fore.YELLOW) + continue + kept.add(req) + print_color("---------- END PYPI Requires-Python PREFLIGHT ----------", Fore.CYAN) + return kept + + def exclude_entry_applies_to_platform(entry: dict, current_platform: str) -> bool: """True if this exclude_list entry applies to current_platform (so we should exclude from build).""" platforms = entry.get("platform", []) @@ -107,11 +283,23 @@ def get_no_binary_args(requirement_name: str) -> list: return [] +def _safe_text_for_stdout(text: str) -> str: + 
"""Avoid UnicodeEncodeError when printing pip/tool output on Windows (e.g. cp1252 console).""" + encoding = getattr(sys.stdout, "encoding", None) or "utf-8" + if encoding.lower() in ("utf-8", "utf8"): + return text + try: + text.encode(encoding) + return text + except UnicodeEncodeError: + return text.encode(encoding, errors="replace").decode(encoding, errors="replace") + + def print_color(text: str, color: str = Fore.BLUE): """Print colored text specified by color argument based on colorama - default color BLUE """ - print(f"{color}", f"{text}", Style.RESET_ALL) + print(f"{color}", f"{_safe_text_for_stdout(text)}", Style.RESET_ALL) def merge_requirements(requirement: Requirement, another_req: Requirement) -> Requirement: diff --git a/build_requirements.txt b/build_requirements.txt index 08e4834..0f2849f 100644 --- a/build_requirements.txt +++ b/build_requirements.txt @@ -4,6 +4,7 @@ requests~=2.31.0 packaging~=23.2 PyYAML~=6.0.1 colorama~=0.4.6 +tomli; python_version < "3.11" # ----- build process ----- boto3~=1.34.4 diff --git a/build_wheels.py b/build_wheels.py index 861537a..48a1794 100644 --- a/build_wheels.py +++ b/build_wheels.py @@ -16,10 +16,16 @@ import requests +try: + import tomllib +except ImportError: # Python < 3.11 does not have tomllib built-in module + import tomli as tomllib + from colorama import Fore from packaging.requirements import InvalidRequirement from packaging.requirements import Requirement +from _helper_functions import filter_requirements_by_pypi_requires_python from _helper_functions import get_current_platform from _helper_functions import get_no_binary_args from _helper_functions import merge_requirements @@ -35,6 +41,8 @@ IDF_RESOURCES_URL = "https://raw.githubusercontent.com/espressif/esp-idf/" # URL for IDF master CMAKE version file IDF_MASTER_VERSION_URL = f"{IDF_RESOURCES_URL}master/tools/cmake/version.cmake" +# URL for esptool pyproject.toml file +ESPTOOL_PYPROJECT_URL = 
"https://raw.githubusercontent.com/espressif/esptool/master/pyproject.toml" # Minimal IDF release version to take requirements from (v{MAJOR}.{MINOR}) # Requirements from all release branches and master equal or above this will be considered @@ -151,6 +159,19 @@ def _download_branch_requirements(branch: str, idf_requirements_json: dict) -> L if check_response(res, f"Failed to download feature (requirement group) '{feature['name']}'"): requirements_txt += res.text.splitlines() print(f"Added ESP-IDF {feature['name']} requirements") + + return requirements_txt + + +def _download_esptool_requirements() -> List[str]: + """Download esptool requirements from pyproject.toml file""" + requirements_txt: List[str] = [] + res = requests.get(ESPTOOL_PYPROJECT_URL, headers=AUTH_HEADER, timeout=10) + if check_response(res, "Failed to download esptool pyproject.toml file"): + pyproject_content = tomllib.loads(res.text) + esptool_deps = pyproject_content.get("project", {}).get("dependencies", []) + requirements_txt += [dep for dep in esptool_deps if dep not in requirements_txt] + print("Added esptool requirements") return requirements_txt @@ -201,6 +222,8 @@ def assemble_requirements(idf_branches: List[str], idf_constraints: List[str], m requirements_txt += _download_branch_requirements(branch, idf_requirements_json) requirements_txt += _download_branch_constraints(constraint_file_url, branch, idf_constraints[i]) + requirements_txt += _download_esptool_requirements() + if make_txt_file: # TXT file from all downloaded requirements and constraints files # useful for debugging or to see the comments for requirements @@ -395,8 +418,10 @@ def main() -> int: ).requirements after_exclude_requirements = exclude_from_requirements(requirements, exclude_list) + after_exclude_requirements = filter_requirements_by_pypi_requires_python(after_exclude_requirements) include_list = YAMLListAdapter("include_list.yaml").requirements + include_list = 
filter_requirements_by_pypi_requires_python(include_list) print_color("---------- ADDITIONAL REQUIREMENTS ----------") for req in include_list: print(req) @@ -424,6 +449,7 @@ def main() -> int: f"{os.path.curdir}{(os.sep)}downloaded_wheels", after_exclude_requirements ) after_exclude_dependent_wheels = exclude_from_requirements(dependent_wheels, exclude_list) + after_exclude_dependent_wheels = filter_requirements_by_pypi_requires_python(after_exclude_dependent_wheels) with open("dependent_requirements.txt", "w") as f: for wheel in after_exclude_dependent_wheels: diff --git a/build_wheels_from_file.py b/build_wheels_from_file.py index c4fcb7c..ab1c132 100644 --- a/build_wheels_from_file.py +++ b/build_wheels_from_file.py @@ -3,15 +3,77 @@ # # SPDX-License-Identifier: Apache-2.0 # +from __future__ import annotations + import argparse import os +import platform import subprocess import sys from colorama import Fore +from packaging.requirements import InvalidRequirement +from packaging.requirements import Requirement +from packaging.utils import canonicalize_name from _helper_functions import get_no_binary_args from _helper_functions import print_color +from _helper_functions import pypi_requires_python_preflight_skip + +# Do not pass --no-binary for these in --force-interpreter-binary mode: +# - sdists whose legacy setup breaks under PEP 517 isolation (pkg_resources in isolated env). +# - sdists that fail to compile on CI when a usable wheel exists (e.g. ruamel.yaml.clib + clang). +# - PyObjC: all pyobjc / pyobjc-framework-* use pyobjc_setup.py + pkg_resources (macOS). +# - cryptography: abi3 wheels; avoid PyO3 max-Python / heavy Rust rebuilds in dependent jobs. +# - pydantic-core: maturin + jiter + PyO3 can fail from sdist on some CI combos (e.g. ARM64 3.9: +# jiter vs pyo3-ffi PyUnicode_* / extract API). Prefer compatible wheels from find-links or PyPI. 
+_FORCE_INTERPRETER_BINARY_SKIP_EXACT = frozenset( + { + canonicalize_name("cryptography"), + canonicalize_name("pydantic-core"), + canonicalize_name("protobuf"), + canonicalize_name("ruamel.yaml.clib"), + } +) + + +def _force_interpreter_skip_package(canonical_dist_name: str) -> bool: + if canonical_dist_name in _FORCE_INTERPRETER_BINARY_SKIP_EXACT: + return True + # PyObjC meta and framework bindings (pyobjc-framework-corebluetooth, etc.) + return canonical_dist_name == "pyobjc" or canonical_dist_name.startswith("pyobjc-") + + +def _force_interpreter_no_binary_args(requirement_line: str) -> list[str]: + """Return pip --no-binary for this package so pip cannot reuse e.g. cp311-abi3 wheels on 3.13.""" + line = requirement_line.strip() + if not line: + return [] + try: + req = Requirement(line) + except InvalidRequirement: + return [] + if _force_interpreter_skip_package(canonicalize_name(req.name)): + return [] + return ["--no-binary", req.name] + + +def _apply_force_interpreter_binary(cli_flag: bool) -> bool: + """Linux/macOS only: forcing sdist builds for cryptography etc. is unreliable on Windows CI.""" + return cli_flag and platform.system() != "Windows" + + +def _pypi_preflight_skip_line(requirement_line: str) -> bool: + """Print and return True if this line should be skipped (PyPI Requires-Python).""" + try: + req = Requirement(requirement_line) + except InvalidRequirement: + return False + skip, reason = pypi_requires_python_preflight_skip(req) + if skip: + print_color(f"-- skip {requirement_line} ({reason})", Fore.YELLOW) + return skip + parser = argparse.ArgumentParser(description="Process build arguments.") parser.add_argument( @@ -36,6 +98,16 @@ action="store_true", help="CI exclude-tests mode: fail if all wheels succeed (expect some to fail, e.g. 
excluded packages)", ) +parser.add_argument( + "--force-interpreter-binary", + action="store_true", + help=( + "For each requirement, pass --no-binary so pip builds a wheel for the current " + "interpreter instead of reusing a compatible abi3 / older cpXY wheel from --find-links. " + "Ignored on Windows (source builds for e.g. cryptography are not used in CI there). " + "Some packages are always skipped (e.g. cryptography, pydantic-core, protobuf, PyObjC, ruamel.yaml.clib)." + ), +) args = parser.parse_args() @@ -45,6 +117,7 @@ failed_wheels = 0 succeeded_wheels = 0 +skipped_wheels = 0 # Build wheels for requirements in file if requirements_dir: @@ -55,8 +128,19 @@ raise SystemExit(f"Python version dependent requirements directory or file not found ({e})") for requirement in requirements: + requirement = requirement.strip() + if not requirement or requirement.startswith("#"): + continue + if _pypi_preflight_skip_line(requirement): + skipped_wheels += 1 + continue # Get no-binary args for packages that should be built from source no_binary_args = get_no_binary_args(requirement) + force_interpreter_args = ( + _force_interpreter_no_binary_args(requirement) + if _apply_force_interpreter_binary(args.force_interpreter_binary) + else [] + ) out = subprocess.run( [ @@ -64,13 +148,14 @@ "-m", "pip", "wheel", - f"{requirement}", + requirement, "--find-links", "downloaded_wheels", "--wheel-dir", "downloaded_wheels", ] - + no_binary_args, + + no_binary_args + + force_interpreter_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) @@ -87,10 +172,11 @@ print_color("---------- STATISTICS ----------") print_color(f"Succeeded {succeeded_wheels} wheels", Fore.GREEN) print_color(f"Failed {failed_wheels} wheels", Fore.RED) + if skipped_wheels: + print_color(f"Skipped {skipped_wheels} wheels (PyPI Requires-Python)", Fore.YELLOW) if args.ci_tests: - total = succeeded_wheels + failed_wheels - if total > 0 and failed_wheels == 0: + if succeeded_wheels > 0 and failed_wheels == 0: raise 
SystemExit("CI: expected some builds to fail (excluded packages)") elif failed_wheels != 0: raise SystemExit("One or more wheels failed to build") @@ -98,8 +184,16 @@ # Build wheels from passed requirements else: for requirement in in_requirements: + if _pypi_preflight_skip_line(requirement): + skipped_wheels += 1 + continue # Get no-binary args for packages that should be built from source no_binary_args = get_no_binary_args(requirement) + force_interpreter_args = ( + _force_interpreter_no_binary_args(requirement) + if _apply_force_interpreter_binary(args.force_interpreter_binary) + else [] + ) out = subprocess.run( [ @@ -113,7 +207,8 @@ "--wheel-dir", "downloaded_wheels", ] - + no_binary_args, + + no_binary_args + + force_interpreter_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) @@ -130,10 +225,11 @@ print_color("---------- STATISTICS ----------") print_color(f"Succeeded {succeeded_wheels} wheels", Fore.GREEN) print_color(f"Failed {failed_wheels} wheels", Fore.RED) + if skipped_wheels: + print_color(f"Skipped {skipped_wheels} wheels (PyPI Requires-Python)", Fore.YELLOW) if args.ci_tests: - total = succeeded_wheels + failed_wheels - if total > 0 and failed_wheels == 0: + if succeeded_wheels > 0 and failed_wheels == 0: raise SystemExit("CI: expected some builds to fail (excluded packages)") elif failed_wheels != 0: raise SystemExit("One or more wheels failed to build") diff --git a/exclude_list.yaml b/exclude_list.yaml index f513fdb..151c8ba 100644 --- a/exclude_list.yaml +++ b/exclude_list.yaml @@ -22,6 +22,10 @@ platform: 'darwin' python: '>3.11' +# dbus-python 1.2.x sdist (e.g. 
1.2.18) fails configure link step against CPython on macOS CI (Python 3.11) +- package_name: 'dbus-python' + platform: 'darwin' + python: '==3.11' - package_name: 'pygobject' python: '==3.8' @@ -113,6 +117,11 @@ version: '<2.35.0' python: '>=3.14' +# pydantic_core: maturin sdist on CPython 3.14 + macOS/Windows fails in CI (PyO3 limited API); Linux 3.14 still built +- package_name: 'pydantic_core' + platform: ['darwin', 'win32'] + python: '==3.14' + # rpds_py supports Python 3.14 from version >= 0.26.0 (pyo3 compatibility) # https://pypi.org/project/rpds-py/#history - package_name: 'rpds_py' diff --git a/repair_wheels.py b/repair_wheels.py index 8afab4f..6ab90bd 100644 --- a/repair_wheels.py +++ b/repair_wheels.py @@ -16,11 +16,45 @@ import subprocess from pathlib import Path +from typing import List +from typing import Set +from typing import Tuple from typing import Union from colorama import Fore from _helper_functions import print_color +from _helper_functions import wheel_archive_is_readable + + +def _stderr_indicates_bad_zip(error_msg: str) -> bool: + """True if repair tool output indicates an unreadable/corrupt zip archive.""" + if not error_msg: + return False + return ( + "BadZipFile" in error_msg + or "Bad magic number for central directory" in error_msg + or "File is not a zip file" in error_msg + ) + + +def _dedupe_wheel_paths(wheels_dir: Path) -> List[Path]: + """Collect *.whl under wheels_dir once per inode (rglob can list the same file twice via symlinks).""" + wheels: List[Path] = [] + seen: Set[Tuple[int, int]] = set() + for p in sorted(wheels_dir.rglob("*.whl")): + try: + if not p.is_file(): + continue + st = p.stat() + key = (st.st_dev, st.st_ino) + except OSError: + continue + if key in seen: + continue + seen.add(key) + wheels.append(p) + return wheels def get_platform() -> str: @@ -90,7 +124,7 @@ def fix_universal2_wheel_name(wheel_path: Path, error_msg: str) -> Union[Path, s if "'arm64,x86_64'" in error_msg or "'x86_64,arm64'" in error_msg: # 
Missing BOTH architectures - wheel is corrupted, delete it print_color(" -> Deleting corrupted wheel (missing native binaries for all architectures)", Fore.RED) - wheel_path.unlink() + wheel_path.unlink(missing_ok=True) return "delete" elif "'x86_64'" in error_msg: # Missing x86_64, so it only has arm64 @@ -137,8 +171,8 @@ def main() -> None: temp_dir: Path = Path("./temp_repair") temp_dir.mkdir(exist_ok=True) - # Find all wheel files - wheels: list[Path] = list(wheels_dir.rglob("*.whl")) + # Find all wheel files (dedupe: same inode can appear twice via symlinks / layout quirks) + wheels: list[Path] = _dedupe_wheel_paths(wheels_dir) if not wheels: print_color(f"No wheels found in {wheels_dir} - nothing to repair", Fore.YELLOW) @@ -186,6 +220,14 @@ def main() -> None: skipped_count += 1 continue + # PEP 427: wheels are zip files; truncated/corrupt CI artifacts may pass is_zipfile + # but fail on central directory (delocate: BadZipFile). + if not wheel_archive_is_readable(wheel): + print_color(" -> Deleting file (not a valid / readable zip wheel archive)", Fore.RED) + wheel.unlink(missing_ok=True) + deleted_count += 1 + continue + # Clean temp directory for old_wheel in temp_dir.glob("*.whl"): old_wheel.unlink() @@ -210,6 +252,15 @@ def main() -> None: # Check for errors error_msg = result.stderr.strip() if result.stderr else "" + # Corrupt zip / bad central directory (delocate opens the wheel as a zip) + if _stderr_indicates_bad_zip(error_msg): + print_color(" -> Deleting file (repair tool reported corrupt zip archive)", Fore.RED) + for old_wheel in temp_dir.glob("*.whl"): + old_wheel.unlink() + wheel.unlink(missing_ok=True) + deleted_count += 1 + continue + # Special handling for incorrectly tagged universal2 wheels on macOS if ( current_platform == "Darwin" @@ -247,7 +298,7 @@ def main() -> None: and "This does not look like a platform wheel, no ELF executable" in error_msg ): print_color(" -> Deleting corrupted wheel", Fore.RED) - wheel.unlink() + 
wheel.unlink(missing_ok=True) deleted_count += 1 continue @@ -257,6 +308,12 @@ def main() -> None: # manylinux wheel can't find its libraries # it means it was already properly repaired or ("manylinux" in wheel.name and "could not be located" in error_msg) + # ARMv7 CI runs under QEMU; auditwheel may fail libc detection on abi3/native .so + or ( + current_platform == "Linux" + and current_arch == "armv7l" + and ("InvalidLibc" in error_msg or "couldn't detect libc" in error_msg) + ) ) has_error = ( @@ -278,6 +335,15 @@ def main() -> None: print_color(" -> Keeping original wheel (build issue: needs older toolchain)", Fore.YELLOW) elif "manylinux" in wheel.name and "could not be located" in error_msg: print_color(" -> Keeping original wheel (already bundled from PyPI)", Fore.GREEN) + elif ( + current_platform == "Linux" + and current_arch == "armv7l" + and ("InvalidLibc" in error_msg or "couldn't detect libc" in error_msg) + ): + print_color( + " -> Keeping original wheel (auditwheel libc detection failed on ARMv7 runner; often QEMU)", + Fore.YELLOW, + ) skipped_count += 1 elif has_error: # Actual error occurred (even if a wheel was created, it may be broken) @@ -294,15 +360,22 @@ def main() -> None: if repaired: # A repaired wheel was created successfully if repaired.name != wheel.name: - wheel.unlink() # Remove original - repaired.rename(wheel.parent / repaired.name) + wheel.unlink(missing_ok=True) # Remove original + final_path = wheel.parent / repaired.name + repaired.rename(final_path) print_color(f" -> Replaced with repaired wheel: {repaired.name}", Fore.GREEN) else: # Name unchanged - wheel.unlink() + wheel.unlink(missing_ok=True) repaired.rename(wheel) + final_path = wheel print_color(f" -> Repaired successfully: {repaired.name}", Fore.GREEN) - repaired_count += 1 + if not wheel_archive_is_readable(final_path): + print_color(" -> Deleting repaired output (not a valid / readable zip archive)", Fore.RED) + final_path.unlink(missing_ok=True) + deleted_count += 1 
+ else: + repaired_count += 1 elif result.returncode == 0: # No repaired wheel created, but command succeeded (already compatible) print_color(" -> Keeping original wheel (already compatible)", Fore.GREEN) diff --git a/test_build_wheels.py b/test_build_wheels.py index be1f04d..ba6af76 100644 --- a/test_build_wheels.py +++ b/test_build_wheels.py @@ -5,10 +5,12 @@ # # SPDX-License-Identifier: Apache-2.0 # +import os import sys import unittest from pathlib import Path +from typing import Optional from unittest.mock import patch # Add parent directory to path for imports @@ -16,13 +18,30 @@ from packaging.requirements import Requirement +from _helper_functions import current_interpreter_satisfies_requires_python +from _helper_functions import filter_requirements_by_pypi_requires_python from _helper_functions import get_no_binary_args from _helper_functions import merge_requirements +from _helper_functions import pypi_requires_python_preflight_skip from build_wheels import _add_into_requirements from build_wheels import get_used_idf_branches from yaml_list_adapter import YAMLListAdapter +def requirement_exact_pin_version(req: Requirement) -> Optional[str]: + """Mirror of former production helper: single non-wildcard ``==`` pin only (used by tests).""" + specs = list(req.specifier) + if len(specs) != 1: + return None + spec = specs[0] + if spec.operator != "==": + return None + ver = str(spec.version) + if ver.endswith(".*"): + return None + return ver + + class TestChangeSpecifierLogic(unittest.TestCase): """Test the _change_specifier_logic method.""" @@ -122,6 +141,22 @@ def test_exclude_version(self): result = self.adapter._yaml_to_requirement(yaml_list, exclude=True) self.assertEqual(result, {Requirement("numpy>=1.20")}) + def test_exclude_platform_and_python_intersection_single_os(self): + """exclude + platform + python (no package version) = drop only on that OS ∩ Python.""" + yaml_list = [{"package_name": "pydantic_core", "platform": "win32", "python": "==3.14"}] 
+ result = self.adapter._yaml_to_requirement(yaml_list, exclude=True) + expected = Requirement("pydantic_core; (sys_platform != 'win32' or (python_version != '3.14'))") + self.assertEqual(result, {expected}) + + def test_exclude_platform_and_python_intersection_two_os(self): + yaml_list = [{"package_name": "pydantic_core", "platform": ["win32", "darwin"], "python": "==3.14"}] + result = self.adapter._yaml_to_requirement(yaml_list, exclude=True) + expected = Requirement( + "pydantic_core; (sys_platform != 'win32' or (python_version != '3.14')) and " + "(sys_platform != 'darwin' or (python_version != '3.14'))" + ) + self.assertEqual(result, {expected}) + class TestYAMLListAdapterIntegration(unittest.TestCase): """Integration tests using actual YAML files.""" @@ -426,5 +461,97 @@ def test_returns_empty_for_non_source_build_package(self, mock_system): self.assertEqual(result, []) +class TestPypiRequiresPythonPreflight(unittest.TestCase): + """PyPI Requires-Python preflight (specifier + project index).""" + + def setUp(self): + import _helper_functions + + _helper_functions._PYPI_REQUIRES_PYTHON_CACHE.clear() + _helper_functions._PYPI_PROJECT_JSON_CACHE.clear() + self._saved_skip_check = os.environ.pop("SKIP_PYPI_REQUIRES_PYTHON_CHECK", None) + + def tearDown(self): + if self._saved_skip_check is not None: + os.environ["SKIP_PYPI_REQUIRES_PYTHON_CHECK"] = self._saved_skip_check + + def test_requirement_exact_pin_version(self): + self.assertEqual(requirement_exact_pin_version(Requirement("foo==1.0")), "1.0") + self.assertIsNone(requirement_exact_pin_version(Requirement("foo>=1.0"))) + self.assertIsNone(requirement_exact_pin_version(Requirement("foo==1.*"))) + self.assertIsNone(requirement_exact_pin_version(Requirement("foo>1,<2"))) + + def test_current_interpreter_satisfies_requires_python(self): + self.assertTrue(current_interpreter_satisfies_requires_python(None)) + self.assertTrue(current_interpreter_satisfies_requires_python("")) + 
self.assertTrue(current_interpreter_satisfies_requires_python(">=3.8")) + self.assertFalse(current_interpreter_satisfies_requires_python(">999.0.0")) + + @patch.dict(os.environ, {"SKIP_PYPI_REQUIRES_PYTHON_CHECK": "1"}, clear=False) + def test_preflight_disabled_by_env(self): + req = Requirement("idf-component-manager==3.0.0") + skip, reason = pypi_requires_python_preflight_skip(req) + self.assertFalse(skip) + self.assertEqual(reason, "") + + @patch.dict(os.environ, {"SKIP_PYPI_REQUIRES_PYTHON_CHECK": "1"}, clear=False) + @patch("_helper_functions.print_color") + def test_filter_noop_when_env_disabled(self, _mock_print): + s = {Requirement("a==1"), Requirement("b==2")} + self.assertEqual(filter_requirements_by_pypi_requires_python(s), s) + + @patch("_helper_functions.fetch_pypi_project_json", return_value={"releases": {"3.0.0": []}}) + @patch("_helper_functions.current_interpreter_satisfies_requires_python", return_value=False) + @patch("_helper_functions.fetch_pypi_release_requires_python", return_value=">=3.10") + def test_preflight_skips_when_requires_python_excludes(self, _mock_rel, _mock_sat, _mock_proj): + req = Requirement("idf-component-manager==3.0.0") + skip, reason = pypi_requires_python_preflight_skip(req) + self.assertTrue(skip) + self.assertIn("3.0.0", reason) + + @patch("_helper_functions.fetch_pypi_project_json", return_value={"releases": {"3.0.0": []}}) + @patch("_helper_functions.current_interpreter_satisfies_requires_python", return_value=True) + @patch("_helper_functions.fetch_pypi_release_requires_python", return_value=">=3.10") + def test_preflight_keeps_when_compatible(self, _mock_rel, _mock_sat, _mock_proj): + req = Requirement("idf-component-manager==3.0.0") + skip, _ = pypi_requires_python_preflight_skip(req) + self.assertFalse(skip) + + @patch("_helper_functions.fetch_pypi_project_json", return_value=None) + @patch("_helper_functions.fetch_pypi_release_requires_python") + def test_preflight_no_skip_when_project_json_unavailable(self, 
mock_release, _mock_proj): + skip, _ = pypi_requires_python_preflight_skip(Requirement("idf-component-manager>=2")) + self.assertFalse(skip) + mock_release.assert_not_called() + + @patch("_helper_functions.fetch_pypi_project_json", return_value={"releases": {"3.0.0": [], "2.4.9": []}}) + @patch("_helper_functions.current_interpreter_satisfies_requires_python", return_value=False) + @patch("_helper_functions.fetch_pypi_release_requires_python", return_value=">=3.10") + def test_preflight_skips_compatible_release_spec(self, _mock_rel, _mock_sat, _mock_proj): + skip, reason = pypi_requires_python_preflight_skip(Requirement("idf-component-manager~=3.0")) + self.assertTrue(skip) + self.assertIn("3.0.0", reason) + + @patch("_helper_functions.fetch_pypi_project_json", return_value={"releases": {"1.0.0": []}}) + @patch("_helper_functions.fetch_pypi_release_requires_python", return_value=None) + def test_preflight_keeps_when_pypi_has_no_requires_python(self, _mock_fetch, _mock_proj): + skip, _ = pypi_requires_python_preflight_skip(Requirement("somepkg==1.0.0")) + self.assertFalse(skip) + + @patch("_helper_functions.print_color") + def test_filter_requirements_drops_one(self, _mock_print): + r_bad = Requirement("idf-component-manager==3.0.0") + r_good = Requirement("requests==2.0.0") + + def _skip(req): + if req.name == "idf-component-manager": + return (True, "incompatible") + return (False, "") + + with patch("_helper_functions.pypi_requires_python_preflight_skip", side_effect=_skip): + out = filter_requirements_by_pypi_requires_python({r_bad, r_good}) + self.assertEqual(out, {r_good}) + + if __name__ == "__main__": unittest.main() diff --git a/test_wheels_install.py b/test_wheels_install.py index 6531d1f..5ac182b 100644 --- a/test_wheels_install.py +++ b/test_wheels_install.py @@ -8,6 +8,10 @@ This script finds and installs wheels compatible with the current Python version, verifying that wheel files are valid and platform-compatible. 
It also checks wheels against exclude_list.yaml and removes incompatible ones. + +Wheels are ZIP archives (PEP 427). pip opens them with the zipfile module; a +BadZipFile / "Bad magic number" error means the bytes on disk are not a valid +ZIP (truncated, corrupted, or not a wheel), not that ".whl" was mistaken for ".zip". """ from __future__ import annotations @@ -24,6 +28,7 @@ from _helper_functions import get_current_platform from _helper_functions import print_color from _helper_functions import should_exclude_wheel +from _helper_functions import wheel_archive_is_readable from yaml_list_adapter import YAMLListAdapter WHEELS_DIR = Path("./downloaded_wheels") @@ -141,6 +146,29 @@ def is_compatibility_error(error_message: str) -> bool: return any(err in error_message for err in compatibility_errors) +def is_corrupt_wheel_archive_error(error_message: str) -> bool: + """True if pip failed because the file is not a readable ZIP / wheel archive.""" + if not error_message: + return False + # pip.exceptions.InvalidWheel -> "Wheel 'pkg' located at is invalid." + if "Wheel '" in error_message and " is invalid." 
in error_message: + return True + markers = ( + "BadZipFile", + "Bad magic number for file header", + "Bad magic number for central directory", + "has an invalid wheel", + "zipfile.BadZipFile", + ) + return any(m in error_message for m in markers) + + +def discard_corrupt_wheel(wheel_path: Path, note: str) -> None: + """Remove wheel from the test tree and print a single-line warning.""" + wheel_path.unlink(missing_ok=True) + print_color(f"-- {wheel_path.name} ({note})", Fore.YELLOW) + + def main() -> int: python_version_tag = get_python_version_tag() python_version = f"{sys.version_info.major}.{sys.version_info.minor}" @@ -187,12 +215,21 @@ def main() -> int: installed = 0 failed = 0 deleted = 0 + discarded_corrupt = 0 failed_wheels = [] deleted_wheels = [] print_color("---------- INSTALL WHEELS ----------") for wheel_path in wheels_to_install: + if not wheel_archive_is_readable(wheel_path): + discarded_corrupt += 1 + discard_corrupt_wheel( + wheel_path, + "unreadable / corrupt zip — not a valid wheel archive (PEP 427)", + ) + continue + success, error_message = install_wheel(wheel_path) if success: @@ -204,6 +241,11 @@ def main() -> int: deleted_wheels.append(wheel_path.name) wheel_path.unlink() print_color(f"-- {wheel_path.name} (compatibility constraint)", Fore.YELLOW) + elif is_corrupt_wheel_archive_error(error_message): + # Truncated/corrupt artifact or bad repair output; drop from this test artifact + # so CI can continue (see module docstring). 
+ discarded_corrupt += 1 + discard_corrupt_wheel(wheel_path, "invalid / corrupt zip (pip could not read wheel)") else: failed += 1 failed_wheels.append((wheel_path.name, error_message)) @@ -221,6 +263,11 @@ def main() -> int: print_color(f"Excluded {excluded} wheels (exclude_list.yaml)", Fore.YELLOW) if deleted > 0: print_color(f"Deleted {deleted} wheels (compatibility constraint)", Fore.YELLOW) + if discarded_corrupt > 0: + print_color( + f"Discarded {discarded_corrupt} wheels (invalid or corrupt zip archive)", + Fore.YELLOW, + ) if failed > 0: print_color(f"Failed {failed} wheels", Fore.RED) diff --git a/yaml_list_adapter.py b/yaml_list_adapter.py index 5cc3ee8..b043442 100644 --- a/yaml_list_adapter.py +++ b/yaml_list_adapter.py @@ -138,11 +138,33 @@ def _change_specifier_logic(self, spec_with_text: str) -> tuple: break return (new_ver_spec, text, ver_specifier) + def _python_version_marker_fragment_no_package_version(self, package_python, exclude: bool) -> str: + """Build ``python_version ...`` marker fragment from YAML ``python`` when there is no package ``version``. + + If ``python`` is a list, each element becomes a fragment and fragments are joined with **and**. + """ + if not isinstance(package_python, list): + new_spec, text_after, old_spec = self._change_specifier_logic(package_python) + spec = new_spec if exclude else old_spec + return f"python_version {spec} '{text_after}'" + parts = [] + for elem in package_python: + new_spec, text_after, old_spec = self._change_specifier_logic(elem) + spec = new_spec if exclude else old_spec + parts.append(f"python_version {spec} '{text_after}'") + return " and ".join(parts) + def _yaml_to_requirement(self, yaml: list, exclude: bool = False) -> set: """Converts YAML defined requirement into packaging.requirements Requirement which can be directly used with pip. - Markers (platform and python) are ANDed between and multiple values of the marker are ORed between. 
+ Within one YAML row, ``platform`` and ``python`` are combined with **and**. Multiple entries in a
+ **platform** list are **or**ed (match any listed platform). Multiple entries in a **python** list
+ are **and**ed (all listed python constraints must hold).
+ For exclude=True **without** a package ``version``, platform + python mean “exclude on this OS **and**
+ this Python” (intersection): the keep-marker is ``(sys_platform != p or <inverted python marker>)`` per
+ platform, ANDed across listed platforms (De Morgan). Rows **with** a package version keep the
+ split-requirement behaviour documented below.

 When exclude is set to True, the logic of the Requirement
 is changed to be excluded by pip. To preserve the logic,
 another requirement needs to be added
@@ -187,6 +209,15 @@ def _yaml_to_requirement(self, yaml: list, exclude: bool = False) -> set:
 package_platform = ""

 package_python = package["python"] if "python" in package else ""

+ # Intersection exclude: "drop on (platform ∈ P) ∧ (python matches)" without a package version.
+ # Previous AND of inverted markers wrongly dropped e.g. Linux + same Python.
+ if exclude and package_platform and package_python and not package_version: + py_frag = self._python_version_marker_fragment_no_package_version(package_python, exclude=True) + plfs = list(package_platform) if isinstance(package_platform, list) else [package_platform] + terms = [f"(sys_platform != '{plf}' or ({py_frag}))" for plf in plfs] + requirements_set.add(Requirement(f"{package['package_name']}; " + " and ".join(terms))) + continue + requirement_str_list = [f"{package['package_name']}"] # if package has version specifier, process it and add to the requirement @@ -254,26 +285,9 @@ def _yaml_to_requirement(self, yaml: list, exclude: bool = False) -> set: # if package has python markers defined, add it to the requirement if package_python and not package_version: - if not isinstance(package_python, list): - new_spec, text_after, old_spec = self._change_specifier_logic(package_python) - requirement_str_list.append( - ( - f"python_version {new_spec} '{text_after}'" - if exclude - else f"python_version {old_spec} '{text_after}'" - ) - ) - - else: # list of python versions defined - python_list = [] - for elem in package_python: - new_spec, text_after, old_spec = self._change_specifier_logic(elem) - if exclude: - python_list.append(f"python_version {new_spec} '{text_after}'") - else: - python_list.append(f"python_version {old_spec} '{text_after}'") - - requirement_str_list.append(" and ".join(python_list)) + requirement_str_list.append( + self._python_version_marker_fragment_no_package_version(package_python, exclude) + ) if package_python and package_version: if not isinstance(package_python, list):