diff --git a/.github/workflows/check_scrapers.yml b/.github/workflows/check_scrapers.yml
index 65f16b6..b15850f 100644
--- a/.github/workflows/check_scrapers.yml
+++ b/.github/workflows/check_scrapers.yml
@@ -2,7 +2,7 @@ name: Check Scrapers
 on:
   schedule:
-    - cron: "0 1 * * *"
+    - cron: 0 1 * * *
 jobs:
   test_scripts:
     runs-on: ubuntu-latest
diff --git a/.github/workflows/tox.yml b/.github/workflows/tox.yml
index d483857..82b52dc 100644
--- a/.github/workflows/tox.yml
+++ b/.github/workflows/tox.yml
@@ -21,7 +21,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
+        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
     steps:
     - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
diff --git a/pyproject.toml b/pyproject.toml
index 66aac54..4581df1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -10,7 +10,13 @@ authors = [
 ]
 description = "Simple tool to check if python version is past EOL"
 readme = "README.md"
-requires-python = ">=3.7"
+requires-python = ">=3.8"
+dependencies = [
+    "appdirs",
+    "requests",
+    "setuptools",
+    "beautifulsoup4",
+]
 classifiers = [
     "License :: OSI Approved :: MIT License",
     "Programming Language :: Python :: 3",
@@ -23,6 +29,13 @@ classifiers = [
 [project.scripts]
 eol = "python_eol.main:main"
 
+[project.optional-dependencies]
+test = [
+    "pytest",
+    "freezegun",
+    "pytest-cov",
+]
+
 [tool.setuptools]
 packages = ["python_eol"]
 
@@ -59,8 +72,12 @@ exclude_lines = ['^if __name__ == "__main__":$', 'raise SystemExit(.+)$']
 python_version = "3.11"
 strict = true
 
+[[tool.mypy.overrides]]
+module = "appdirs"
+ignore_missing_imports = true
+
 ##### black #####
 [tool.black]
-target_version = ["py37", "py38", "py39", "py310", "py311"]
+target_version = ["py38", "py39", "py310", "py311", "py312", "py313"]
 include = '\.py$'
 exclude = '.+\.json'
diff --git a/python_eol/__init__.py b/python_eol/__init__.py
index d3d57c7..5745236 100644
--- a/python_eol/__init__.py
+++ b/python_eol/__init__.py
@@ -1,2 +1,3 @@
 """Top-level module for python-eol."""
+
 from __future__ import annotations
diff --git a/python_eol/cache.py b/python_eol/cache.py
new file mode 100644
index 0000000..41987df
--- /dev/null
+++ b/python_eol/cache.py
@@ -0,0 +1,135 @@
+"""Cache management for python-eol."""
+
+from __future__ import annotations
+
+import json
+import logging
+from datetime import datetime, timedelta
+from pathlib import Path
+from typing import Any
+
+import appdirs
+import requests
+from bs4 import BeautifulSoup
+
+logger = logging.getLogger(__name__)
+
+CACHE_DIR = Path(appdirs.user_cache_dir("python-eol"))
+CACHE_FILE = CACHE_DIR / "eol_data.json"
+CACHE_FILE_NEP = CACHE_DIR / "eol_data_nep.json"
+CACHE_EXPIRY = timedelta(days=31)
+
+
+def _fetch_eol_data() -> list[dict[str, Any]] | None:
+    """Fetch EOL data from the API."""
+    api_url = "https://endoflife.date/api/python.json"
+    try:
+        response = requests.get(api_url, timeout=10)
+        response.raise_for_status()
+        data = response.json()
+    except requests.RequestException as e:
+        logger.warning(f"Failed to fetch EOL data: {e}")
+        return None
+
+    processed_data = []
+    for entry in data:
+        raw_version = entry["latest"]
+        major_minor_parts = raw_version.split(".")[:2]
+        parsed_version = ".".join(major_minor_parts)
+        end_of_life_date = datetime.strptime(entry["eol"], "%Y-%m-%d").date()
+        entry_data = {"Version": parsed_version, "End of Life": str(end_of_life_date)}
+        processed_data.append(entry_data)
+    return processed_data
+
+
+def _fetch_nep_data() -> list[dict[str, Any]] | None:
+    """Fetch NEP 29 EOL data."""
+    url = "https://numpy.org/neps/nep-0029-deprecation_policy.html#support-table"
+    try:
+        response = requests.get(url, timeout=10)
+        response.raise_for_status()
+    except requests.RequestException as e:
+        logger.warning(f"Failed to fetch NEP data: {e}")
+        return None
+
+    soup = BeautifulSoup(response.content, "html.parser")
+    table = soup.find("table")
+
+    if table is None or isinstance(table, str):
+        return None
+
+    data: list[dict[str, Any]] = []
+    for row in table.find_all("tr")[1:]:
+        columns = row.find_all("td")
+        end_of_life = columns[0].text.strip()
+        version = columns[1].text.strip().rstrip("+")
+        # NEP 29 rows list the oldest *supported* minor ("X.Y+"); the minor
+        # below it is the one that reaches end of life on that date.
+        version_number = version.split(".")
+        version_number[-1] = str(int(version_number[-1]) - 1)
+        parsed_version = ".".join(version_number)
+        end_of_life_date = datetime.strptime(end_of_life, "%b %d, %Y").date()
+
+        existing_data = next((d for d in data if d["Version"] == parsed_version), None)
+        if existing_data:
+            existing_data["End of Life"] = min(
+                existing_data["End of Life"],
+                str(end_of_life_date),
+            )
+        else:
+            row_data = {
+                "Version": parsed_version,
+                "End of Life": str(end_of_life_date),
+            }
+            data.append(row_data)
+    return data
+
+
+def _read_cache(*, nep_mode: bool = False) -> list[dict[str, Any]] | None:
+    """Read EOL data from cache."""
+    cache_file = CACHE_FILE_NEP if nep_mode else CACHE_FILE
+    if not cache_file.exists():
+        return None
+
+    is_expired = (
+        datetime.fromtimestamp(cache_file.stat().st_mtime)
+        < datetime.now() - CACHE_EXPIRY
+    )
+    if is_expired:
+        logger.debug("Cache is expired.")
+        return None
+
+    try:
+        with cache_file.open() as f:
+            return json.load(f)  # type: ignore[no-any-return]
+    except (OSError, json.JSONDecodeError) as e:
+        logger.warning(f"Failed to read cache: {e}")
+        return None
+
+
+def _write_cache(data: list[dict[str, Any]], *, nep_mode: bool = False) -> None:
+    """Write EOL data to cache."""
+    cache_file = CACHE_FILE_NEP if nep_mode else CACHE_FILE
+    try:
+        CACHE_DIR.mkdir(parents=True, exist_ok=True)
+        with cache_file.open("w") as f:
+            json.dump(data, f, indent=4)
+    except OSError as e:
+        logger.warning(f"Failed to write cache: {e}")
+
+
+def get_eol_data(*, nep_mode: bool = False) -> list[dict[str, Any]] | None:
+    """Get EOL data from the cache, fetching fresh data when it is missing or stale."""
+    cached_data = _read_cache(nep_mode=nep_mode)
+    if cached_data:
+        logger.debug("Using cached EOL data.")
+        return cached_data
+
+    logger.debug("Fetching new EOL data.")
+    fetch_function = _fetch_nep_data if nep_mode else _fetch_eol_data
+    fetched_data = fetch_function()
+    if fetched_data:
+        _write_cache(fetched_data, nep_mode=nep_mode)
+        return fetched_data
+
+    return None
diff --git a/python_eol/main.py b/python_eol/main.py
index 2116897..8cf598e 100644
--- a/python_eol/main.py
+++ b/python_eol/main.py
@@ -1,15 +1,23 @@
 """python-eol checks if the current running python version is (close) to end of life."""
+
 from __future__ import annotations
 
 import argparse
 import json
 import logging
 import platform
+import sys
 from datetime import date
 from pathlib import Path
 from typing import Any
 
+if sys.version_info >= (3, 9):
+    import importlib.resources
+else:
+    import pkg_resources
+
 from ._docker_utils import _extract_python_version_from_docker_file, _find_docker_files
+from .cache import get_eol_data
 
 EOL_WARN_DAYS = 60
 
@@ -22,20 +30,12 @@ def _get_major_minor() -> str:
 
 
 def _get_db_file_path(*, nep_mode: bool = False) -> Path:
-    major, minor, _ = platform.python_version_tuple()
     filename = "db.json" if not nep_mode else "db_nep.json"
-    if int(major) == 3 and int(minor) >= 9:  # noqa: PLR2004
-        import importlib.resources
-
+    if sys.version_info >= (3, 9):
         data_path = importlib.resources.files("python_eol")
-        db_file = f"{data_path}/{filename}"
+        db_file = str(data_path.joinpath(filename))
     else:
-        import pkg_resources  # pragma: no cover
-
-        db_file = pkg_resources.resource_filename(
-            "python_eol",
-            filename,
-        )  # pragma: no cover
+        db_file = pkg_resources.resource_filename("python_eol", filename)
     return Path(db_file)
 
 
@@ -47,7 +47,11 @@ def _check_eol(
     fail_close_to_eol: bool = False,
     prefix: str = "",
 ) -> int:
-    my_version_info = version_info[python_version]
+    my_version_info = version_info.get(python_version)
+    if not my_version_info:
+        logger.warning(f"Could not find EOL information for Python {python_version}")
+        return 0
+
     today = date.today()
     eol_date = date.fromisoformat(my_version_info["End of Life"])
     time_to_eol = eol_date - today
@@ -76,9 +80,12 @@ def _check_python_eol(
     check_docker_files: bool = False,
     nep_mode: bool = False,
 ) -> int:
-    db_file = _get_db_file_path(nep_mode=nep_mode)
-    with db_file.open() as f:
-        eol_data = json.load(f)
+    eol_data = get_eol_data(nep_mode=nep_mode)
+    if eol_data is None:
+        logger.debug("Falling back to packaged EOL data.")
+        db_file = _get_db_file_path(nep_mode=nep_mode)
+        with db_file.open() as f:
+            eol_data = json.load(f)
 
     version_info = {entry["Version"]: entry for entry in eol_data}
 
diff --git a/scripts/eol_scraper.py b/scripts/eol_scraper.py
deleted file mode 100644
index ef041f7..0000000
--- a/scripts/eol_scraper.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import json
-from datetime import datetime
-
-import requests
-
-# URL of the API
-api_url = "https://endoflife.date/api/python.json"
-
-# Send a GET request to the API
-response = requests.get(api_url)
-
-# Parse the JSON response
-data = json.loads(response.content)
-
-# Initialize an empty list to store the processed data
-processed_data = []
-
-# Iterate over the entries in the API response
-for entry in data:
-    raw_version = entry["latest"]
-    # Strip out the patch part of the version
-    major_minor_parts = raw_version.split(".")[:2]
-    parsed_version = ".".join(major_minor_parts)
-
-    # Convert end_of_life to datetime object
-    end_of_life_date = datetime.strptime(entry["eol"], "%Y-%m-%d").date()
-
-    # Create a new dictionary for the entry data
-    entry_data = {"Version": parsed_version, "End of Life": end_of_life_date}
-
-    # Append the entry data to the list
-    processed_data.append(entry_data)
-
-# Convert the processed data list to JSON format
-json_data = json.dumps(processed_data, indent=4, default=str)
-
-# Print the JSON data
-print(json_data)
diff --git a/tests/test_cache.py b/tests/test_cache.py
new file mode 100644
index 0000000..e7af1d8
--- /dev/null
+++ b/tests/test_cache.py
@@ -0,0 +1,115 @@
+from __future__ import annotations
+
+import json
+from datetime import datetime
+from typing import TYPE_CHECKING, Generator
+from unittest import mock
+
+import pytest
+
+if TYPE_CHECKING:
+    from pathlib import Path
+import requests
+from freezegun import freeze_time
+
+from python_eol.cache import (
+    CACHE_EXPIRY,
+    _fetch_eol_data,
+    _read_cache,
+    _write_cache,
+    get_eol_data,
+)
+
+FAKE_EOL_DATA = [{"Version": "3.9", "End of Life": "2025-10-01"}]
+
+
+@pytest.fixture
+def mock_cache_file(tmp_path: Path) -> Generator[Path, None, None]:
+    """Mock the cache file and its directory."""
+    cache_dir = tmp_path / "python-eol"
+    cache_file = cache_dir / "eol_data.json"
+    with mock.patch("python_eol.cache.CACHE_DIR", cache_dir), mock.patch(
+        "python_eol.cache.CACHE_FILE",
+        cache_file,
+    ):
+        yield cache_file
+
+
+def test_fetch_eol_data_success() -> None:
+    """Test fetching EOL data successfully."""
+    with mock.patch("requests.get") as mock_get:
+        mock_get.return_value.raise_for_status.return_value = None
+        mock_get.return_value.json.return_value = [
+            {"latest": "3.9.0", "eol": "2025-10-01"},
+        ]
+        data = _fetch_eol_data()
+    assert data == FAKE_EOL_DATA
+
+
+def test_fetch_eol_data_failure() -> None:
+    """Test fetching EOL data with a request failure."""
+    with mock.patch(
+        "requests.get",
+        side_effect=requests.RequestException("API is down"),
+    ):
+        data = _fetch_eol_data()
+    assert data is None
+
+
+def test_read_write_cache(mock_cache_file: Path) -> None:
+    """Test writing to and reading from the cache."""
+    _write_cache(FAKE_EOL_DATA)
+    assert mock_cache_file.exists()
+    with mock_cache_file.open() as f:
+        data = json.load(f)
+    assert data == FAKE_EOL_DATA
+
+    read_data = _read_cache()
+    assert read_data == FAKE_EOL_DATA
+
+
+@pytest.mark.usefixtures("mock_cache_file")
+def test_read_cache_expired() -> None:
+    """Test that an expired cache returns None."""
+    _write_cache(FAKE_EOL_DATA)
+    with freeze_time(datetime.now() + CACHE_EXPIRY + CACHE_EXPIRY):
+        assert _read_cache() is None
+
+
+@pytest.mark.usefixtures("mock_cache_file")
+def test_read_cache_not_found() -> None:
+    """Test that a non-existent cache returns None."""
+    assert _read_cache() is None
+
+
+@pytest.mark.usefixtures("mock_cache_file")
+def test_get_eol_data_from_cache() -> None:
+    """Test get_eol_data reads from a valid cache."""
+    _write_cache(FAKE_EOL_DATA)
+    with mock.patch("python_eol.cache._fetch_eol_data") as mock_fetch:
+        data = get_eol_data()
+    mock_fetch.assert_not_called()
+    assert data == FAKE_EOL_DATA
+
+
+@pytest.mark.usefixtures("mock_cache_file")
+def test_get_eol_data_fetches_when_cache_is_stale() -> None:
+    """Test get_eol_data fetches new data when cache is stale."""
+    _write_cache(FAKE_EOL_DATA)
+    with freeze_time(
+        datetime.now() + CACHE_EXPIRY + CACHE_EXPIRY,
+    ), mock.patch("python_eol.cache._fetch_eol_data") as mock_fetch:
+        mock_fetch.return_value = [{"Version": "3.10", "End of Life": "2026-10-01"}]
+        data = get_eol_data()
+    mock_fetch.assert_called_once()
+    assert data == [{"Version": "3.10", "End of Life": "2026-10-01"}]
+
+
+@pytest.mark.usefixtures("mock_cache_file")
+def test_get_eol_data_fetches_when_no_cache() -> None:
+    """Test get_eol_data fetches new data when no cache exists."""
+    with mock.patch("python_eol.cache._fetch_eol_data") as mock_fetch:
+        mock_fetch.return_value = FAKE_EOL_DATA
+        data = get_eol_data()
+    mock_fetch.assert_called_once()
+    assert data == FAKE_EOL_DATA
diff --git a/tests/test_docker_utils.py b/tests/test_docker_utils.py
index 0ddb417..41f10b0 100644
--- a/tests/test_docker_utils.py
+++ b/tests/test_docker_utils.py
@@ -1,12 +1,10 @@
 from __future__ import annotations
 
 from pathlib import Path
-from typing import TYPE_CHECKING, Any
-from unittest import mock
+from typing import TYPE_CHECKING
 
 import pytest
 
-import python_eol
 from python_eol._docker_utils import (
     _extract_python_version_from_docker_file,
     _find_docker_files,
@@ -18,28 +16,15 @@ class TestPath(Path):
     """Class to make MyPy happy (hack!)."""
 
 
-@pytest.fixture()
-def test_path_class(tmpdir: Path) -> type[TestPath]:
-    class TestPath(type(Path())):  # type: ignore[misc]
-        def __new__(
-            cls: type[TestPath],
-            *pathsegments: list[Path],
-        ) -> Any:  # noqa: ANN401
-            return super().__new__(cls, *[tmpdir, *pathsegments])
-
-    return TestPath
-
-
-def test_find_docker_files(tmpdir: Path, test_path_class: type[TestPath]) -> None:
-    p = Path(tmpdir / "Dockerfile")
+def test_find_docker_files(tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmpdir)
+    p = Path("Dockerfile")
     p.touch()
-    Path(tmpdir)
-    with mock.patch.object(
-        python_eol._docker_utils,  # noqa: SLF001
-        "Path",
-        test_path_class,
-    ):
-        assert _find_docker_files() == [p]
+    d = Path("a/b")
+    d.mkdir(parents=True)
+    p2 = d / "Dockerfile-test"
+    p2.touch()
+    assert sorted(_find_docker_files()) == sorted([p, p2])
 
 
 @pytest.mark.parametrize(
diff --git a/tests/test_main.py b/tests/test_main.py
index 712ed84..3c380b9 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -20,7 +20,7 @@
 )
 
 
-@pytest.fixture()
+@pytest.fixture
 def _mock_py37() -> Iterable[None]:
     with mock.patch("platform.python_version_tuple") as mocked_python_version_tuple:
         mocked_python_version_tuple.return_value = (3, 7, 0)
@@ -30,7 +30,15 @@ def _mock_py37() -> Iterable[None]:
 mock_py37 = pytest.mark.usefixtures("_mock_py37")
 
 
-@pytest.fixture()
+@pytest.fixture(autouse=True)
+def _mock_get_eol_data() -> Iterable[None]:
+    """Mock get_eol_data to avoid network calls."""
+    with mock.patch("python_eol.main.get_eol_data") as mocked_get_eol_data:
+        mocked_get_eol_data.return_value = None  # Fall back to the packaged db.json
+        yield
+
+
+@pytest.fixture
 def _mock_py311() -> Iterable[None]:
     with mock.patch("platform.python_version_tuple") as mocked_python_version_tuple:
         mocked_python_version_tuple.return_value = (3, 11, 0)
diff --git a/tox.ini b/tox.ini
index fe53c52..ba79cf7 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
 [tox]
 skip_missing_interpreters = {env:TOX_SKIP_MISSING_INTERPRETERS:True}
-envlist = py{3.7,3.8,3.9,3.10,3.11},lint
+envlist = py{3.8,3.9,3.10,3.11,3.12,3.13},lint
 isolated_build = True
 
 [testenv]
@@ -9,15 +9,19 @@ deps =
     pytest
    coverage
    freezegun
+    requests
+    appdirs
+    setuptools
+    beautifulsoup4
 commands =
     coverage run -m pytest {posargs}
 
-[testenv:py{3.9,3.10,3.11}]
+[testenv:py{3.9,3.10,3.11,3.12,3.13}]
 commands =
     {[testenv]commands}
     coverage xml
 
-[testenv:py{3.7,3.8}]
+[testenv:py3.8]
 commands =
     {[testenv]commands}
     coverage xml
@@ -25,18 +29,7 @@ commands =
 [testenv:ruff]
 skip_install = true
 deps = ruff
-commands = ruff check python_eol/ tests/
-
-[testenv:black]
-skip_install = true
-deps = black
-commands = black --check python_eol/ tests/
-
-[testenv:flake8]
-skip_install = true
-deps = flake8
-max-line-length = 88
-commands = flake8 python_eol/ tests/
+commands = ruff check --fix python_eol/ tests/
 
 [testenv:mypy]
 skip_install = true
@@ -45,6 +38,8 @@ deps =
     types-setuptools
     types-freezegun
     pytest
+    types-requests
+    types-beautifulsoup4
 commands = mypy python_eol/ tests/
 
 [testenv:yamlfix]
@@ -51,18 +46,14 @@ [testenv:yamlfix]
 skip_install = true
 deps = yamlfix
 commands = yamlfix .
 
 [testenv:lint]
 skip_install = true
 deps =
-    {[testenv:flake8]deps}
     {[testenv:ruff]deps}
-    {[testenv:black]deps}
     {[testenv:mypy]deps}
     {[testenv:yamlfix]deps}
 commands =
-    {[testenv:flake8]commands}
     {[testenv:ruff]commands}
-    {[testenv:black]commands}
     {[testenv:mypy]commands}
     {[testenv:yamlfix]commands}
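
---

Reviewer note: the cache-or-fallback flow introduced above can be smoke-tested end to end without touching the packaged db.json. The sketch below is not part of the diff; it assumes this branch is installed in a scratch environment and uses only names defined in python_eol/cache.py above (get_eol_data, CACHE_FILE, CACHE_EXPIRY).

# smoke_test_cache.py -- illustrative only, not part of this change
import logging

from python_eol.cache import CACHE_FILE, get_eol_data

# Surface the module's logger.debug() messages so the cache path is visible.
logging.basicConfig(level=logging.DEBUG)

# First call: cache miss, so _fetch_eol_data() hits endoflife.date and the
# processed result is written to CACHE_FILE under the user cache directory.
print(get_eol_data())
print("cache written:", CACHE_FILE.exists())

# Second call: served from the on-disk cache with no network access, until
# the file is older than CACHE_EXPIRY (31 days) or is deleted. On a fetch
# failure with no cache, get_eol_data() returns None and main.py falls back
# to the packaged db.json.
data = get_eol_data()
if data is not None:
    for entry in data:
        print(entry["Version"], "->", entry["End of Life"])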