From 931042eece114cfb6a82370863f0bb8ced3b3e9c Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Thu, 5 May 2022 21:46:07 -0400 Subject: [PATCH 001/199] Update version-utils from 0.3.0 to 0.3.2 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index cd65f2c0..719fa6aa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,6 +11,6 @@ requests==2.27.1 colorama==0.4.4 djangorestframework==3.13.1 humanize==3.13.1 -version-utils==0.3.0 +version-utils==0.3.2 python-magic==0.4.25 python-memcached==1.59 From a957f83deb9fe7410ff0bbddcf09e1550add2f94 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Tue, 7 Jun 2022 18:05:15 -0400 Subject: [PATCH 002/199] Update python-magic from 0.4.25 to 0.4.27 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e20bfcc4..9615d854 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,5 +12,5 @@ colorama==0.4.4 djangorestframework==3.13.1 humanize==3.13.1 version-utils==0.3.0 -python-magic==0.4.25 +python-magic==0.4.27 python-memcached==1.59 From 8fa1fbf8405500d2cc077e7b9246b8f472dbd451 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Tue, 25 Oct 2022 01:42:28 -0400 Subject: [PATCH 003/199] Update colorama from 0.4.4 to 0.4.6 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e20bfcc4..3a526437 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,7 +8,7 @@ lxml==4.7.1 defusedxml==0.7.1 chardet==4.0.0 requests==2.27.1 -colorama==0.4.4 +colorama==0.4.6 djangorestframework==3.13.1 humanize==3.13.1 version-utils==0.3.0 From 3f3c6766576f4363b8d2a8c0c27264865bd62f2d Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Sat, 19 Nov 2022 20:17:54 -0500 Subject: [PATCH 004/199] Update python-debian from 0.1.43 to 0.1.49 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt 
b/requirements.txt index e20bfcc4..edcce3a5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ django-tagging==0.5.0 django-extensions==3.1.5 django-bootstrap3==15.0.0 progressbar==2.5 -python-debian==0.1.43 +python-debian==0.1.49 lxml==4.7.1 defusedxml==0.7.1 chardet==4.0.0 From f88d88b006d217ddacaf92bb60623f46104717c9 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Thu, 1 Dec 2022 22:44:03 -0500 Subject: [PATCH 005/199] Update chardet from 4.0.0 to 5.1.0 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e20bfcc4..beab1609 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,7 +6,7 @@ progressbar==2.5 python-debian==0.1.43 lxml==4.7.1 defusedxml==0.7.1 -chardet==4.0.0 +chardet==5.1.0 requests==2.27.1 colorama==0.4.4 djangorestframework==3.13.1 From 9f26b1e892d0b088d49fbde6a5f69bee9ecf8a35 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 5 Jun 2023 15:33:08 -0400 Subject: [PATCH 006/199] Update django-extensions from 3.1.5 to 3.2.3 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 7b9746aa..60a8659a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ Django==2.2.28 django-tagging==0.5.0 -django-extensions==3.1.5 +django-extensions==3.2.3 django-bootstrap3==15.0.0 progressbar==2.5 python-debian==0.1.43 From 41ae867cd0ac18d6ad809af7ca01a7f5e880f13b Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Tue, 6 Aug 2024 20:19:59 -0400 Subject: [PATCH 007/199] Update pyyaml from 6.0.1 to 6.0.2 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index ec368528..f49736db 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,7 +6,7 @@ progressbar==2.5 python-debian==0.1.49 lxml==4.9.4 defusedxml==0.7.1 -PyYAML==6.0.1 +PyYAML==6.0.2 chardet==4.0.0 requests==2.32.3 colorama==0.4.4 From 
1cec161a462cd8d919fcdf3cc557918dea2e815e Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Sat, 5 Oct 2024 11:15:07 -0400 Subject: [PATCH 008/199] Update humanize from 3.13.1 to 4.11.0 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index ec368528..b92a024e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,7 +12,7 @@ requests==2.32.3 colorama==0.4.4 djangorestframework==3.13.1 django-filter==21.1 -humanize==3.13.1 +humanize==4.11.0 version-utils==0.3.0 python-magic==0.4.25 pymemcache==4.0.0 From cfcc4aae649f6f1ac5bdc951d2c53c7d85ca3e2a Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 00:19:19 -0500 Subject: [PATCH 009/199] change compression format to support older rpm versions --- patchman-client.spec | 2 ++ 1 file changed, 2 insertions(+) diff --git a/patchman-client.spec b/patchman-client.spec index 8aeef6fc..4d1f7b68 100644 --- a/patchman-client.spec +++ b/patchman-client.spec @@ -10,6 +10,8 @@ Source: %{expand:%%(pwd)} BuildArch: noarch Requires: curl which coreutils util-linux +%define binary_payload w9.gzdio + %description patchman-client provides a client that uploads reports to a patchman server From bfd791e2bf8e724ae949d2404aecb109a187852d Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 05:25:28 +0000 Subject: [PATCH 010/199] auto-commit to update version skip-checks: true --- VERSION.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION.txt b/VERSION.txt index e265a8cb..ec187c44 100644 --- a/VERSION.txt +++ b/VERSION.txt @@ -1 +1 @@ -3.0.15 +3.0.16 From da76f6085c3acec76fc9c0dc222ad0a899034410 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 05:25:29 +0000 Subject: [PATCH 011/199] auto-commit to update debian changelog skip-checks: true --- debian/changelog | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/debian/changelog b/debian/changelog index fa0d8651..8d428ccb 100644 --- 
a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,10 @@ +patchman (3.0.16-1) stable; urgency=medium + + * change compression format to support older rpm versions + * auto-commit to update version skip-checks: true + + -- Marcus Furlong Wed, 26 Feb 2025 05:25:29 +0000 + patchman (3.0.15-1) stable; urgency=medium [ Vladimir Lettiev ] From ae41fe22fdbd6dcae13f4b361254ecb6bcd5a13f Mon Sep 17 00:00:00 2001 From: Hugo Deprez Date: Thu, 27 Feb 2025 14:55:53 +0100 Subject: [PATCH 012/199] add python3-yaml dependency --- debian/control | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debian/control b/debian/control index ca0792ce..95dd6933 100644 --- a/debian/control +++ b/debian/control @@ -18,7 +18,7 @@ Depends: ${misc:Depends}, python3 (>= 3.10), python3-django (>= 3.2), python3-djangorestframework, python3-django-filters, python3-debian, python3-rpm, python3-progressbar, python3-lxml, python3-defusedxml, python3-requests, python3-colorama, python3-magic, python3-humanize, - python3-pip, python3-pymemcache, memcached, libapache2-mod-wsgi-py3, apache2 + python3-pip, python3-pymemcache, python3-yaml, memcached, libapache2-mod-wsgi-py3, apache2 Suggests: python3-django-celery, python3-mysqldb, python3-psycopg2 Description: Django-based patch status monitoring tool for linux systems. . 
From 6673880a5104cf375887c02a46fb65b73e640b6b Mon Sep 17 00:00:00 2001 From: Hugo Deprez Date: Thu, 27 Feb 2025 15:30:40 +0100 Subject: [PATCH 013/199] update readme for depenency --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index f4f9bfe2..d425c5fd 100644 --- a/README.md +++ b/README.md @@ -106,6 +106,7 @@ python3-requests python3-colorama python3-magic python3-humanize +python3-yaml ``` The server can optionally make use of celery to asynchronously process the From fc3069bdd2e5d98ebc269d67610502a2564a10fd Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 27 Feb 2025 16:07:14 +0000 Subject: [PATCH 014/199] auto-commit to update version skip-checks: true --- VERSION.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION.txt b/VERSION.txt index ec187c44..d19cf84f 100644 --- a/VERSION.txt +++ b/VERSION.txt @@ -1 +1 @@ -3.0.16 +3.0.17 From 803a0bf626b4321aae23c35cb6fc559838c2c7b9 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 27 Feb 2025 16:07:15 +0000 Subject: [PATCH 015/199] auto-commit to update debian changelog skip-checks: true --- debian/changelog | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/debian/changelog b/debian/changelog index 8d428ccb..109f4687 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,14 @@ +patchman (3.0.17-1) stable; urgency=medium + + [ Hugo Deprez ] + * add python3-yaml dependency + * update readme for depenency + + [ Marcus Furlong ] + * auto-commit to update version skip-checks: true + + -- Marcus Furlong Thu, 27 Feb 2025 16:07:15 +0000 + patchman (3.0.16-1) stable; urgency=medium * change compression format to support older rpm versions From fdeea13da8d147f72f3f005ce84f199ae62ac676 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Fri, 28 Feb 2025 01:32:55 -0500 Subject: [PATCH 016/199] recognize https mirrors in mirrorlists --- repos/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/utils.py 
b/repos/utils.py index cb40aaf1..0255d58a 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -226,7 +226,7 @@ def get_mirrorlist_urls(url): data = download_url(res, 'Downloading repo info:') if data is None: return - mirror_urls = re.findall('^http://.*$|^ftp://.*$', + mirror_urls = re.findall('^http[s]*://.*$|^ftp://.*$', data.decode('utf-8'), re.MULTILINE) if mirror_urls: return mirror_urls From 9e32c7b267fcb7a439afe985c3c7d4d4197fed24 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 20:36:56 +0000 Subject: [PATCH 017/199] auto-commit to update version skip-checks: true --- VERSION.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION.txt b/VERSION.txt index d19cf84f..29cec99e 100644 --- a/VERSION.txt +++ b/VERSION.txt @@ -1 +1 @@ -3.0.17 +3.0.18 From dd466594cfab3de05a6f48c7badaf59dfbd2baae Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 20:36:58 +0000 Subject: [PATCH 018/199] auto-commit to update debian changelog skip-checks: true --- debian/changelog | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/debian/changelog b/debian/changelog index 109f4687..e54b390b 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,10 @@ +patchman (3.0.18-1) stable; urgency=medium + + * recognize https mirrors in mirrorlists + * auto-commit to update version skip-checks: true + + -- Marcus Furlong Sat, 01 Mar 2025 20:36:58 +0000 + patchman (3.0.17-1) stable; urgency=medium [ Hugo Deprez ] From 67cc8d868736b56eed878acd84753dfda921ff34 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 15:40:08 -0500 Subject: [PATCH 019/199] switch default branch from master to main --- .github/workflows/codeql-analysis.yml | 4 ++-- .../workflows/create-release-and-upload-assets.yml | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index ccd9317a..16a1c3bc 100644 --- 
a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -2,9 +2,9 @@ name: "Code Scanning - Action" on: push: - branches: [master] + branches: [main] pull_request: - branches: [master] + branches: [main] jobs: CodeQL-Build: diff --git a/.github/workflows/create-release-and-upload-assets.yml b/.github/workflows/create-release-and-upload-assets.yml index 4b8852ef..78b8079d 100644 --- a/.github/workflows/create-release-and-upload-assets.yml +++ b/.github/workflows/create-release-and-upload-assets.yml @@ -11,7 +11,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - ref: master + ref: main - name: git fetch --all run: | git fetch --all @@ -40,7 +40,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - ref: master + ref: main - name: git fetch --all run: | git fetch --all @@ -62,7 +62,7 @@ jobs: echo "${{ github.ref }}" | cut -dv -f2 > VERSION.txt git add VERSION.txt git diff --quiet && git diff --staged --quiet || git commit -m "${COMMIT_MSG}" - git push origin master + git push origin main - name: Update debian changelog env: EMAIL: furlongm@gmail.com @@ -72,7 +72,7 @@ jobs: skip-checks: true run: | gbp dch --new-version=$(cat VERSION.txt)-1 --release --distribution=stable --spawn-editor=never --commit --commit-msg="${COMMIT_MSG}" - git push origin master + git push origin main build-and-upload-deb-assets: needs: update-version-and-changelog runs-on: ubuntu-latest @@ -91,7 +91,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - ref: master + ref: main - name: git fetch --all run: | git config --global --add safe.directory /__w/patchman/patchman @@ -156,7 +156,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - ref: master + ref: main - name: git fetch --all run: | git config --global --add safe.directory /__w/patchman/patchman From 8cf653c35b2e89016242039c1b76fd42ecbf725d Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 20:45:01 +0000 Subject: [PATCH 020/199] auto-commit to update 
version skip-checks: true --- VERSION.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION.txt b/VERSION.txt index 29cec99e..25875f01 100644 --- a/VERSION.txt +++ b/VERSION.txt @@ -1 +1 @@ -3.0.18 +3.0.19 From c08e72a11d59ce31b206f9e10e909b4f13b445fe Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 20:45:02 +0000 Subject: [PATCH 021/199] auto-commit to update debian changelog skip-checks: true --- debian/changelog | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/debian/changelog b/debian/changelog index e54b390b..452ac0b4 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,10 @@ +patchman (3.0.19-1) stable; urgency=medium + + * switch default branch from master to main + * auto-commit to update version skip-checks: true + + -- Marcus Furlong Sat, 01 Mar 2025 20:45:02 +0000 + patchman (3.0.18-1) stable; urgency=medium * recognize https mirrors in mirrorlists From 51f971a92f7718ee2f0b13d0a51fefd53f52aa6b Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 14 Jan 2025 20:17:00 -0500 Subject: [PATCH 022/199] support for timezone-aware datetimes --- util/__init__.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/util/__init__.py b/util/__init__.py index 5e7e9633..a240258c 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -22,11 +22,15 @@ import zlib import lzma from colorama import Fore, Style +from datetime import datetime from enum import Enum from hashlib import md5, sha1, sha256, sha512 from progressbar import Bar, ETA, Percentage, ProgressBar from patchman.signals import error_message, info_message +from django.utils.timezone import make_aware +from django.utils.dateparse import parse_datetime + if ProgressBar.__dict__.get('maxval'): pbar2 = False @@ -239,3 +243,16 @@ def get_md5(data): """ Return the md5 checksum for data """ return md5(data).hexdigest() + + +def tz_aware_datetime(date): + """ Ensure a datetime is timezone-aware + Returns the tz-aware datetime object + 
""" + if isinstance(date, int): + parsed_date = datetime.fromtimestamp(date) + else: + parsed_date = parse_datetime(date) + if not parsed_date.tzinfo: + parsed_date = make_aware(parsed_date) + return parsed_date From 093696257104fd47a2a6335979df3650ceff87a9 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Feb 2025 15:28:41 -0500 Subject: [PATCH 023/199] add function to check django settings --- hosts/templatetags/report_alert.py | 6 ++-- repos/models.py | 4 +-- repos/utils.py | 49 +++++++++++++++--------------- util/__init__.py | 12 ++++++++ util/templatetags/common.py | 5 +-- util/views.py | 4 +-- 6 files changed, 47 insertions(+), 33 deletions(-) diff --git a/hosts/templatetags/report_alert.py b/hosts/templatetags/report_alert.py index 025e2cde..72cd2540 100644 --- a/hosts/templatetags/report_alert.py +++ b/hosts/templatetags/report_alert.py @@ -18,12 +18,13 @@ from datetime import timedelta from django.conf import settings - from django.template import Library from django.utils.html import format_html from django.templatetags.static import static from django.utils import timezone +from util import has_setting_of_type + register = Library() @@ -31,8 +32,7 @@ def report_alert(lastreport): html = '' alert_icon = static('img/icon-alert.gif') - if hasattr(settings, 'DAYS_WITHOUT_REPORT') and \ - isinstance(settings.DAYS_WITHOUT_REPORT, int): + if has_setting_of_type('DAYS_WITHOUT_REPORT', int): days = settings.DAYS_WITHOUT_REPORT else: days = 14 diff --git a/repos/models.py b/repos/models.py index 7e6089f9..a2862f59 100644 --- a/repos/models.py +++ b/repos/models.py @@ -21,6 +21,7 @@ from arch.models import MachineArchitecture from packages.models import Package +from util import has_setting_of_type from repos.utils import refresh_deb_repo, refresh_rpm_repo, \ refresh_arch_repo, update_mirror_packages @@ -162,8 +163,7 @@ def fail(self): text = f'No usable mirror found at {self.url!s}' error_message.send(sender=None, text=text) default_max_mirror_failures = 
28 - if hasattr(settings, 'MAX_MIRROR_FAILURES') and \ - isinstance(settings.MAX_MIRROR_FAILURES, int): + if has_setting_of_type('MAX_MIRROR_FAILURES', int): max_mirror_failures = settings.MAX_MIRROR_FAILURES else: max_mirror_failures = default_max_mirror_failures diff --git a/repos/utils.py b/repos/utils.py index 0255d58a..35a96ce1 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -32,7 +32,7 @@ from packages.utils import parse_package_string, get_or_create_package from arch.models import PackageArchitecture from util import get_url, download_url, response_is_valid, extract, \ - get_checksum, Checksum + get_checksum, Checksum, has_setting_of_type from patchman.signals import progress_info_s, progress_update_s, \ info_message, warning_message, error_message, debug_message @@ -235,20 +235,17 @@ def get_mirrorlist_urls(url): def add_mirrors_from_urls(repo, mirror_urls): """ Creates mirrors from a list of mirror urls """ + max_mirrors = get_max_mirrors() for mirror_url in mirror_urls: mirror_url = mirror_url.replace('$ARCH', repo.arch.name) mirror_url = mirror_url.replace('$basearch', repo.arch.name) - if hasattr(settings, 'MAX_MIRRORS') and \ - isinstance(settings.MAX_MIRRORS, int): - max_mirrors = settings.MAX_MIRRORS - # only add X mirrors, where X = max_mirrors - q = Q(mirrorlist=False, refresh=True) - existing = repo.mirror_set.filter(q).count() - if existing >= max_mirrors: - text = f'{max_mirrors!s} mirrors already ' - text += f'exist, not adding {mirror_url!s}' - warning_message.send(sender=None, text=text) - continue + q = Q(mirrorlist=False, refresh=True) + existing = repo.mirror_set.filter(q).count() + if existing >= max_mirrors: + text = f'{max_mirrors!s} mirrors already ' + text += f'exist, not adding {mirror_url!s}' + warning_message.send(sender=None, text=text) + continue from repos.models import Mirror m, c = Mirror.objects.get_or_create(repo=repo, url=mirror_url) if c: @@ -545,10 +542,8 @@ def refresh_yum_repo(mirror, data, mirror_url, ts): if not 
mirror_checksum_is_valid(computed_checksum, modules_checksum, mirror, 'module'): return - if hasattr(settings, 'MAX_MIRRORS') and \ - isinstance(settings.MAX_MIRRORS, int): - max_mirrors = settings.MAX_MIRRORS # only refresh X mirrors, where X = max_mirrors + max_mirrors = get_max_mirrors() checksum_q = Q(mirrorlist=False, refresh=True, timestamp=ts, file_checksum=primary_checksum) have_checksum = mirror.repo.mirror_set.filter(checksum_q).count() @@ -584,11 +579,9 @@ def mirror_checksum_is_valid(computed, provided, mirror, metadata_type): def refresh_arch_repo(repo): """ Refresh all mirrors of an arch linux repo """ - if hasattr(settings, 'MAX_MIRRORS') and \ - isinstance(settings.MAX_MIRRORS, int): - max_mirrors = settings.MAX_MIRRORS + max_mirrors = get_max_mirrors() fname = f'{repo.arch!s}/{repo.repo_id!s}.db' - ts = datetime.now().replace(microsecond=0) + ts = datetime.now().astimezone().replace(microsecond=0) for i, mirror in enumerate(repo.mirror_set.filter(refresh=True)): res = find_mirror_url(mirror.url, [fname]) mirror.last_access_ok = response_is_valid(res) @@ -665,10 +658,8 @@ def refresh_rpm_repo(repo): check_for_mirrorlists(repo) check_for_metalinks(repo) - if hasattr(settings, 'MAX_MIRRORS') and \ - isinstance(settings.MAX_MIRRORS, int): - max_mirrors = settings.MAX_MIRRORS - ts = datetime.now().replace(microsecond=0) + max_mirrors = get_max_mirrors() + ts = datetime.now().astimezone().replace(microsecond=0) enabled_mirrors = repo.mirror_set.filter(mirrorlist=False, refresh=True) for i, mirror in enumerate(enabled_mirrors): res = find_mirror_url(mirror.url, formats) @@ -706,7 +697,7 @@ def refresh_deb_repo(repo): formats = ['Packages.xz', 'Packages.bz2', 'Packages.gz', 'Packages'] - ts = datetime.now().replace(microsecond=0) + ts = datetime.now().astimezone().replace(microsecond=0) for mirror in repo.mirror_set.filter(refresh=True): res = find_mirror_url(mirror.url, formats) mirror.last_access_ok = response_is_valid(res) @@ -756,3 +747,13 @@ def 
find_best_repo(package, hostrepos): if hostrepo.priority > best_repo.priority: best_repo = hostrepo return best_repo + + +def get_max_mirrors(): + """ Find the max number of mirrors for refresh + """ + if has_setting_of_type('MAX_MIRRORS', int): + max_mirrors = settings.MAX_MIRRORS + else: + max_mirrors = 5 + return max_mirrors diff --git a/util/__init__.py b/util/__init__.py index a240258c..3cdfe7d3 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -30,6 +30,7 @@ from django.utils.timezone import make_aware from django.utils.dateparse import parse_datetime +from django.conf import settings if ProgressBar.__dict__.get('maxval'): @@ -149,6 +150,17 @@ def response_is_valid(res): return False +def has_setting_of_type(setting_name, expected_type): + """ Checks if the Django settings module has the specified attribute + and if it is of the expected type + Returns True if the setting exists and is of the expected type, False otherwise. + """ + if not hasattr(settings, setting_name): + return False + setting_value = getattr(settings, setting_name) + return isinstance(setting_value, expected_type) + + def gunzip(contents): """ gunzip contents in memory and return the data """ diff --git a/util/templatetags/common.py b/util/templatetags/common.py index bac898cc..b177ca33 100644 --- a/util/templatetags/common.py +++ b/util/templatetags/common.py @@ -27,6 +27,8 @@ from django.templatetags.static import static from django.core.paginator import Paginator +from util import has_setting_of_type + register = Library() @@ -101,8 +103,7 @@ def searchform(terms): @register.simple_tag def reports_timedelta(): - if hasattr(settings, 'DAYS_WITHOUT_REPORT') and \ - isinstance(settings.DAYS_WITHOUT_REPORT, int): + if has_setting_of_type('DAYS_WITHOUT_REPORT', int): days = settings.DAYS_WITHOUT_REPORT else: days = 14 diff --git a/util/views.py b/util/views.py index e55575f2..8de578e5 100644 --- a/util/views.py +++ b/util/views.py @@ -28,6 +28,7 @@ from repos.models import Repository, 
Mirror from packages.models import Package from reports.models import Report +from util import has_setting_of_type @login_required @@ -45,8 +46,7 @@ def dashboard(request): packages = Package.objects.all() # host issues - if hasattr(settings, 'DAYS_WITHOUT_REPORT') and \ - isinstance(settings.DAYS_WITHOUT_REPORT, int): + if has_setting_of_type('DAYS_WITHOUT_REPORT', int): days = settings.DAYS_WITHOUT_REPORT else: days = 14 From e0215387873138a250b8e93c9256e7baa84f8471 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 14 Jan 2025 19:59:10 -0500 Subject: [PATCH 024/199] update os and osgroup to use os codenames --- hosts/models.py | 3 + operatingsystems/fixtures/os.json | 66 +++++++++++++++++++ operatingsystems/fixtures/osgroup.json | 58 ++++++++++++++++ operatingsystems/managers.py | 22 +++++++ .../migrations/0003_osgroup_codename.py | 18 +++++ .../0004_alter_osgroup_unique_together.py | 17 +++++ operatingsystems/models.py | 13 +++- packages/admin.py | 14 +++- reports/models.py | 18 ++++- repos/models.py | 3 + 10 files changed, 225 insertions(+), 7 deletions(-) create mode 100644 operatingsystems/fixtures/os.json create mode 100644 operatingsystems/fixtures/osgroup.json create mode 100644 operatingsystems/managers.py create mode 100644 operatingsystems/migrations/0003_osgroup_codename.py create mode 100644 operatingsystems/migrations/0004_alter_osgroup_unique_together.py diff --git a/hosts/models.py b/hosts/models.py index 0e477d1d..b0d23ba9 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -58,6 +58,9 @@ class Host(models.Model): tags = TagField() updated_at = models.DateTimeField(default=timezone.now) + from hosts.managers import HostManager + objects = HostManager() + class Meta: verbose_name = 'Host' verbose_name_plural = 'Hosts' diff --git a/operatingsystems/fixtures/os.json b/operatingsystems/fixtures/os.json new file mode 100644 index 00000000..a581487f --- /dev/null +++ b/operatingsystems/fixtures/os.json @@ -0,0 +1,66 @@ +[ + { + "model": 
"operatingsystems.os", + "fields": { + "name": "Rocky Linux 9.3", + "osgroup": [ + "Rocky Linux 9", + "Blue Onyx" + ] + } + }, + { + "model": "operatingsystems.os", + "fields": { + "name": "Rocky Linux 8.9", + "osgroup": [ + "Rocky Linux 8", + "Green Obsidian" + ] + } + }, + { + "model": "operatingsystems.os", + "fields": { + "name": "Debian 12.5", + "osgroup": [ + "Debian 12", + "bookworm" + ] + } + }, + { + "model": "operatingsystems.os", + "fields": { + "name": "Arch Linux", + "osgroup": null + } + }, + { + "model": "operatingsystems.os", + "fields": { + "name": "openSUSE Leap 15.5", + "osgroup": null + } + }, + { + "model": "operatingsystems.os", + "fields": { + "name": "AlmaLinux 8.10", + "osgroup": [ + "AlmaLinux 8", + "Cerulean Leopard" + ] + } + }, + { + "model": "operatingsystems.os", + "fields": { + "name": "AlmaLinux 9.5", + "osgroup": [ + "AlmaLinux 9", + "Teal Serval" + ] + } + } +] diff --git a/operatingsystems/fixtures/osgroup.json b/operatingsystems/fixtures/osgroup.json new file mode 100644 index 00000000..e4b785ee --- /dev/null +++ b/operatingsystems/fixtures/osgroup.json @@ -0,0 +1,58 @@ +[ + { + "model": "operatingsystems.osgroup", + "fields": { + "name": "CentOS 7", + "codename": "", + "repos": [] + } + }, + { + "model": "operatingsystems.osgroup", + "fields": { + "name": "CentOS 8", + "codename": "", + "repos": [] + } + }, + { + "model": "operatingsystems.osgroup", + "fields": { + "name": "Rocky Linux 8", + "codename": "Green Obsidian", + "repos": [] + } + }, + { + "model": "operatingsystems.osgroup", + "fields": { + "name": "Rocky Linux 9", + "codename": "Blue Onyx", + "repos": [] + } + }, + { + "model": "operatingsystems.osgroup", + "fields": { + "name": "AlmaLinux 8", + "codename": "Cerulean Leopard", + "repos": [] + } + }, + { + "model": "operatingsystems.osgroup", + "fields": { + "name": "AlmaLinux 9", + "codename": "Teal Serval", + "repos": [] + } + }, + { + "model": "operatingsystems.osgroup", + "fields": { + "name": "Debian 12", + 
"codename": "bookworm", + "repos": [] + } + } +] diff --git a/operatingsystems/managers.py b/operatingsystems/managers.py new file mode 100644 index 00000000..99bdfa1f --- /dev/null +++ b/operatingsystems/managers.py @@ -0,0 +1,22 @@ +# Copyright 2024 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.db import models + + +class OSGroupManager(models.Manager): + def get_by_natural_key(self, name, codename): + return self.get(name=name, codename=codename) diff --git a/operatingsystems/migrations/0003_osgroup_codename.py b/operatingsystems/migrations/0003_osgroup_codename.py new file mode 100644 index 00000000..426c7a15 --- /dev/null +++ b/operatingsystems/migrations/0003_osgroup_codename.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.15 on 2025-01-13 18:55 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('operatingsystems', '0002_initial'), + ] + + operations = [ + migrations.AddField( + model_name='osgroup', + name='codename', + field=models.CharField(blank=True, max_length=255), + ), + ] diff --git a/operatingsystems/migrations/0004_alter_osgroup_unique_together.py b/operatingsystems/migrations/0004_alter_osgroup_unique_together.py new file mode 100644 index 00000000..dbda97e6 --- /dev/null +++ b/operatingsystems/migrations/0004_alter_osgroup_unique_together.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.15 on 2025-01-13 19:57 + +from django.db import 
migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('operatingsystems', '0003_osgroup_codename'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='osgroup', + unique_together={('name', 'codename')}, + ), + ] diff --git a/operatingsystems/models.py b/operatingsystems/models.py index 905dafda..e69ced6e 100644 --- a/operatingsystems/models.py +++ b/operatingsystems/models.py @@ -26,18 +26,29 @@ class OSGroup(models.Model): name = models.CharField(max_length=255, unique=True) repos = models.ManyToManyField(Repository, blank=True) + codename = models.CharField(max_length=255, blank=True) + + from operatingsystems.managers import OSGroupManager + objects = OSGroupManager() class Meta: verbose_name = 'Operating System Group' verbose_name_plural = 'Operating System Groups' + unique_together = ('name', 'codename') ordering = ('name',) def __str__(self): - return self.name + if self.codename: + return f'{self.name} ({self.codename})' + else: + return self.name def get_absolute_url(self): return reverse('operatingsystems:osgroup_detail', args=[str(self.id)]) + def natural_key(self): + return (self.name, self.codename) + class OS(models.Model): diff --git a/packages/admin.py b/packages/admin.py index 4a782eb9..1a5a2dfa 100644 --- a/packages/admin.py +++ b/packages/admin.py @@ -21,11 +21,19 @@ class ErratumAdmin(admin.ModelAdmin): - readonly_fields = ('packages',) + readonly_fields = ('packages', 'references') -admin.site.register(Package) +class PackageAdmin(admin.ModelAdmin): + readonly_fields = ('name',) + + +class PackageUpdateAdmin(admin.ModelAdmin): + readonly_fields = ('oldpackage', 'newpackage') + + +admin.site.register(Package, PackageAdmin) admin.site.register(PackageName) -admin.site.register(PackageUpdate) +admin.site.register(PackageUpdate, PackageUpdateAdmin) admin.site.register(Erratum, ErratumAdmin) admin.site.register(ErratumReference) diff --git a/reports/models.py b/reports/models.py index b778f212..c7aecb7b 100644 
--- a/reports/models.py +++ b/reports/models.py @@ -15,12 +15,14 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see +import re + from django.db import models, IntegrityError, DatabaseError, transaction from django.urls import reverse from hosts.models import Host from arch.models import MachineArchitecture -from operatingsystems.models import OS +from operatingsystems.models import OS, OSGroup from domains.models import Domain from patchman.signals import error_message, info_message @@ -105,10 +107,20 @@ def process(self, find_updates=True, verbose=False): """ if self.os and self.kernel and self.arch and not self.processed: - + osgroup_codename = None + match = re.match(r'(.*) \((.*)\)', self.os) + if match: + os_name = match.group(1) + osgroup_codename = match.group(2) + else: + os_name = self.os oses = OS.objects.all() with transaction.atomic(): - os, c = oses.get_or_create(name=self.os) + os, c = oses.get_or_create(name=os_name) + if osgroup_codename: + osgroups = OSGroup.objects.filter(codename=osgroup_codename) + if osgroups.count() == 1: + os.osgroup = osgroups[0] machine_arches = MachineArchitecture.objects.all() with transaction.atomic(): diff --git a/repos/models.py b/repos/models.py index a2862f59..888fd686 100644 --- a/repos/models.py +++ b/repos/models.py @@ -48,6 +48,9 @@ class Repository(models.Model): repo_id = models.CharField(max_length=255, null=True, blank=True) auth_required = models.BooleanField(default=False) + from repos.managers import RepositoryManager + objects = RepositoryManager() + class Meta: verbose_name_plural = 'Repository' verbose_name_plural = 'Repositories' From 71c7893741e5034010fed5528a49951c0d020ac4 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 5 Jan 2025 15:18:06 -0500 Subject: [PATCH 025/199] switch from obsolete django-tagging to django-taggit --- debian/control | 4 +++- .../0004_remove_host_tags_host_tags.py | 24 +++++++++++++++++++ hosts/models.py | 4 ++-- 
hosts/templates/hosts/host_delete.html | 4 +--- hosts/templates/hosts/host_detail.html | 4 +--- hosts/views.py | 12 +++++----- patchman/settings.py | 3 +++ requirements.txt | 1 + sbin/patchman | 2 +- setup.cfg | 1 + 10 files changed, 43 insertions(+), 16 deletions(-) create mode 100644 hosts/migrations/0004_remove_host_tags_host_tags.py diff --git a/debian/control b/debian/control index 95dd6933..992929c5 100644 --- a/debian/control +++ b/debian/control @@ -18,7 +18,8 @@ Depends: ${misc:Depends}, python3 (>= 3.10), python3-django (>= 3.2), python3-djangorestframework, python3-django-filters, python3-debian, python3-rpm, python3-progressbar, python3-lxml, python3-defusedxml, python3-requests, python3-colorama, python3-magic, python3-humanize, - python3-pip, python3-pymemcache, python3-yaml, memcached, libapache2-mod-wsgi-py3, apache2 + python3-pip, python3-pymemcache, python3-yaml, memcached, libapache2-mod-wsgi-py3, + apache2, python3-django-taggit Suggests: python3-django-celery, python3-mysqldb, python3-psycopg2 Description: Django-based patch status monitoring tool for linux systems. .
diff --git a/hosts/migrations/0004_remove_host_tags_host_tags.py b/hosts/migrations/0004_remove_host_tags_host_tags.py new file mode 100644 index 00000000..2f77b14f --- /dev/null +++ b/hosts/migrations/0004_remove_host_tags_host_tags.py @@ -0,0 +1,24 @@ +# Generated by Django 4.2.18 on 2025-02-04 23:37 + +from django.db import migrations +import taggit.managers + + +class Migration(migrations.Migration): + + dependencies = [ + ('taggit', '0005_auto_20220424_2025'), + ('hosts', '0003_host_modules'), + ] + + operations = [ + migrations.RemoveField( + model_name='host', + name='tags', + ), + migrations.AddField( + model_name='host', + name='tags', + field=taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'), + ), + ] diff --git a/hosts/models.py b/hosts/models.py index b0d23ba9..8aa3570e 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -24,7 +24,7 @@ from version_utils.rpm import labelCompare except ImportError: from rpm import labelCompare -from tagging.fields import TagField +from taggit.managers import TaggableManager from packages.models import Package, PackageUpdate from domains.models import Domain @@ -55,7 +55,7 @@ class Host(models.Model): updates = models.ManyToManyField(PackageUpdate, blank=True) reboot_required = models.BooleanField(default=False) host_repos_only = models.BooleanField(default=True) - tags = TagField() + tags = TaggableManager() updated_at = models.DateTimeField(default=timezone.now) from hosts.managers import HostManager diff --git a/hosts/templates/hosts/host_delete.html b/hosts/templates/hosts/host_delete.html index 13d367f8..b80ea170 100644 --- a/hosts/templates/hosts/host_delete.html +++ b/hosts/templates/hosts/host_delete.html @@ -23,9 +23,7 @@ Tags - {% load tagging_tags %} - {% tags_for_object host as tags %} - {% for tag in tags %} + {% for tag in host.tags.all %} {{ tag }} {% endfor %} diff --git a/hosts/templates/hosts/host_detail.html 
b/hosts/templates/hosts/host_detail.html index 57d7afae..421cda3d 100644 --- a/hosts/templates/hosts/host_detail.html +++ b/hosts/templates/hosts/host_detail.html @@ -33,9 +33,7 @@ Tags - {% load tagging_tags %} - {% tags_for_object host as tags %} - {% for tag in tags %} + {% for tag in host.tags.all %} {{ tag }} {% endfor %} diff --git a/hosts/views.py b/hosts/views.py index 46a56985..0f38dcdc 100644 --- a/hosts/views.py +++ b/hosts/views.py @@ -22,7 +22,7 @@ from django.db.models import Q from django.contrib import messages -from tagging.models import Tag, TaggedItem +from taggit.models import Tag from rest_framework import viewsets from util.filterspecs import Filter, FilterBar @@ -62,7 +62,7 @@ def host_list(request): hosts = hosts.filter(os__osgroup=int(request.GET['osgroup'])) if 'tag' in request.GET: - hosts = TaggedItem.objects.get_by_model(hosts, request.GET['tag']) + hosts = hosts.filter(tags__name__in=[request.GET['tag']]) if 'reboot_required' in request.GET: reboot_required = request.GET['reboot_required'] == 'True' @@ -89,10 +89,10 @@ def host_list(request): page = paginator.page(paginator.num_pages) filter_list = [] - mytags = {} - for tag in Tag.objects.usage_for_model(Host): - mytags[tag.name] = tag.name - filter_list.append(Filter(request, 'tag', mytags)) + tags = {} + for tag in Tag.objects.all(): + tags[tag.name] = tag.name + filter_list.append(Filter(request, 'tag', tags)) filter_list.append(Filter(request, 'domain', Domain.objects.all())) filter_list.append(Filter(request, 'os', OS.objects.all())) filter_list.append(Filter(request, 'osgroup', OSGroup.objects.all())) diff --git a/patchman/settings.py b/patchman/settings.py index 4a943a2f..6bf681db 100644 --- a/patchman/settings.py +++ b/patchman/settings.py @@ -78,6 +78,7 @@ THIRD_PARTY_APPS = [ 'django_extensions', 'tagging', + 'taggit', 'bootstrap3', 'rest_framework', 'django_filters', @@ -102,6 +103,8 @@ 'PAGE_SIZE': 100, } +TAGGIT_CASE_INSENSITIVE = True + try: from celery import Celery # 
noqa except ImportError: diff --git a/requirements.txt b/requirements.txt index 16d37ee2..a6970774 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,6 @@ Django==3.2.25 django-tagging==0.5.0 +django-taggit==4.0.0 django-extensions==3.2.1 django-bootstrap3==23.1 progressbar==2.5 diff --git a/sbin/patchman b/sbin/patchman index df6cd0ca..6fb90768 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -30,7 +30,7 @@ from django import setup as django_setup django_setup() from datetime import date, datetime -from tagging.models import TaggedItem +from taggit.models import TaggedItem from hosts.models import Host from packages.models import Package, PackageName, PackageUpdate diff --git a/setup.cfg b/setup.cfg index d001be97..28f45479 100644 --- a/setup.cfg +++ b/setup.cfg @@ -5,6 +5,7 @@ post-install = scripts/rpm-post-install.sh requires = /usr/bin/python3 python3-django >= 3.2.20 python3-django-tagging + python3-django-taggit python3-django-extensions python3-django-bootstrap3 python3-django-rest-framework From 23ee057ed1dcc29817fa0d4e5e0a48ba5ad804df Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 6 Feb 2025 22:22:10 -0500 Subject: [PATCH 026/199] additional errata sources and processing --- errata/admin.py | 26 ++ errata/apps.py | 21 ++ errata/managers.py | 22 ++ errata/migrations/0001_initial.py | 43 +++ errata/migrations/__init__.py | 0 errata/models.py | 102 +++++++ errata/serializers.py | 31 ++ errata/sources/distros/alma.py | 135 +++++++++ errata/sources/distros/arch.py | 132 +++++++++ errata/sources/distros/centos.py | 163 +++++++++++ errata/sources/distros/debian.py | 268 ++++++++++++++++++ errata/sources/distros/rocky.py | 176 ++++++++++++ errata/sources/distros/ubuntu.py | 203 +++++++++++++ errata/templates/errata/erratum_detail.html | 74 +++++ errata/templates/errata/erratum_list.html | 7 + errata/templates/errata/erratum_table.html | 29 ++ .../errata/erratumreference_list.html | 7 + .../errata/erratumreference_table.html | 19 ++ 
errata/urls.py | 27 ++ errata/utils.py | 134 +++++++++ errata/views.py | 138 +++++++++ hosts/forms.py | 2 +- hosts/models.py | 16 +- hosts/templates/hosts/host_delete.html | 6 +- hosts/templates/hosts/host_detail.html | 24 +- hosts/templates/hosts/host_table.html | 4 +- hosts/views.py | 30 +- modules/templates/modules/module_table.html | 2 +- operatingsystems/admin.py | 8 +- operatingsystems/fixtures/os.json | 30 +- operatingsystems/fixtures/osgroup.json | 14 +- operatingsystems/forms.py | 22 +- operatingsystems/managers.py | 2 +- operatingsystems/models.py | 24 +- operatingsystems/serializers.py | 14 +- operatingsystems/urls.py | 13 +- operatingsystems/views.py | 173 +++++------ packages/admin.py | 9 +- ..._delete_erratum_delete_erratumreference.py | 19 ++ packages/models.py | 38 +-- packages/serializers.py | 16 +- .../templates/packages/package_detail.html | 45 ++- .../packages/package_name_detail.html | 40 +++ .../templates/packages/package_name_list.html | 7 + .../packages/package_name_table.html | 17 ++ .../templates/packages/package_table.html | 22 +- packages/urls.py | 7 +- packages/utils.py | 238 +++------------- packages/views.py | 102 +++++-- patchman/settings.py | 2 + patchman/static/js/expandable-text.js | 8 + patchman/urls.py | 13 +- reports/models.py | 31 +- repos/templates/repos/mirror_table.html | 10 +- repos/templates/repos/repo_detail.html | 6 +- repos/views.py | 8 +- sbin/patchman | 43 ++- security/admin.py | 23 ++ security/apps.py | 21 ++ security/managers.py | 22 ++ security/migrations/0001_initial.py | 48 ++++ security/migrations/0002_alter_cve_options.py | 17 ++ security/migrations/__init__.py | 0 security/models.py | 162 +++++++++++ security/serializers.py | 32 +++ security/templates/security/cve_detail.html | 83 ++++++ security/templates/security/cve_list.html | 7 + security/templates/security/cve_table.html | 37 +++ security/templates/security/cwe_detail.html | 20 ++ security/templates/security/cwe_list.html | 7 + 
security/templates/security/cwe_table.html | 21 ++ .../templates/security/security_landing.html | 19 ++ security/urls.py | 29 ++ security/utils.py | 48 ++++ security/views.py | 140 +++++++++ util/__init__.py | 11 +- util/templates/base.html | 1 + util/templates/dashboard.html | 30 +- util/templates/navbar.html | 6 +- util/templates/objectlist.html | 2 +- util/views.py | 39 +-- 81 files changed, 3046 insertions(+), 601 deletions(-) create mode 100644 errata/admin.py create mode 100644 errata/apps.py create mode 100644 errata/managers.py create mode 100644 errata/migrations/0001_initial.py create mode 100644 errata/migrations/__init__.py create mode 100644 errata/models.py create mode 100644 errata/serializers.py create mode 100644 errata/sources/distros/alma.py create mode 100644 errata/sources/distros/arch.py create mode 100644 errata/sources/distros/centos.py create mode 100644 errata/sources/distros/debian.py create mode 100644 errata/sources/distros/rocky.py create mode 100644 errata/sources/distros/ubuntu.py create mode 100644 errata/templates/errata/erratum_detail.html create mode 100644 errata/templates/errata/erratum_list.html create mode 100644 errata/templates/errata/erratum_table.html create mode 100644 errata/templates/errata/erratumreference_list.html create mode 100644 errata/templates/errata/erratumreference_table.html create mode 100644 errata/urls.py create mode 100644 errata/utils.py create mode 100644 errata/views.py create mode 100644 packages/migrations/0002_delete_erratum_delete_erratumreference.py create mode 100644 packages/templates/packages/package_name_detail.html create mode 100644 packages/templates/packages/package_name_list.html create mode 100644 packages/templates/packages/package_name_table.html create mode 100644 patchman/static/js/expandable-text.js create mode 100644 security/admin.py create mode 100644 security/apps.py create mode 100644 security/managers.py create mode 100644 security/migrations/0001_initial.py create mode 
100644 security/migrations/0002_alter_cve_options.py create mode 100644 security/migrations/__init__.py create mode 100644 security/models.py create mode 100644 security/serializers.py create mode 100644 security/templates/security/cve_detail.html create mode 100644 security/templates/security/cve_list.html create mode 100644 security/templates/security/cve_table.html create mode 100644 security/templates/security/cwe_detail.html create mode 100644 security/templates/security/cwe_list.html create mode 100644 security/templates/security/cwe_table.html create mode 100644 security/templates/security/security_landing.html create mode 100644 security/urls.py create mode 100644 security/utils.py create mode 100644 security/views.py diff --git a/errata/admin.py b/errata/admin.py new file mode 100644 index 00000000..20e7066f --- /dev/null +++ b/errata/admin.py @@ -0,0 +1,26 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.contrib import admin +from errata.models import Erratum, ErratumReference + + +class ErratumAdmin(admin.ModelAdmin): + readonly_fields = ('packages', 'references') + + +admin.site.register(Erratum, ErratumAdmin) +admin.site.register(ErratumReference) diff --git a/errata/apps.py b/errata/apps.py new file mode 100644 index 00000000..9411f035 --- /dev/null +++ b/errata/apps.py @@ -0,0 +1,21 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. 
+# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.apps import AppConfig + + +class ErrataConfig(AppConfig): + name = 'errata' diff --git a/errata/managers.py b/errata/managers.py new file mode 100644 index 00000000..e39147be --- /dev/null +++ b/errata/managers.py @@ -0,0 +1,22 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from django.db import models + + +class ErratumManager(models.Manager): + def get_queryset(self): + return super().get_queryset().select_related() diff --git a/errata/migrations/0001_initial.py b/errata/migrations/0001_initial.py new file mode 100644 index 00000000..85fe88b4 --- /dev/null +++ b/errata/migrations/0001_initial.py @@ -0,0 +1,43 @@ +# Generated by Django 4.2.18 on 2025-02-08 20:40 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('packages', '0002_delete_erratum_delete_erratumreference'), + ('operatingsystems', '0005_rename_osgroup_osrelease_rename_os_osvariant_and_more'), + ('security', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='ErratumReference', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('er_type', models.CharField(max_length=255)), + ('url', models.URLField(max_length=2000)), + ], + ), + migrations.CreateModel( + name='Erratum', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=255, unique=True)), + ('e_type', models.CharField(max_length=255)), + ('issue_date', models.DateTimeField()), + ('synopsis', models.CharField(max_length=255)), + ('cves', models.ManyToManyField(blank=True, to='security.cve')), + ('osreleases', models.ManyToManyField(blank=True, to='operatingsystems.osrelease')), + ('packages', models.ManyToManyField(blank=True, to='packages.package')), + ('references', models.ManyToManyField(blank=True, to='errata.erratumreference')), + ], + options={ + 'verbose_name': 'Erratum', + 'verbose_name_plural': 'Errata', + }, + ), + ] diff --git a/errata/migrations/__init__.py b/errata/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/errata/models.py b/errata/models.py new file mode 100644 index 00000000..89db7159 --- 
/dev/null +++ b/errata/models.py @@ -0,0 +1,102 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.db import models +from django.urls import reverse +from django.db import transaction, IntegrityError + +from packages.models import Package, PackageUpdate +from errata.managers import ErratumManager +from security.models import CVE +from security.utils import get_or_create_cve +from patchman.signals import error_message + + +class ErratumReference(models.Model): + + er_type = models.CharField(max_length=255) + url = models.URLField(max_length=2000) + + def __str__(self): + return self.url + + +class Erratum(models.Model): + + name = models.CharField(max_length=255, unique=True) + e_type = models.CharField(max_length=255) + issue_date = models.DateTimeField() + synopsis = models.CharField(max_length=255) + packages = models.ManyToManyField(Package, blank=True) + from operatingsystems.models import OSRelease + osreleases = models.ManyToManyField(OSRelease, blank=True) + cves = models.ManyToManyField(CVE, blank=True) + references = models.ManyToManyField(ErratumReference, blank=True) + + objects = ErratumManager() + + class Meta: + verbose_name = 'Erratum' + verbose_name_plural = 'Errata' + + def __str__(self): + text = f'{self.name!s} ({self.e_type}), {self.cves.count()} related CVEs, ' + text += f'affecting {self.packages.count()} packages and {self.osreleases.count()} OS Releases' + return text + + 
def get_absolute_url(self): + return reverse('errata:erratum_detail', args=[self.name]) + + def scan_for_security_updates(self): + if self.e_type == 'security': + for package in self.packages.all(): + affected_updates = PackageUpdate.objects.filter( + newpackage=package, + security=False + ) + for affected_update in affected_updates: + if not affected_update.security: + affected_update.security = True + try: + with transaction.atomic(): + affected_update.save() + except IntegrityError as e: + error_message.send(sender=None, text=e) + # a version of this update already exists that is + # marked as a security update, so delete this one + affected_update.delete() + + def add_packages(self, packages): + for package in packages: + self.packages.add(package) + + def add_cve(self, cve_id): + """ Add a CVE to an Erratum object + """ + self.cves.add(get_or_create_cve(cve_id)) + + def add_reference(self, e_type, url): + """ Add a reference to an Erratum object + """ + from errata.utils import fixup_erratum_reference + reference = fixup_erratum_reference({'er_type': e_type, 'url': url}) + if reference: + with transaction.atomic(): + er, created = ErratumReference.objects.get_or_create( + er_type=reference.get('er_type'), + url=reference.get('url'), + ) + self.references.add(er) diff --git a/errata/serializers.py b/errata/serializers.py new file mode 100644 index 00000000..274326cc --- /dev/null +++ b/errata/serializers.py @@ -0,0 +1,31 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from rest_framework import serializers + +from errata.models import Erratum, ErratumReference + + +class ErratumSerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = Erratum + fields = ('id', 'name', 'e_type', 'issue_date', 'synopsis', 'cves', 'osreleases', 'references') + + +class ErratumReferenceSerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = ErratumReference + fields = ('id', 'er_type', 'url') diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py new file mode 100644 index 00000000..6d7d6ed0 --- /dev/null +++ b/errata/sources/distros/alma.py @@ -0,0 +1,135 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman.
If not, see + +import json + +from django.db import transaction +from django.conf import settings + +from packages.models import Package +from packages.utils import get_or_create_package, parse_package_string +from util import get_url, download_url, has_setting_of_type +from patchman.signals import progress_info_s, progress_update_s + + +def update_alma_errata(): + """ Update Alma Linux advisories from errata.almalinux.org: + https://errata.almalinux.org/8/errata.full.json + https://errata.almalinux.org/9/errata.full.json + and process advisories + """ + default_alma_releases = [8, 9] + if has_setting_of_type('ALMA_RELEASES', list): + alma_releases = settings.ALMA_RELEASES + else: + alma_releases = default_alma_releases + for release in alma_releases: + advisories = download_alma_advisories(release) + process_alma_errata(release, advisories) + + +def download_alma_advisories(release): + """ Download Alma Linux advisories + """ + alma_errata_url = f'https://errata.almalinux.org/{release}/errata.full.json' + headers = {'Accept': 'application/json', 'Cache-Control': 'no-cache, no-tranform'} + res = get_url(alma_errata_url, headers=headers) + data = download_url(res, 'Downloading Alma Linux Errata:') + advisories = json.loads(data).get('data') + return advisories + + +def process_alma_errata(release, advisories): + """ Process Alma Linux Errata + """ + from errata.utils import get_or_create_erratum + elen = len(advisories) + ptext = f'Processing {elen} Errata:' + progress_info_s.send(sender=None, ptext=ptext, plen=elen) + for i, advisory in enumerate(advisories): + progress_update_s.send(sender=None, index=i + 1) + erratum_name = advisory.get('id') + issue_date = advisory.get('issued_date') + synopsis = advisory.get('title') + e_type = advisory.get('type') + e, created = get_or_create_erratum( + name=erratum_name, + e_type=e_type, + issue_date=issue_date, + synopsis=synopsis, + ) + add_alma_erratum_osreleases(e, release) + add_alma_erratum_references(e, advisory) + 
add_alma_erratum_packages(e, advisory) + add_alma_erratum_modules(e, advisory) + + +def add_alma_erratum_osreleases(e, release): + """ Update OS Release for Alma Linux errata + """ + from operatingsystems.models import OSRelease + with transaction.atomic(): + osrelease, created = OSRelease.objects.get_or_create(name=f'Alma Linux {release}') + e.osreleases.add(osrelease) + e.save() + + +def add_alma_erratum_references(e, advisory): + """ Add references for Alma Linux errata + """ + references = advisory.get('references') + for reference in references: + ref_id = reference.get('id') + er_type = reference.get('type') + er_url = reference.get('href') + if er_type == 'cve': + e.add_cve(ref_id) + continue + if er_type == 'self': + er_type = ref_id.split('-')[0].upper() + e.add_reference(er_type, er_url) + + +def add_alma_erratum_packages(e, advisory): + """ Parse and add packages for Alma Linux errata + """ + packages = advisory.get('packages') + for package in packages: + package_name = package.get('filename') + if package_name: + name, epoch, ver, rel, dist, arch = parse_package_string(package_name) + p_type = Package.RPM + pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) + e.packages.add(pkg) + e.save() + + +def add_alma_erratum_modules(e, advisory): + """ Parse and add modules for Alma Linux errata + """ + from modules.utils import get_matching_modules + modules = advisory.get('modules') + for module in modules: + name = module.get('name') + arch = module.get('arch') + context = module.get('context') + stream = module.get('stream') + version = module.get('version') + matching_modules = get_matching_modules(name, stream, version, context, arch) + for match in matching_modules: + for package in match.packages.all(): + e.packages.add(package) + e.save() diff --git a/errata/sources/distros/arch.py b/errata/sources/distros/arch.py new file mode 100644 index 00000000..4c8f6a4b --- /dev/null +++ b/errata/sources/distros/arch.py @@ -0,0 +1,132 @@ +# Copyright 
2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +import json + +from operatingsystems.models import OSRelease, OSVariant +from packages.models import Package +from packages.utils import find_evr, get_matching_packages +from util import get_url, download_url + + +def update_arch_errata(): + """ Update Arch Linux Errata from the following sources: + https://security.archlinux.org/advisories.json + """ + add_arch_linux_osrelease() + advisories = download_arch_errata() + parse_arch_errata(advisories) + + +def download_arch_errata(): + """ Download Arch Linux Errata Advisories + https://security.archlinux.org/advisories.json + """ + res = get_url('https://security.archlinux.org/advisories.json') + advisories = download_url(res, 'Downloading Arch Linux Advisories:') + return json.loads(advisories) + + +def parse_arch_errata(advisories): + """ Parse Arch Linux Errata Advisories + """ + from errata.utils import get_or_create_erratum + osrelease = OSRelease.objects.get(name='Arch Linux') + for advisory in advisories: + name = advisory.get('name') + issue_date = advisory.get('date') + package = advisory.get('package') + issue_type = advisory.get('type') + synopsis = f'{package} - {issue_type}' + e, created = get_or_create_erratum( + name=name, + e_type='security', + issue_date=issue_date, + synopsis=synopsis, + ) + e.osreleases.add(osrelease) + add_arch_erratum_references(e, advisory) + add_arch_erratum_packages(e, advisory) + + 
+def add_arch_linux_osrelease(): + """ Add Arch Linux OSRelease and link existing OSVariants + """ + osrelease, created = OSRelease.objects.get_or_create(name='Arch Linux') + osvariants = OSVariant.objects.filter(name__startswith='Arch Linux') + for osvariant in osvariants: + osvariant.osrelease = osrelease + osvariant.save() + + +def add_arch_erratum_references(e, advisory): + """ Add Arch Linux Erratum References + """ + reference = advisory.get('reference') + e.add_reference('Mailing List', reference) + asa_id = advisory.get('name') + url = f'https://security.archlinux.org/advisory/{asa_id}' + e.add_reference('ASA', url) + raw_url = f'{url}/raw' + res = get_url(raw_url) + raw_data = download_url(res, f'Downloading Arch Linux Erratum Reference: {raw_url}') + parse_arch_erratum_raw(e, raw_data.decode()) + + +def parse_arch_erratum_raw(e, data): + """ Parse Arch Linux Erratum Raw Data for CVEs and References + """ + in_reference_section = False + for line in data.splitlines(): + if line.startswith('CVE-ID'): + cve_ids = line.split(':')[1].strip().split() + for cve_id in cve_ids: + e.add_cve(cve_id) + elif line.startswith('References'): + in_reference_section = True + continue + if in_reference_section: + if line.startswith('='): + continue + else: + reference = line.strip() + if reference: + e.add_reference('Link', reference) + + +def add_arch_erratum_packages(e, advisory): + """ Add Arch Linux Erratum Packages + """ + group_id = advisory.get('group') + group_url = f'https://security.archlinux.org/group/{group_id}.json' + res = get_url(group_url) + data = download_url(res) + group = json.loads(data) + packages = group.get('packages') + affected = group.get('affected') + epoch, version, release = find_evr(affected) + package_type = Package.ARCH + for package in packages: + matching_packages = get_matching_packages(package, epoch, version, release, package_type) + if matching_packages: + for match in matching_packages: + e.packages.add(match) + references = 
group.get('references') + for reference in references: + e.add_reference('Link', reference) + cve_ids = group.get('issues') + for cve_id in cve_ids: + e.add_cve(cve_id) diff --git a/errata/sources/distros/centos.py b/errata/sources/distros/centos.py new file mode 100644 index 00000000..569f5df3 --- /dev/null +++ b/errata/sources/distros/centos.py @@ -0,0 +1,163 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +import re +from lxml import etree + +from django.conf import settings +from django.db import transaction + +from packages.models import Package +from packages.utils import parse_package_string, get_or_create_package +from patchman.signals import error_message, progress_info_s, progress_update_s +from util import bunzip2, get_url, download_url, get_sha1, has_setting_of_type + + +def update_centos_errata(): + """ Update CentOS errata from https://cefs.steve-meier.de/ + """ + data = download_centos_errata_checksum() + expected_checksum = parse_centos_errata_checksum(data) + data = download_centos_errata() + actual_checksum = get_sha1(data) + if actual_checksum != expected_checksum: + e = 'CEFS checksum mismatch, skipping CentOS errata parsing\n' + e += f'{actual_checksum} (actual) != {expected_checksum} (expected)' + error_message.send(sender=None, text=e) + else: + if data: + parse_centos_errata(bunzip2(data)) + + +def download_centos_errata_checksum(): + """ Download CentOS errata checksum from 
https://cefs.steve-meier.de/ + """ + res = get_url('https://cefs.steve-meier.de/errata.latest.sha1') + return download_url(res, 'Downloading CentOS Errata Checksum:') + + +def download_centos_errata(): + """ Download CentOS errata from https://cefs.steve-meier.de/ + """ + res = get_url('https://cefs.steve-meier.de/errata.latest.xml.bz2') + return download_url(res, 'Downloading CentOS Errata:') + + +def parse_centos_errata_checksum(data): + """ Parse the errata checksum and return the bz2 checksum + """ + for line in data.decode('utf-8').splitlines(): + if line.endswith('errata.latest.xml.bz2'): + return line.split()[0] + + +def parse_centos_errata(data): + """ Parse CentOS errata from https://cefs.steve-meier.de/ + """ + result = etree.XML(data) + errata_xml = result.findall('*') + elen = len(errata_xml) + ptext = f'Processing {elen!s} Errata:' + progress_info_s.send(sender=None, ptext=ptext, plen=elen) + for i, child in enumerate(errata_xml): + progress_update_s.send(sender=None, index=i + 1) + releases = get_centos_erratum_releases(child.findall('os_release')) + if not accepted_centos_release(releases): + continue + e = parse_centos_errata_tag(child.tag, child.attrib) + if e is not None: + parse_centos_errata_children(e, child.getchildren()) + + +def parse_centos_errata_tag(name, attribs): + """ Parse all tags that contain errata. If the erratum already exists, + we assume that it already has all refs, packages, releases and arches. 
+ """ + from errata.utils import get_or_create_erratum + e = None + if name.startswith('CE'): + issue_date = attribs['issue_date'] + references = attribs['references'] + synopsis = attribs['synopsis'] + if name.startswith('CEBA'): + e_type = 'bugfix' + elif name.startswith('CESA'): + e_type = 'security' + elif name.startswith('CEEA'): + e_type = 'enhancement' + e, created = get_or_create_erratum( + name=name.replace('--', ':'), + e_type=e_type, + issue_date=issue_date, + synopsis=synopsis, + ) + add_centos_erratum_references(e, references) + return e + + +def add_centos_erratum_references(e, references): + """ Add references for CentOS errata + """ + for reference in references.split(' '): + e.add_reference('Link', reference) + + +def parse_centos_errata_children(e, children): + """ Parse errata children to obtain architecture, release and packages + """ + from operatingsystems.models import OSRelease + for c in children: + if c.tag == 'os_arch': + pass + elif c.tag == 'os_release': + if accepted_centos_release([c.text]): + osrelease_name = f'CentOS {c.text}' + with transaction.atomic(): + osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) + e.osreleases.add(osrelease) + elif c.tag == 'packages': + name, epoch, ver, rel, dist, arch = parse_package_string(c.text) + match = re.match(r'.*el([0-9]+).*', rel) + if match: + release = match.group(1) + if accepted_centos_release([release]): + p_type = Package.RPM + pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) + e.packages.add(pkg) + + +def get_centos_erratum_releases(releases_xml): + """ Collect the releases a given erratum pertains to + """ + releases = set() + for release in releases_xml: + releases.add(int(release.text)) + return releases + + +def accepted_centos_release(releases): + """ Check if we accept the releases that the erratum pertains to + If any release is accepted we return True, else False + """ + if has_setting_of_type('MIN_CENTOS_RELEASE', int): + min_release = 
settings.MIN_CENTOS_RELEASE + else: + min_release = 7 + acceptable_release = False + for release in releases: + if int(release) >= min_release: + acceptable_release = True + return acceptable_release diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py new file mode 100644 index 00000000..77f184b5 --- /dev/null +++ b/errata/sources/distros/debian.py @@ -0,0 +1,268 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +import csv +import re +from datetime import datetime +from debian.deb822 import Dsc +from io import StringIO + +from django.conf import settings + +from operatingsystems.models import OSRelease, OSVariant +from packages.models import Package +from packages.utils import get_or_create_package, find_evr +from util import get_url, download_url, has_setting_of_type +from patchman.signals import progress_info_s, progress_update_s + + +def update_debian_errata(): + """ Update Debian errata using: + https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DSA/list + https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DLA/list + """ + codenames = retrieve_debian_codenames() + create_debian_os_releases(codenames) + dsas = download_debian_dsa_advisories() + dlas = download_debian_dla_advisories() + advisories = dsas + dlas + process_debian_errata(advisories) + + +def download_debian_dsa_advisories(): + """ Download the current 
Debian DSA file + """ + debian_dsa_url = 'https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DSA/list' + res = get_url(debian_dsa_url) + data = download_url(res, 'Downloading Debian DSAs') + return data.decode() + + +def download_debian_dla_advisories(): + """ Download the current Debian DLA file + """ + debian_dsa_url = 'https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DLA/list' + res = get_url(debian_dsa_url) + data = download_url(res, 'Downloading Debian DLAs') + return data.decode() + + +def process_debian_errata(advisories): + """ Parse a Debian DSA/DLA file for security advisories + """ + distro_pattern = re.compile(r'^\t\[(.+?)\] - .*') + title_pattern = re.compile(r'^\[(.+?)\] (.+?) (.+?)[ ]+[-]+ (.*)') + accepted_codenames = get_accepted_debian_codenames() + errata = [] + e = {'packages': {}, 'cve_ids': [], 'releases': []} + for line in advisories.splitlines(): + if line.startswith('['): + errata = add_errata_by_codename(errata, e, accepted_codenames) + e = {'packages': {}, 'cve_ids': [], 'releases': []} + match = re.match(title_pattern, line) + if match: + e = parse_debian_erratum_advisory(e, match) + elif line.startswith('\t{'): + for cve_id in line.strip('\t{}').split(): + e['cve_ids'].append(cve_id) + elif line.startswith('\t['): + match = re.match(distro_pattern, line) + if match: + release = match.group(1) + e['releases'].append(release) + if not e.get('packages').get(release): + e['packages'][release] = [] + e['packages'][release].append(parse_debian_erratum_packages(line, accepted_codenames)) + # add the last one + errata = add_errata_by_codename(errata, e, accepted_codenames) + create_debian_errata(errata, accepted_codenames) + + +def add_errata_by_codename(errata, e, accepted_codenames): + """ Get errata by codename and add to errata + """ + if e: + for release in e.get('releases'): + if release in accepted_codenames: + errata.append(e) + return errata + + +def 
parse_debian_erratum_advisory(e, match): + """ Parse the initial details for an erratum in a DSA/DLA file + Returns the updated dictionary + """ + date = match.group(1) + issue_date = int(datetime.strptime(date, '%d %b %Y').strftime('%s')) + erratum_name = match.group(2) + synopsis = match.group(4) + e['name'] = erratum_name + e['issue_date'] = issue_date + e['synopsis'] = synopsis + return e + + +def create_debian_errata(errata, accepted_codenames): + from errata.utils import get_or_create_erratum + elen = len(errata) + text = f'Processing {elen} Debian Errata:' + progress_info_s.send(sender=None, ptext=text, plen=elen) + for i, erratum in enumerate(errata): + progress_update_s.send(sender=None, index=i + 1) + erratum_name = erratum.get('name') + e, created = get_or_create_erratum( + name=erratum_name, + e_type='security', + issue_date=erratum.get('issue_date'), + synopsis=erratum.get('synopsis'), + ) + e.add_reference('Link', f'https://security-tracker.debian.org/tracker/{erratum_name}') + for cve_id in erratum.get('cve_ids'): + e.add_cve(cve_id) + for codename, packages in erratum.get('packages').items(): + if codename not in accepted_codenames: + continue + osrelease = OSRelease.objects.get(codename=codename) + e.osreleases.add(osrelease) + for package in packages: + process_debian_erratum_affected_packages(e, package) + + +def parse_debian_erratum_packages(line, accepted_codenames): + """ Parse the codename and source packages from a DSA/DLA file + Return the DSC object + """ + distro_package_pattern = re.compile(r'^\t\[(.+?)\] - (.+?) 
(.*)') + match = re.match(distro_package_pattern, line) + if match: + codename = match.group(1) + if codename in accepted_codenames: + source_package = match.group(2) + source_version = match.group(3) + return download_debian_package_dsc(codename, source_package, source_version) + + +def download_debian_package_dsc(codename, package, version): + """ Download a DSC file for the given source package + From this we can determine which packages are built from + a given source package + """ + dsc_pattern = re.compile(r'.*"(http.*dsc)"') + source_url = f'https://packages.debian.org/source/{codename}/{package}' + res = get_url(source_url) + data = download_url(res, f'debian src {package}-{version}', 60) + dscs = re.findall(dsc_pattern, data.decode()) + if dscs: + dsc_url = dscs[0] + res = get_url(dsc_url) + data = download_url(res, f'debian dsc {package}-{version}', 60) + return Dsc(data.decode()) + + +def get_accepted_debian_codenames(): + """ Get acceptable Debian OS codenames + Can be overridden by specifying DEBIAN_CODENAMES in settings + """ + default_codenames = ['bookworm', 'bullseye'] + if has_setting_of_type('DEBIAN_CODENAMES', list): + accepted_codenames = settings.DEBIAN_CODENAMES + else: + accepted_codenames = default_codenames + return accepted_codenames + + +def retrieve_debian_codenames(): + """ Returns the codename to version mapping + """ + distro_info_url = 'https://debian.pages.debian.net/distro-info-data/debian.csv' + res = get_url(distro_info_url) + debian_csv = download_url(res, 'Downloading Debian distro info:') + reader = csv.DictReader(StringIO(debian_csv.decode())) + codename_to_version = {} + for row in reader: + version = row.get('version') + series = row.get('series') + codename_to_version[series] = version + return codename_to_version + + +def create_debian_os_releases(codename_to_version): + """ Create OSReleases for acceptable Debian codenames + """ + accepted_codenames = get_accepted_debian_codenames() + for codename, version in 
codename_to_version.items(): + if codename in accepted_codenames: + osrelease_name = f'Debian {version}' + osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name, codename=codename) + for osvariant in OSVariant.objects.filter(name__startswith=osrelease_name): + osvariant.osrelease = osrelease + osvariant.save() + + +def process_debian_erratum_affected_packages(e, dsc): + """ Process packages affected by Debian errata + """ + if not dsc: + return + epoch, ver, rel = find_evr(str(dsc.get_version())) + package_list = dsc.get('package-list') + for line in package_list.splitlines(): + if not line: + continue + line_parts = line.split() + if line_parts[1] != 'deb': + continue + name = line_parts[0] + arches = process_debian_dsc_arches(line_parts[4]) + for arch in arches: + package = get_or_create_package(name, epoch, ver, rel, arch, Package.DEB) + e.packages.add(package) + + +def process_debian_dsc_arches(arches): + """ Process arches for dsc files + Return a list of arches for a given package in a dsc file + """ + arches = arches.replace('arch=', '') + accepted_arches = [] + # https://www.debian.org/ports/ + official_ports = [ + 'amd64', + 'arm64', + 'armel', + 'armhf', + 'i386', + 'mips64el', + 'ppc64el', + 'riscv64', + 's390x', + 'all', # architecture-independent packages + ] + for arch in arches.split(','): + if arch == 'any': + return official_ports + elif arch in official_ports: + accepted_arches.append(arch) + continue + elif arch.startswith('any-'): + real_arch = arch.split('-')[1] + if real_arch in official_ports: + accepted_arches.append(real_arch) + continue + elif arch.endswith('-any'): + if arch.startswith('linux'): + return official_ports + return accepted_arches diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py new file mode 100644 index 00000000..4927c4e1 --- /dev/null +++ b/errata/sources/distros/rocky.py @@ -0,0 +1,176 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. 
+# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +import json + +from django.db import transaction + +from arch.models import MachineArchitecture +from packages.models import Package +from packages.utils import parse_package_string, get_or_create_package +from util import get_url, download_url, info_message, error_message +from patchman.signals import progress_info_s, progress_update_s + + +def update_rocky_errata(): + """ Update Rocky Linux errata + """ + rocky_errata_api_host = 'https://apollo.build.resf.org' + rocky_errata_api_url = '/api/v3/' + if check_rocky_errata_endpoint_health(rocky_errata_api_host): + advisories = download_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url) + process_rocky_errata(advisories) + + +def check_rocky_errata_endpoint_health(rocky_errata_api_host): + """ Check Rocky Linux errata endpoint health + """ + rocky_errata_healthcheck_path = '/_/healthz' + rocky_errata_healthcheck_url = rocky_errata_api_host + rocky_errata_healthcheck_path + headers = {'Accept': 'application/json'} + res = get_url(rocky_errata_healthcheck_url, headers=headers) + data = download_url(res, 'Rocky Linux Errata API healthcheck') + try: + health = json.loads(data) + if health.get('status') == 'ok': + s = f'Rocky Linux Errata API healthcheck OK: {rocky_errata_healthcheck_url}' + info_message.send(sender=None, text=s) + return True + else: + s = f'Rocky Linux Errata API healthcheck FAILED: {rocky_errata_healthcheck_url}' + error_message.send(sender=None, text=s) + 
return False + except Exception as e: + s = f'Rocky Linux Errata API healthcheck exception occured: {rocky_errata_healthcheck_url}\n' + s += str(e) + error_message.send(sender=None, text=s) + return False + + +def download_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url): + """ Download Rocky Linux advisories and return the list + """ + rocky_errata_advisories_url = rocky_errata_api_host + rocky_errata_api_url + 'advisories/' + headers = {'Accept': 'application/json'} + page = 1 + pages = None + advisories = [] + params = {'page': 1, 'size': 100} + while True: + res = get_url(rocky_errata_advisories_url, headers=headers, params=params) + data = download_url(res, f'Rocky Linux Advisories {page}{"/"+pages if pages else ""}') + advisories_dict = json.loads(data) + advisories += advisories_dict.get('advisories') + links = advisories_dict.get('links') + if page == 1: + last_link = links.get('last') + pages = last_link.split('=')[-1] + next_link = links.get('next') + if next_link: + rocky_errata_advisories_url = rocky_errata_api_host + next_link + params = {} + page += 1 + else: + break + return advisories + + +def process_rocky_errata(advisories): + """ Process Rocky Linux errata + """ + from errata.utils import get_or_create_erratum + elen = len(advisories) + ptext = f'Processing {elen} Errata:' + progress_info_s.send(sender=None, ptext=ptext, plen=elen) + for i, advisory in enumerate(advisories): + progress_update_s.send(sender=None, index=i + 1) + erratum_name = advisory.get('name') + e_type = advisory.get('kind').lower().replace(' ', '') + issue_date = advisory.get('published_at') + synopsis = advisory.get('synopsis') + e, created = get_or_create_erratum( + name=erratum_name, + e_type=e_type, + issue_date=issue_date, + synopsis=synopsis, + ) + add_rocky_erratum_references(e, advisory) + add_rocky_erratum_oses(e, advisory) + add_rocky_erratum_packages(e, advisory) + + +def add_rocky_erratum_references(e, advisory): + """ Add Rocky Linux errata references 
+ """ + advisory_cves = advisory.get('cves') + for a_cve in advisory_cves: + cve_id = a_cve.get('cve') + e.add_cve(cve_id) + fixes = advisory.get('fixes') + for fix in fixes: + url = fix.get('source') + e.add_reference('Bug Report', url) + + +def add_rocky_erratum_oses(e, advisory): + """ Update OS Variant, OS Release and MachineArch for Rocky Linux errata + """ + affected_oses = advisory.get('affected_products') + from operatingsystems.models import OSVariant, OSRelease + for affected_os in affected_oses: + arch = affected_os.get('arch') + variant = affected_os.get('variant') + major_version = affected_os.get('major_version') + osrelease_name = f'{variant} {major_version}' + with transaction.atomic(): + osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) + osvariant_name = affected_os.get('name').replace(' (Legacy)', '') + with transaction.atomic(): + m_arch, created = MachineArchitecture.objects.get_or_create(name=arch) + with transaction.atomic(): + osvariant, created = OSVariant.objects.get_or_create(name=osvariant_name, arch=m_arch) + osvariant.osrelease = osrelease + osvariant.save() + e.osreleases.add(osrelease) + e.save() + + +def add_rocky_erratum_packages(e, advisory): + """ Parse and add packages for Rocky Linux errata + """ + from modules.utils import get_matching_modules + packages = advisory.get('packages') + for package in packages: + package_name = package.get('nevra') + if package_name: + name, epoch, ver, rel, dist, arch = parse_package_string(package_name) + p_type = Package.RPM + pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) + e.packages.add(pkg) + module_name = package.get('module_name') + module_context = package.get('module_context') + module_stream = package.get('module_stream') + module_version = package.get('module_version') + if module_name and module_context and module_stream and module_version: + matching_modules = get_matching_modules( + module_name, + module_stream, + module_version, + 
module_context, + arch) + for match in matching_modules: + match.packages.add(pkg) + e.save() diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py new file mode 100644 index 00000000..3dbc97d2 --- /dev/null +++ b/errata/sources/distros/ubuntu.py @@ -0,0 +1,203 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +import csv +import os +import json +from io import StringIO +from urllib.parse import urlparse + +from django.conf import settings + +from operatingsystems.models import OSRelease, OSVariant +from packages.models import Package, PackageName +from packages.utils import get_or_create_package, parse_package_string, find_evr +from patchman.signals import progress_info_s, progress_update_s, error_message +from util import get_url, download_url, get_sha256, bunzip2, has_setting_of_type + + +def update_ubuntu_errata(): + """ Update Ubuntu errata + """ + codenames = retrieve_ubuntu_codenames() + create_ubuntu_os_releases(codenames) + data = download_ubuntu_usn_db() + if data: + expected_checksum = download_ubuntu_usn_db_checksum() + actual_checksum = get_sha256(data) + if actual_checksum == expected_checksum: + extracted = bunzip2(data).decode() + parse_usn_data(extracted) + else: + e = 'Ubuntu USN DB checksum mismatch, skipping Ubuntu errata parsing\n' + e += f'{actual_checksum} (actual) != {expected_checksum} (expected)' + error_message.send(sender=None, text=e) + + +def 
download_ubuntu_usn_db(): + """ Download the Ubuntu USN database + """ + ubuntu_usn_db_json_url = 'https://usn.ubuntu.com/usn-db/database.json.bz2' + res = get_url(ubuntu_usn_db_json_url) + return download_url(res, 'Downloading Ubuntu Errata:') + + +def download_ubuntu_usn_db_checksum(): + """ Download the Ubuntu USN database checksum + """ + ubuntu_usn_db_checksum_url = 'https://usn.ubuntu.com/usn-db/database.json.bz2.sha256' + res = get_url(ubuntu_usn_db_checksum_url) + return download_url(res, 'Downloading Ubuntu Errata Checksum:').decode().split()[0] + + +def parse_usn_data(data): + """ Parse the Ubuntu USN data + """ + from errata.utils import get_or_create_erratum + advisories = json.loads(data) + accepted_releases = get_accepted_ubuntu_codenames() + elen = len(advisories) + ptext = f'Processing {elen} Errata:' + progress_info_s.send(sender=None, ptext=ptext, plen=elen) + for i, (usn_id, advisory) in enumerate(advisories.items()): + progress_update_s.send(sender=None, index=i + 1) + affected_releases = advisory.get('releases', {}).keys() + if not release_is_affected(affected_releases, accepted_releases): + continue + name = f'USN-{usn_id}' + issue_date = int(advisory.get('timestamp')) + synopsis = advisory.get('title') + e, created = get_or_create_erratum( + name=name, + e_type='security', + issue_date=issue_date, + synopsis=synopsis, + ) + add_ubuntu_erratum_osreleases(e, affected_releases, accepted_releases) + add_ubuntu_erratum_references(e, usn_id, advisory) + add_ubuntu_erratum_packages(e, advisory) + + +def add_ubuntu_erratum_osreleases(e, affected_releases, accepted_releases): + """ Add Ubuntu erratum OSReleases + """ + for release in affected_releases: + if release in accepted_releases: + osrelease = OSRelease.objects.get(codename=release) + e.osreleases.add(osrelease) + e.save() + + +def release_is_affected(affected_releases, accepted_releases): + """ Check if release is affected by the erratum + """ + for release in affected_releases: + if release 
in accepted_releases: + return True + return False + + +def add_ubuntu_erratum_references(e, usn_id, advisory): + """ Add Ubuntu erratum references and CVEs + """ + usn_url = f'https://ubuntu.com/security/notices/USN-{usn_id}' + e.add_reference('USN', usn_url) + cve_ids = advisory.get('cves') + if cve_ids: + for cve_id in cve_ids: + if cve_id.startswith('CVE'): + e.add_cve(cve_id) + else: + e.add_reference('Link', cve_id) + + +def add_ubuntu_erratum_packages(e, advisory): + """ Add Ubuntu erratum packages + """ + affected_releases = advisory.get('releases') + package_names = PackageName.objects.all() + p_type = Package.DEB + for release, packages in affected_releases.items(): + if release in get_accepted_ubuntu_codenames(): + arches = packages.get('archs') + if arches: + for arch, urls in arches.items(): + for url in urls.get('urls'): + path = urlparse(url).path + package_name = os.path.basename(path) + if package_name.endswith('.deb'): + name, epoch, ver, rel, dist, arch = parse_package_string(package_name) + pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) + e.packages.add(pkg) + else: + binaries = packages.get('binaries') + allbinaries = packages.get('allbinaries') + for package_name, package_data in (binaries | allbinaries).items(): + epoch, ver, rel = find_evr(package_data.get('version')) + try: + p_name = package_names.get(name=package_name) + except PackageName.DoesNotExist: + continue + matching_packages = Package.objects.filter( + name=p_name, + epoch=epoch, + version=ver, + release=rel, + packagetype=p_type, + ) + for package in matching_packages: + e.packages.add(package) + e.save() + + +def get_accepted_ubuntu_codenames(): + """ Get acceptable Ubuntu OS codenames + Can be overridden by specifying UBUNTU_CODENAMES in settings + """ + default_codenames = ['bionic', 'focal', 'jammy', 'noble'] + if has_setting_of_type('UBUNTU_CODENAMES', list): + accepted_codenames = settings.UBUNTU_CODENAMES + else: + accepted_codenames = default_codenames 
+ return accepted_codenames + + +def retrieve_ubuntu_codenames(): + """ Returns the codename to version mapping + """ + distro_info_url = 'https://debian.pages.debian.net/distro-info-data/ubuntu.csv' + res = get_url(distro_info_url) + ubuntu_csv = download_url(res, 'Downloading Ubuntu distro info:') + reader = csv.DictReader(StringIO(ubuntu_csv.decode())) + codename_to_version = {} + for row in reader: + version = row.get('version') + series = row.get('series') + codename_to_version[series] = version + return codename_to_version + + +def create_ubuntu_os_releases(codename_to_version): + """ Create OSReleases for acceptable Ubuntu codenames + """ + accepted_codenames = get_accepted_ubuntu_codenames() + for codename, version in codename_to_version.items(): + if codename in accepted_codenames: + osrelease_name = f'Ubuntu {version}' + osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name, codename=codename) + for osvariant in OSVariant.objects.filter(name__startswith=osrelease_name.replace(' LTS', '')): + osvariant.osrelease = osrelease + osvariant.save() diff --git a/errata/templates/errata/erratum_detail.html b/errata/templates/errata/erratum_detail.html new file mode 100644 index 00000000..5394caea --- /dev/null +++ b/errata/templates/errata/erratum_detail.html @@ -0,0 +1,74 @@ +{% extends "base.html" %} + +{% block page_title %}Erratum - {{ erratum }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Errata
  • {{ erratum }}
  • {% endblock %} + +{% block content_title %} Erratum - {{ erratum }} {% endblock %} + +{% block content %} + + + +
    +
    +
    + + + + + + + + + + + + + + + + + + +
    Name {{ erratum.name }}
    Type {{ erratum.e_type }}
    Published Date{{ erratum.issue_date }}
    Synopsis {{ erratum.synopsis }}
    Packages Affected {{ erratum.packages.count }}
    OS Releases Affected + {% for osrelease in erratum.osreleases.all %} + {{ osrelease }}
    + {% endfor %} +
    CVEs + + {% for cve in erratum.cves.all %} + + {% endfor %} +
    {{ cve }}
    +
    References + + {% for reference in erratum.references.all %} + + + + + {% endfor %} +
    {{ reference.er_type }}{{reference.url}}
    +
    +
    +
    +
    +
    +
    +
    + {% for package in erratum.packages.select_related %} + + {{ package }} + + {% endfor %} +
    +
    +
    +
    +
    + +{% endblock %} diff --git a/errata/templates/errata/erratum_list.html b/errata/templates/errata/erratum_list.html new file mode 100644 index 00000000..ef732386 --- /dev/null +++ b/errata/templates/errata/erratum_list.html @@ -0,0 +1,7 @@ +{% extends "objectlist.html" %} + +{% block page_title %}Errata{% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Errata
  • {% endblock %} + +{% block content_title %} Errata {% endblock %} diff --git a/errata/templates/errata/erratum_table.html b/errata/templates/errata/erratum_table.html new file mode 100644 index 00000000..ad933947 --- /dev/null +++ b/errata/templates/errata/erratum_table.html @@ -0,0 +1,29 @@ +{% load common %} + + + + + + + + + + + + + + + {% for erratum in object_list %} + + + + + + + + + + + {% endfor %} + +
    IDTypePublished DateSynopsisPackages AffectedOS Releases AffectedCVEsReferences
    {{ erratum.name }}{{ erratum.e_type }}{{ erratum.issue_date }}{{ erratum.synopsis }}{{ erratum.packages.count }}{{ erratum.osreleases.count }}{{ erratum.cves.count }}{{ erratum.references.count }}
    diff --git a/errata/templates/errata/erratumreference_list.html b/errata/templates/errata/erratumreference_list.html new file mode 100644 index 00000000..36ac6357 --- /dev/null +++ b/errata/templates/errata/erratumreference_list.html @@ -0,0 +1,7 @@ +{% extends "objectlist.html" %} + +{% block page_title %}Erratum References{% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Errata
  • Erratum References
  • {% endblock %} + +{% block content_title %} Erratum References {% endblock %} diff --git a/errata/templates/errata/erratumreference_table.html b/errata/templates/errata/erratumreference_table.html new file mode 100644 index 00000000..5a96ca5c --- /dev/null +++ b/errata/templates/errata/erratumreference_table.html @@ -0,0 +1,19 @@ +{% load common %} + + + + + + + + + + {% for eref in object_list %} + + + + + + {% endfor %} + +
    TypeURLLinked Errata
    {{ eref.er_type }}{{ eref.url }}{{ eref.erratum_set.count }}
    diff --git a/errata/urls.py b/errata/urls.py new file mode 100644 index 00000000..33624459 --- /dev/null +++ b/errata/urls.py @@ -0,0 +1,27 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.urls import path + +from errata import views + +app_name = 'errata' + +urlpatterns = [ + path('', views.erratum_list, name='erratum_list'), + path('errata//', views.erratum_detail, name='erratum_detail'), + path('references/', views.erratumreference_list, name='erratumreference_list'), +] diff --git a/errata/utils.py b/errata/utils.py new file mode 100644 index 00000000..16a98289 --- /dev/null +++ b/errata/utils.py @@ -0,0 +1,134 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from urllib.parse import urlparse + +from django.conf import settings +from django.db import transaction + +from util import tz_aware_datetime, has_setting_of_type +from errata.models import Erratum +from errata.sources.distros.arch import update_arch_errata +from errata.sources.distros.alma import update_alma_errata +from errata.sources.distros.debian import update_debian_errata +from errata.sources.distros.centos import update_centos_errata +from errata.sources.distros.rocky import update_rocky_errata +from errata.sources.distros.ubuntu import update_ubuntu_errata +from patchman.signals import progress_info_s, progress_update_s + + +def update_errata(): + """ Update all distros errata + """ + if has_setting_of_type('ERRATA_OS_UPDATES', list): + errata_os_updates = settings.ERRATA_OS_UPDATES + else: + errata_os_updates = ['rocky', 'alma', 'centos', 'arch', 'ubuntu', 'debian', 'rhel', 'suse'] + if 'arch' in errata_os_updates: + update_arch_errata() + if 'alma' in errata_os_updates: + update_alma_errata() + if 'rocky' in errata_os_updates: + update_rocky_errata() + if 'debian' in errata_os_updates: + update_debian_errata() + if 'ubuntu' in errata_os_updates: + update_ubuntu_errata() + if 'centos' in errata_os_updates: + update_centos_errata() + + +def get_or_create_erratum(name, e_type, issue_date, synopsis): + """ Get or create an Erratum object. 
Returns the object and created + """ + with transaction.atomic(): + e, created = Erratum.objects.get_or_create( + name=name, + e_type=e_type, + issue_date=tz_aware_datetime(issue_date), + synopsis=synopsis, + ) + return e, created + + +def fixup_erratum_reference(eref): + """ Fix up an ErratumReference object to normalize the URL and type + """ + url = urlparse(eref.get('url')) + er_type = eref.get('er_type') + if 'lists' in url.hostname or 'lists' in url.path: + er_type = 'Mailing List' + if er_type == 'bugzilla' or 'bug' in url.hostname or 'bugs' in url.path: + er_type = 'Bug Tracker' + if ('ubuntu.com' in url.hostname and 'usn/' in url.path) or url.hostname == 'usn.ubuntu.com': + netloc = url.netloc.replace('usn.', '').replace('www.', '') + path = url.path.replace('usn/', 'security/notices/').replace('usn', 'USN').rstrip('/') + usn_id = path.split('/')[-1] + if 'USN' not in usn_id: + path = '/'.join(path.split('/')[:-1]) + '/USN-' + usn_id + url = url._replace(netloc=netloc, path=path) + if url.hostname == 'ubuntu.com' and url.path.startswith('/security/notices/USN'): + er_type = 'USN' + if 'launchpad.net' in url.hostname: + er_type = 'Bug Tracker' + netloc = url.netloc.replace('bugs.', '') + bug = url.path.split('/')[-1] + path = f'/bugs/{bug}' + url = url._replace(netloc=netloc, path=path) + if url.hostname == 'bugzilla.redhat.com' and url.path == '/show_bug.cgi': + bug = url.query.split('=')[1] + path = f'/{bug}' + url = url._replace(path=path, query='') + if url.hostname == 'rhn.redhat.com': + netloc = url.netloc.replace('rhn', 'access') + path = url.path.replace('.html', '') + url = url._replace(netloc=netloc, path=path) + if url.hostname == 'access.redhat.com': + if 'l1d-cache-eviction-and-vector-register-sampling' in url.path or \ + 'security/vulnerabilities/speculativeexecution' in url.path or \ + 'security/vulnerabilities/stackguard' in url.path: + er_type = 'Link' + elif 'security/cve' in url.path: + return + else: + old_ref = url.path.split('/')[-1] + 
refs = old_ref.split('-') + if ':' not in url.path: + try: + new_ref = f'{refs[0]}-{refs[1]}:{refs[2]}' + path = url.path.replace(old_ref, new_ref) + url = url._replace(path=path) + except IndexError: + pass + er_type = refs[0].upper() + final_url = url.geturl() + if final_url in ['https://launchpad.net/bugs/', 'https://launchpad.net/bugs/XXXXXX']: + return + eref['er_type'] = er_type + eref['url'] = final_url + return eref + + +def mark_errata_security_updates(): + """ For each set of erratum packages, modify any PackageUpdate that + should be marked as a security update. + """ + elen = Erratum.objects.count() + ptext = f'Scanning {elen} Errata:' + progress_info_s.send(sender=None, ptext=ptext, plen=elen) + for i, e in enumerate(Erratum.objects.all()): + progress_update_s.send(sender=None, index=i + 1) + e.scan_for_security_updates() diff --git a/errata/views.py b/errata/views.py new file mode 100644 index 00000000..8b5fcb31 --- /dev/null +++ b/errata/views.py @@ -0,0 +1,138 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from django.shortcuts import get_object_or_404, render +from django.contrib.auth.decorators import login_required +from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger +from django.db.models import Q + +from rest_framework import viewsets + +from util.filterspecs import Filter, FilterBar +from errata.models import Erratum, ErratumReference +from errata.serializers import ErratumSerializer, ErratumReferenceSerializer + + +@login_required +def erratum_list(request): + errata = Erratum.objects.select_related() + + if 'e_type' in request.GET: + errata = errata.filter(e_type=request.GET['e_type']).distinct() + + if 'reference_id' in request.GET: + errata = errata.filter(references=int(request.GET['reference_id'])) + + if 'cve_id' in request.GET: + errata = errata.filter(cves__cve_id=request.GET['cve_id']) + + if 'package_id' in request.GET: + errata = errata.filter(packages=int(request.GET['package_id'])) + + if 'search' in request.GET: + terms = request.GET['search'].lower() + query = Q() + for term in terms.split(' '): + q = Q(name__icontains=term) | Q(synopsis__icontains=term) + query = query & q + errata = errata.filter(query) + else: + terms = '' + + page_no = request.GET.get('page') + paginator = Paginator(errata, 50) + + try: + page = paginator.page(page_no) + except PageNotAnInteger: + page = paginator.page(1) + except EmptyPage: + page = paginator.page(paginator.num_pages) + + filter_list = [] + filter_list.append(Filter(request, 'Erratum Type', 'e_type', + Erratum.objects.values_list('e_type', flat=True).distinct())) + filter_bar = FilterBar(request, filter_list) + + return render(request, + 'errata/erratum_list.html', + {'page': page, + 'filter_bar': filter_bar, + 'terms': terms}) + + +@login_required +def erratumreference_list(request): + erefs = ErratumReference.objects.select_related() + + if 'er_type' in request.GET: + erefs = erefs.filter(er_type=request.GET['er_type']).distinct() + + if 'erratum_id' in request.GET: + 
erefs = erefs.filter(erratum__id=int(request.GET['erratum_id'])) + + if 'search' in request.GET: + terms = request.GET['search'].lower() + query = Q() + for term in terms.split(' '): + q = Q(url__icontains=term) + query = query & q + erefs = erefs.filter(query) + else: + terms = '' + + page_no = request.GET.get('page') + paginator = Paginator(erefs, 50) + + try: + page = paginator.page(page_no) + except PageNotAnInteger: + page = paginator.page(1) + except EmptyPage: + page = paginator.page(paginator.num_pages) + + filter_list = [] + filter_list.append(Filter(request, 'Reference Type', 'er_type', + ErratumReference.objects.values_list('er_type', flat=True).distinct())) + filter_bar = FilterBar(request, filter_list) + + return render(request, + 'errata/erratumreference_list.html', + {'page': page, + 'filter_bar': filter_bar, + 'terms': terms}) + + +@login_required +def erratum_detail(request, erratum_name): + erratum = get_object_or_404(Erratum, name=erratum_name) + return render(request, + 'errata/erratum_detail.html', + {'erratum': erratum}) + + +class ErratumViewSet(viewsets.ModelViewSet): + """ API endpoint that allows errata to be viewed or edited. + """ + queryset = Erratum.objects.all() + serializer_class = ErratumSerializer + + +class ErratumReferenceViewSet(viewsets.ModelViewSet): + """ API endpoint that allows erratum references to be viewed or edited. 
+ """ + queryset = ErratumReference.objects.all() + serializer_class = ErratumReferenceSerializer diff --git a/hosts/forms.py b/hosts/forms.py index 115e23b1..931de03c 100644 --- a/hosts/forms.py +++ b/hosts/forms.py @@ -33,7 +33,7 @@ class Meta: fields = ('hostname', 'reversedns', 'ipaddress', - 'os', + 'osvariant', 'kernel', 'arch', 'reboot_required', diff --git a/hosts/models.py b/hosts/models.py index 8aa3570e..6f812727 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -29,7 +29,7 @@ from packages.models import Package, PackageUpdate from domains.models import Domain from repos.models import Repository -from operatingsystems.models import OS +from operatingsystems.models import OSVariant from arch.models import MachineArchitecture from modules.models import Module from patchman.signals import info_message, error_message @@ -44,7 +44,7 @@ class Host(models.Model): ipaddress = models.GenericIPAddressField() reversedns = models.CharField(max_length=255, blank=True, null=True) check_dns = models.BooleanField(default=False) - os = models.ForeignKey(OS, on_delete=models.CASCADE) + osvariant = models.ForeignKey(OSVariant, on_delete=models.CASCADE) kernel = models.CharField(max_length=255) arch = models.ForeignKey(MachineArchitecture, on_delete=models.CASCADE) domain = models.ForeignKey(Domain, on_delete=models.CASCADE) @@ -76,7 +76,7 @@ def show(self): text += f'IP address : {self.ipaddress!s}\n' text += f'Reverse DNS : {self.reversedns!s}\n' text += f'Domain : {self.domain!s}\n' - text += f'OS : {self.os!s}\n' + text += f'OS Variant : {self.osvariant!s}\n' text += f'Kernel : {self.kernel!s}\n' text += f'Architecture : {self.arch!s}\n' text += f'Last report : {self.lastreport!s}\n' @@ -132,7 +132,7 @@ def get_host_repo_packages(self): mirror__repo__hostrepo__enabled=True) else: hostrepos_q = \ - Q(mirror__repo__osgroup__os__host=self, + Q(mirror__repo__osrelease__osvariant__host=self, mirror__repo__arch=self.arch, mirror__enabled=True, mirror__repo__enabled=True) 
| \ @@ -146,7 +146,7 @@ def process_update(self, package, highest_package): host_repos = Q(repo__host=self) else: host_repos = \ - Q(repo__osgroup__os__host=self, repo__arch=self.arch) | \ + Q(repo__osrelease__osvariant__host=self, repo__arch=self.arch) | \ Q(repo__host=self) mirrors = highest_package.mirror_set.filter(host_repos) security = False @@ -200,8 +200,8 @@ def find_updates(self): update_ids = self.find_host_repo_updates(host_packages, repo_packages) else: - update_ids = self.find_osgroup_repo_updates(host_packages, - repo_packages) + update_ids = self.find_osrelease_repo_updates(host_packages, + repo_packages) kernel_update_ids = self.find_kernel_updates(kernel_packages, repo_packages) @@ -265,7 +265,7 @@ def find_host_repo_updates(self, host_packages, repo_packages): return update_ids - def find_osgroup_repo_updates(self, host_packages, repo_packages): + def find_osrelease_repo_updates(self, host_packages, repo_packages): update_ids = [] diff --git a/hosts/templates/hosts/host_delete.html b/hosts/templates/hosts/host_delete.html index b80ea170..28c5ef3b 100644 --- a/hosts/templates/hosts/host_delete.html +++ b/hosts/templates/hosts/host_delete.html @@ -16,8 +16,8 @@ Domain {{ host.domain }} Reporting IP Address {{ host.ipaddress }} Reverse DNS {{ host.reversedns }} - OS {{ host.os }} - OS Group {{ host.os.osgroup }} + OS Release {{ host.osvariant.osrelease }} + OS Variant {{ host.osvariant }} Kernel {{ host.kernel }} Architecture {{ host.arch }} @@ -33,7 +33,7 @@ Updates Available {{ host.updates.count }} Reboot Required {{ host.reboot_required }} Packages Installed {{ host.packages.count}} - Repos In Use{% if host.host_repos_only %}Host Repos{% else %}Host and OS Group Repos{% endif %} + Repos In Use{% if host.host_repos_only %}Host Repos{% else %}Host and OS Release Repos{% endif %} Last 3 reports diff --git a/hosts/templates/hosts/host_detail.html b/hosts/templates/hosts/host_detail.html index 421cda3d..da199d36 100644 ---
a/hosts/templates/hosts/host_detail.html +++ b/hosts/templates/hosts/host_detail.html @@ -26,8 +26,8 @@ Domain {{ host.domain }} Reporting IP Address {{ host.ipaddress }} Reverse DNS {{ host.reversedns }} - OS {{ host.os }} - OS Group {{ host.os.osgroup }} + OS Release {{ host.osvariant.osrelease }} + OS Variant {{ host.osvariant }} Kernel {{ host.kernel }} Architecture {{ host.arch }} @@ -42,8 +42,8 @@ Last Report {{ host.lastreport }} Updates Available {{ host.updates.count }} Reboot Required {{ host.reboot_required }} - Packages Installed {{ host.packages.count}} - Repos In Use{% if host.host_repos_only %}Host Repos{% else %}Host and OS Group Repos{% endif %} + Packages Installed {{ host.packages.count}} + Repos In Use{% if host.host_repos_only %}Host Repos{% else %}Host and OS Release Repos{% endif %} Last 3 reports @@ -114,16 +114,16 @@ {% endfor %} {% endif %} - {% with osrepos=host.os.osgroup.repos.select_related %} - {% if osrepos and not host.host_repos_only %} - {% for osrepo in osrepos %} - {% if osrepo.arch == host.arch %} + {% with osrelease_repos=host.osvariant.osrelease.repos.select_related %} + {% if osrelease_repos and not host.host_repos_only %} + {% for osrelease_repo in osrelease_repos %} + {% if osrelease_repo.arch == host.arch or osrelease_repo.arch == 'any' %} - {{ osrepo.name }} - OS Group + {{ osrelease_repo.name }} + OS Release N/A - {% yes_no_img osrepo.enabled %} - {% yes_no_img osrepo.security %} + {% yes_no_img osrelease_repo.enabled %} + {% yes_no_img osrelease_repo.security %} {% endif %} {% endfor %} diff --git a/hosts/templates/hosts/host_table.html b/hosts/templates/hosts/host_table.html index a1777944..da3f760e 100644 --- a/hosts/templates/hosts/host_table.html +++ b/hosts/templates/hosts/host_table.html @@ -5,7 +5,7 @@ Hostname Updates Running Kernel - OS + OS Variant Last Report Reboot Status @@ -17,7 +17,7 @@ {% with count=host.get_num_security_updates %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %} {% with 
count=host.get_num_bugfix_updates %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %} {{ host.kernel }} - {{ host.os }} + {{ host.osvariant }} {{ host.lastreport }}{% report_alert host.lastreport %} {% no_yes_img host.reboot_required %} diff --git a/hosts/views.py b/hosts/views.py index 0f38dcdc..e0a90584 100644 --- a/hosts/views.py +++ b/hosts/views.py @@ -29,7 +29,7 @@ from hosts.models import Host, HostRepo from domains.models import Domain from arch.models import MachineArchitecture -from operatingsystems.models import OS, OSGroup +from operatingsystems.models import OSVariant, OSRelease from reports.models import Report from hosts.forms import EditHostForm from hosts.serializers import HostSerializer, HostRepoSerializer @@ -37,7 +37,6 @@ @login_required def host_list(request): - hosts = Host.objects.select_related() if 'domain' in request.GET: @@ -55,11 +54,11 @@ def host_list(request): if 'arch' in request.GET: hosts = hosts.filter(arch=int(request.GET['arch'])) - if 'os' in request.GET: - hosts = hosts.filter(os=int(request.GET['os'])) + if 'osvariant' in request.GET: + hosts = hosts.filter(osvariant=int(request.GET['osvariant'])) - if 'osgroup' in request.GET: - hosts = hosts.filter(os__osgroup=int(request.GET['osgroup'])) + if 'osrelease' in request.GET: + hosts = hosts.filter(osvariant__osrelease=int(request.GET['osrelease'])) if 'tag' in request.GET: hosts = hosts.filter(tags__name__in=[request.GET['tag']]) @@ -94,10 +93,9 @@ def host_list(request): tags[tag.name] = tag.name filter_list.append(Filter(request, 'tag', tags)) filter_list.append(Filter(request, 'domain', Domain.objects.all())) - filter_list.append(Filter(request, 'os', OS.objects.all())) - filter_list.append(Filter(request, 'osgroup', OSGroup.objects.all())) - filter_list.append(Filter(request, 'arch', - MachineArchitecture.objects.all())) + filter_list.append(Filter(request, 'osvariant', OSVariant.objects.all())) + filter_list.append(Filter(request, 'osrelease', 
OSRelease.objects.all())) + filter_list.append(Filter(request, 'arch', MachineArchitecture.objects.all())) filter_list.append(Filter(request, 'reboot_required', {False: 'No', True: 'Yes'})) filter_bar = FilterBar(request, filter_list) @@ -106,30 +104,24 @@ def host_list(request): 'hosts/host_list.html', {'page': page, 'filter_bar': filter_bar, - 'terms': terms}, ) + 'terms': terms}) @login_required def host_detail(request, hostname): - host = get_object_or_404(Host, hostname=hostname) - reports = Report.objects.filter(host=hostname).order_by('-created')[:3] - hostrepos = HostRepo.objects.filter(host=host) - return render(request, 'hosts/host_detail.html', {'host': host, 'reports': reports, - 'hostrepos': hostrepos}, ) + 'hostrepos': hostrepos}) @login_required def host_edit(request, hostname): - host = get_object_or_404(Host, hostname=hostname) - reports = Report.objects.filter(host=hostname).order_by('-created')[:3] if request.method == 'POST': @@ -157,7 +149,6 @@ def host_edit(request, hostname): @login_required def host_delete(request, hostname): - host = get_object_or_404(Host, hostname=hostname) if request.method == 'POST': @@ -168,7 +159,6 @@ def host_delete(request, hostname): return redirect(reverse('hosts:host_list')) elif 'cancel' in request.POST: return redirect(host.get_absolute_url()) - reports = Report.objects.filter(host=hostname).order_by('-created')[:3] return render(request, diff --git a/modules/templates/modules/module_table.html b/modules/templates/modules/module_table.html index d01dcbc1..cda47ea3 100644 --- a/modules/templates/modules/module_table.html +++ b/modules/templates/modules/module_table.html @@ -19,7 +19,7 @@ {{ module.version }} {{ module.context }} {{ module.repo }} - {{ module.packages.count }} + {{ module.packages.count }} {{ module.host_set.count }} {% endfor %} diff --git a/operatingsystems/admin.py b/operatingsystems/admin.py index 49851bc6..15f5e200 100644 --- a/operatingsystems/admin.py +++ b/operatingsystems/admin.py @@ 
-16,12 +16,12 @@ # along with Patchman. If not, see from django.contrib import admin -from operatingsystems.models import OS, OSGroup +from operatingsystems.models import OSVariant, OSRelease -class OSGroupAdmin(admin.ModelAdmin): +class OSReleaseAdmin(admin.ModelAdmin): filter_horizontal = ('repos',) -admin.site.register(OS) -admin.site.register(OSGroup, OSGroupAdmin) +admin.site.register(OSVariant) +admin.site.register(OSRelease, OSReleaseAdmin) diff --git a/operatingsystems/fixtures/os.json b/operatingsystems/fixtures/os.json index a581487f..5192cee5 100644 --- a/operatingsystems/fixtures/os.json +++ b/operatingsystems/fixtures/os.json @@ -1,63 +1,63 @@ [ { - "model": "operatingsystems.os", + "model": "operatingsystems.osvariant", "fields": { "name": "Rocky Linux 9.3", - "osgroup": [ + "osrelease": [ "Rocky Linux 9", "Blue Onyx" ] } }, { - "model": "operatingsystems.os", + "model": "operatingsystems.osvariant", "fields": { "name": "Rocky Linux 8.9", - "osgroup": [ + "osrelease": [ "Rocky Linux 8", "Green Obsidian" ] } }, { - "model": "operatingsystems.os", + "model": "operatingsystems.osvariant", "fields": { "name": "Debian 12.5", - "osgroup": [ + "osrelease": [ "Debian 12", "bookworm" ] } }, { - "model": "operatingsystems.os", + "model": "operatingsystems.osvariant", "fields": { "name": "Arch Linux", - "osgroup": null + "osrelease": null } }, { - "model": "operatingsystems.os", + "model": "operatingsystems.osvariant", "fields": { - "name": "openSUSE Leap 15.5", - "osgroup": null + "name": "openSUSE Leap 15.6", + "osrelease": null } }, { - "model": "operatingsystems.os", + "model": "operatingsystems.osvariant", "fields": { "name": "AlmaLinux 8.10", - "osgroup": [ + "osrelease": [ "AlmaLinux 8", "Cerulean Leopard" ] } }, { - "model": "operatingsystems.os", + "model": "operatingsystems.osvariant", "fields": { "name": "AlmaLinux 9.5", - "osgroup": [ + "osrelease": [ "AlmaLinux 9", "Teal Serval" ] diff --git a/operatingsystems/fixtures/osgroup.json 
b/operatingsystems/fixtures/osgroup.json index e4b785ee..27a87a17 100644 --- a/operatingsystems/fixtures/osgroup.json +++ b/operatingsystems/fixtures/osgroup.json @@ -1,6 +1,6 @@ [ { - "model": "operatingsystems.osgroup", + "model": "operatingsystems.osrelease", "fields": { "name": "CentOS 7", "codename": "", @@ -8,7 +8,7 @@ } }, { - "model": "operatingsystems.osgroup", + "model": "operatingsystems.osrelease", "fields": { "name": "CentOS 8", "codename": "", @@ -16,7 +16,7 @@ } }, { - "model": "operatingsystems.osgroup", + "model": "operatingsystems.osrelease", "fields": { "name": "Rocky Linux 8", "codename": "Green Obsidian", @@ -24,7 +24,7 @@ } }, { - "model": "operatingsystems.osgroup", + "model": "operatingsystems.osrelease", "fields": { "name": "Rocky Linux 9", "codename": "Blue Onyx", @@ -32,7 +32,7 @@ } }, { - "model": "operatingsystems.osgroup", + "model": "operatingsystems.osrelease", "fields": { "name": "AlmaLinux 8", "codename": "Cerulean Leopard", @@ -40,7 +40,7 @@ } }, { - "model": "operatingsystems.osgroup", + "model": "operatingsystems.osrelease", "fields": { "name": "AlmaLinux 9", "codename": "Teal Serval", @@ -48,7 +48,7 @@ } }, { - "model": "operatingsystems.osgroup", + "model": "operatingsystems.osrelease", "fields": { "name": "Debian 12", "codename": "bookworm", diff --git a/operatingsystems/forms.py b/operatingsystems/forms.py index d21b6e20..8dcd35b9 100644 --- a/operatingsystems/forms.py +++ b/operatingsystems/forms.py @@ -18,44 +18,44 @@ from django.forms import ModelForm, ModelMultipleChoiceField from django.contrib.admin.widgets import FilteredSelectMultiple -from operatingsystems.models import OS, OSGroup +from operatingsystems.models import OSVariant, OSRelease from repos.models import Repository -class AddOSToOSGroupForm(ModelForm): +class AddOSVariantToOSReleaseForm(ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.fields['osgroup'].label = 'OS Groups' + self.fields['osrelease'].label = 'OS 
Releases' class Meta: - model = OS - fields = ('osgroup',) + model = OSVariant + fields = ('osrelease',) -class CreateOSGroupForm(ModelForm): +class CreateOSReleaseForm(ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.fields['name'].label = 'New OS Group' + self.fields['name'].label = 'New OS Release' class Meta: - model = OSGroup + model = OSRelease fields = ('name',) -class AddReposToOSGroupForm(ModelForm): +class AddReposToOSReleaseForm(ModelForm): repos = ModelMultipleChoiceField( queryset=Repository.objects.select_related(), required=False, label=None, - widget=FilteredSelectMultiple('Repos', False)) + widget=FilteredSelectMultiple('Repos', False, attrs={'size':'30'})) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['repos'].label = '' class Meta: - model = OSGroup + model = OSRelease fields = ('repos',) diff --git a/operatingsystems/managers.py b/operatingsystems/managers.py index 99bdfa1f..630484a1 100644 --- a/operatingsystems/managers.py +++ b/operatingsystems/managers.py @@ -17,6 +17,6 @@ from django.db import models -class OSGroupManager(models.Manager): +class OSReleaseManager(models.Manager): def get_by_natural_key(self, name, codename): return self.get(name=name, codename=codename) diff --git a/operatingsystems/models.py b/operatingsystems/models.py index e69ced6e..1b4c4245 100644 --- a/operatingsystems/models.py +++ b/operatingsystems/models.py @@ -20,20 +20,21 @@ from arch.models import MachineArchitecture from repos.models import Repository +from arch.models import MachineArchitecture -class OSGroup(models.Model): +class OSRelease(models.Model): name = models.CharField(max_length=255, unique=True) repos = models.ManyToManyField(Repository, blank=True) codename = models.CharField(max_length=255, blank=True) - from operatingsystems.managers import OSGroupManager - objects = OSGroupManager() + from operatingsystems.managers import OSReleaseManager + objects = 
OSReleaseManager() class Meta: - verbose_name = 'Operating System Group' - verbose_name_plural = 'Operating System Groups' + verbose_name = 'Operating System Release' + verbose_name_plural = 'Operating System Releases' unique_together = ('name', 'codename') ordering = ('name',) @@ -44,26 +45,25 @@ def __str__(self): return self.name def get_absolute_url(self): - return reverse('operatingsystems:osgroup_detail', args=[str(self.id)]) + return reverse('operatingsystems:osrelease_detail', args=[str(self.id)]) def natural_key(self): return (self.name, self.codename) -class OS(models.Model): +class OSVariant(models.Model): name = models.CharField(max_length=255, unique=True) arch = models.ForeignKey(MachineArchitecture, blank=True, null=True, on_delete=models.CASCADE) - osgroup = models.ForeignKey(OSGroup, blank=True, null=True, - on_delete=models.SET_NULL) + osrelease = models.ForeignKey(OSRelease, blank=True, null=True, on_delete=models.SET_NULL) class Meta: - verbose_name = 'Operating System' - verbose_name_plural = 'Operating Systems' + verbose_name = 'Operating System Variant' + verbose_name_plural = 'Operating System Variants' ordering = ('name',) def __str__(self): return self.name def get_absolute_url(self): - return reverse('operatingsystems:os_detail', args=[str(self.id)]) + return reverse('operatingsystems:osvariant_detail', args=[str(self.id)]) diff --git a/operatingsystems/serializers.py b/operatingsystems/serializers.py index 3edd33c3..8418c720 100644 --- a/operatingsystems/serializers.py +++ b/operatingsystems/serializers.py @@ -16,16 +16,16 @@ from rest_framework import serializers -from operatingsystems.models import OS, OSGroup +from operatingsystems.models import OSVariant, OSRelease -class OSSerializer(serializers.HyperlinkedModelSerializer): +class OSVariantSerializer(serializers.HyperlinkedModelSerializer): class Meta: - model = OS - fields = ('id', 'name', 'osgroup') + model = OSVariant + fields = ('id', 'name', 'osrelease', 'arch') -class 
OSGroupSerializer(serializers.HyperlinkedModelSerializer): +class OSReleaseSerializer(serializers.HyperlinkedModelSerializer): class Meta: - model = OSGroup - fields = ('id', 'name', 'repos') + model = OSRelease + fields = ('id', 'name', 'codename', 'repos') diff --git a/operatingsystems/urls.py b/operatingsystems/urls.py index 24e9500a..923c8b62 100644 --- a/operatingsystems/urls.py +++ b/operatingsystems/urls.py @@ -22,10 +22,11 @@ app_name = 'operatingsystems' urlpatterns = [ - path('', views.os_list, name='os_list'), - path('/', views.os_detail, name='os_detail'), - path('/delete/', views.os_delete, name='os_delete'), - path('groups/', views.osgroup_list, name='osgroup_list'), - path('groups//', views.osgroup_detail, name='osgroup_detail'), # noqa - path('groups//delete/', views.osgroup_delete, name='osgroup_delete'), # noqa + path('', views.os_landing, name='os_landing'), + path('variants/', views.osvariant_list, name='osvariant_list'), + path('variants//', views.osvariant_detail, name='osvariant_detail'), + path('variants//delete/', views.osvariant_delete, name='osvariant_delete'), + path('releases/', views.osrelease_list, name='osrelease_list'), + path('releases//', views.osrelease_detail, name='osrelease_detail'), + path('releases//delete/', views.osrelease_delete, name='osrelease_delete'), ] diff --git a/operatingsystems/views.py b/operatingsystems/views.py index 8966eb98..7873e627 100644 --- a/operatingsystems/views.py +++ b/operatingsystems/views.py @@ -24,17 +24,14 @@ from rest_framework import viewsets -from operatingsystems.models import OS, OSGroup -from operatingsystems.forms import AddOSToOSGroupForm, \ - AddReposToOSGroupForm, CreateOSGroupForm -from operatingsystems.serializers import OSSerializer, \ - OSGroupSerializer +from operatingsystems.models import OSVariant, OSRelease +from operatingsystems.forms import AddOSVariantToOSReleaseForm, AddReposToOSReleaseForm, CreateOSReleaseForm +from operatingsystems.serializers import OSVariantSerializer, 
OSReleaseSerializer @login_required -def os_list(request): - - oses = OS.objects.select_related() +def osvariant_list(request): + osvariants = OSVariant.objects.select_related() if 'search' in request.GET: terms = request.GET['search'].lower() @@ -42,12 +39,12 @@ def os_list(request): for term in terms.split(' '): q = Q(name__icontains=term) query = query & q - oses = oses.filter(query) + osvariants = osvariants.filter(query) else: terms = '' page_no = request.GET.get('page') - paginator = Paginator(oses, 50) + paginator = Paginator(osvariants, 50) try: page = paginator.page(page_no) @@ -56,86 +53,88 @@ def os_list(request): except EmptyPage: page = paginator.page(paginator.num_pages) - empty_oses = list(OS.objects.filter(host__isnull=True)) + empty_osvariants = list(OSVariant.objects.filter(host__isnull=True)) return render(request, - 'operatingsystems/os_list.html', - {'page': page, 'terms': terms, 'empty_oses': empty_oses}, ) + 'operatingsystems/osvariant_list.html', + {'page': page, + 'terms': terms, + 'empty_osvariants': empty_osvariants}) @login_required -def os_detail(request, os_id): - - os = get_object_or_404(OS, id=os_id) +def osvariant_detail(request, osvariant_id): + osvariant = get_object_or_404(OSVariant, id=osvariant_id) if request.method == 'POST': - create_form = CreateOSGroupForm(request.POST, prefix='create') + create_form = CreateOSReleaseForm(request.POST, prefix='create') if create_form.is_valid(): - osgroup = create_form.save() - os.osgroup = osgroup - os.save() - text = f'Created OS Group {osgroup!s} ' - text += f'and added OS {os!s} to it' + osrelease = create_form.save() + osvariant.osrelease = osrelease + osvariant.save() + text = f'Created OS Release {osrelease} and added OS Variant {osvariant} to it' messages.info(request, text) - return redirect(os.get_absolute_url()) - add_form = AddOSToOSGroupForm(request.POST, instance=os, prefix='add') + return redirect(osvariant.get_absolute_url()) + add_form = 
AddOSVariantToOSReleaseForm(request.POST, instance=osvariant, prefix='add') if add_form.is_valid(): add_form.save() - text = f'OS {os!s} added to OS Group {os.osgroup!s}' + text = f'OS Variant {osvariant} added to OS Release {osvariant.osrelease}' messages.info(request, text) - return redirect(os.get_absolute_url()) + return redirect(osvariant.get_absolute_url()) else: - add_form = AddOSToOSGroupForm(instance=os, prefix='add') - create_form = CreateOSGroupForm(prefix='create') + add_form = AddOSVariantToOSReleaseForm(instance=osvariant, prefix='add') + create_form = CreateOSReleaseForm(prefix='create') return render(request, - 'operatingsystems/os_detail.html', - {'os': os, + 'operatingsystems/osvariant_detail.html', + {'osvariant': osvariant, 'add_form': add_form, - 'create_form': create_form}, ) + 'create_form': create_form}) @login_required -def os_delete(request, os_id): - - if os_id == 'empty_oses': - os = False - oses = list(OS.objects.filter(host__isnull=True)) +def osvariant_delete(request, osvariant_id): + if osvariant_id == 'empty_osvariants': + osvariant = False + osvariants = list(OSVariant.objects.filter(host__isnull=True)) else: - os = get_object_or_404(OS, id=os_id) - oses = False + osvariant = get_object_or_404(OSVariant, id=osvariant_id) + osvariants = False if request.method == 'POST': if 'delete' in request.POST: - if os: - os.delete() - messages.info(request, f'OS {os!s} has been deleted') - return redirect(reverse('operatingsystems:os_list')) + if osvariant: + osvariant.delete() + messages.info(request, f'OS Variant {osvariant} has been deleted') + return redirect(reverse('operatingsystems:osvariant_list')) else: - if not oses: - text = 'There are no OS\'s with no Hosts' + if not osvariants: + text = 'There are no OS Variants with no Hosts' messages.info(request, text) - return redirect(reverse('operatingsystems:os_list')) - for os in oses: - os.delete() - text = f'{len(oses)!s} OS\'s have been deleted' + return 
redirect(reverse('operatingsystems:osvariant_list')) + for osvariant in osvariants: + osvariant.delete() + text = f'{len(osvariants)} OS Variants have been deleted' messages.info(request, text) - return redirect(reverse('operatingsystems:os_list')) + return redirect(reverse('operatingsystems:osvariant_list')) elif 'cancel' in request.POST: - if os_id == 'empty_oses': - return redirect(reverse('operatingsystems:os_list')) + if osvariant_id == 'empty_osvariants': + return redirect(reverse('operatingsystems:osvariant_list')) else: - return redirect(os.get_absolute_url()) + return redirect(osvariant.get_absolute_url()) return render(request, - 'operatingsystems/os_delete.html', - {'os': os, 'oses': oses}, ) + 'operatingsystems/osvariant_delete.html', + {'osvariant': osvariant, + 'osvariants': osvariants}) @login_required -def osgroup_list(request): +def osrelease_list(request): + osreleases = OSRelease.objects.select_related() - osgroups = OSGroup.objects.select_related() + if 'erratum_id' in request.GET: + osreleases = osreleases.filter(erratum=int(request.GET['erratum_id'])) if 'search' in request.GET: terms = request.GET['search'].lower() @@ -143,12 +142,12 @@ def osgroup_list(request): for term in terms.split(' '): q = Q(name__icontains=term) query = query & q - osgroups = osgroups.filter(query) + osreleases = osreleases.filter(query) else: terms = '' page_no = request.GET.get('page') - paginator = Paginator(osgroups, 50) + paginator = Paginator(osreleases, 50) try: page = paginator.page(page_no) @@ -158,61 +157,65 @@ page = paginator.page(paginator.num_pages) return render(request, - 'operatingsystems/osgroup_list.html', - {'page': page, 'terms': terms}, ) + 'operatingsystems/osrelease_list.html', + {'page': page, + 'terms': terms}) @login_required -def osgroup_detail(request, osgroup_id): - - osgroup = get_object_or_404(OSGroup, id=osgroup_id) +def osrelease_detail(request, osrelease_id): + osrelease = get_object_or_404(OSRelease, 
id=osrelease_id) if request.method == 'POST': - repos_form = AddReposToOSGroupForm(request.POST, instance=osgroup) + repos_form = AddReposToOSReleaseForm(request.POST, instance=osrelease) if repos_form.is_valid(): repos_form.save() messages.info(request, 'Modified Repositories') - return redirect(osgroup.get_absolute_url()) + return redirect(osrelease.get_absolute_url()) - repos_form = AddReposToOSGroupForm(instance=osgroup) + repos_form = AddReposToOSReleaseForm(instance=osrelease) return render(request, - 'operatingsystems/osgroup_detail.html', - {'osgroup': osgroup, 'repos_form': repos_form}, ) + 'operatingsystems/osrelease_detail.html', + {'osrelease': osrelease, + 'repos_form': repos_form}) @login_required -def osgroup_delete(request, osgroup_id): - - osgroup = get_object_or_404(OSGroup, id=osgroup_id) +def osrelease_delete(request, osrelease_id): + osrelease = get_object_or_404(OSRelease, id=osrelease_id) if request.method == 'POST': if 'delete' in request.POST: - osgroup.delete() - text = f'OS Group {osgroup!s} has been deleted' + osrelease.delete() + text = f'OS Release {osrelease} has been deleted' messages.info(request, text) - return redirect(reverse('operatingsystems:os_list')) + return redirect(reverse('operatingsystems:osvariant_list')) elif 'cancel' in request.POST: - return redirect(osgroup.get_absolute_url()) + return redirect(osrelease.get_absolute_url()) return render(request, - 'operatingsystems/osgroup_delete.html', - {'osgroup': osgroup}, ) + 'operatingsystems/osrelease_delete.html', + {'osrelease': osrelease}) + +@login_required +def os_landing(request): + return render(request, 'operatingsystems/os_landing.html') -class OSViewSet(viewsets.ModelViewSet): +class OSVariantViewSet(viewsets.ModelViewSet): """ - API endpoint that allows operating systems to be viewed or edited. + API endpoint that allows operating system variants to be viewed or edited. 
""" - queryset = OS.objects.all() - serializer_class = OSSerializer + queryset = OSVariant.objects.all() + serializer_class = OSVariantSerializer filterset_fields = ['name'] -class OSGroupViewSet(viewsets.ModelViewSet): +class OSReleaseViewSet(viewsets.ModelViewSet): """ - API endpoint that allows operating system groups to be viewed or edited. + API endpoint that allows operating system releases to be viewed or edited. """ - queryset = OSGroup.objects.all() - serializer_class = OSGroupSerializer + queryset = OSRelease.objects.all() + serializer_class = OSReleaseSerializer filterset_fields = ['name'] diff --git a/packages/admin.py b/packages/admin.py index 1a5a2dfa..979ba779 100644 --- a/packages/admin.py +++ b/packages/admin.py @@ -16,12 +16,7 @@ # along with Patchman. If not, see from django.contrib import admin -from packages.models import Package, PackageName, \ - PackageUpdate, Erratum, ErratumReference - - -class ErratumAdmin(admin.ModelAdmin): - readonly_fields = ('packages', 'references') +from packages.models import Package, PackageName, PackageUpdate class PackageAdmin(admin.ModelAdmin): @@ -35,5 +30,3 @@ class PackageUpdateAdmin(admin.ModelAdmin): admin.site.register(Package, PackageAdmin) admin.site.register(PackageName) admin.site.register(PackageUpdate, PackageUpdateAdmin) -admin.site.register(Erratum, ErratumAdmin) -admin.site.register(ErratumReference) diff --git a/packages/migrations/0002_delete_erratum_delete_erratumreference.py b/packages/migrations/0002_delete_erratum_delete_erratumreference.py new file mode 100644 index 00000000..418bf4e5 --- /dev/null +++ b/packages/migrations/0002_delete_erratum_delete_erratumreference.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.18 on 2025-02-04 23:41 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('packages', '0001_initial'), + ] + + operations = [ + migrations.DeleteModel( + name='Erratum', + ), + migrations.DeleteModel( + name='ErratumReference', + 
), + ] diff --git a/packages/models.py b/packages/models.py index 560d0efb..45cb6656 100644 --- a/packages/models.py +++ b/packages/models.py @@ -24,7 +24,7 @@ from rpm import labelCompare from debian.debian_support import Version, version_compare -from arch.models import PackageArchitecture, MachineArchitecture +from arch.models import PackageArchitecture from packages.managers import PackageManager @@ -41,7 +41,7 @@ def __str__(self): return self.name def get_absolute_url(self): - return reverse('packages:package_detail', args=[self.name]) + return reverse('packages:package_name_detail', args=[self.name]) class Package(models.Model): @@ -89,7 +89,7 @@ def __str__(self): return f'{self.name!s}-{epo!s}{self.version!s}{rel!s}-{self.arch!s}' def get_absolute_url(self): - return self.name.get_absolute_url() + return reverse('packages:package_detail', args=[self.id]) def __key(self): return (self.name, self.epoch, self.version, self.release, self.arch, @@ -210,35 +210,3 @@ def __str__(self): else: update_type = 'Bugfix' return f'{self.oldpackage!s} -> {self.newpackage!s} ({update_type!s})' - - -class ErratumReference(models.Model): - - url = models.URLField(max_length=255) - - def __str__(self): - return self.url - - -class Erratum(models.Model): - - name = models.CharField(max_length=255) - etype = models.CharField(max_length=255) - issue_date = models.DateTimeField() - synopsis = models.CharField(max_length=255) - packages = models.ManyToManyField(Package, blank=True) - arches = models.ManyToManyField(MachineArchitecture, blank=True) - from operatingsystems.models import OSGroup - releases = models.ManyToManyField(OSGroup, blank=True) - references = models.ManyToManyField(ErratumReference, blank=True) - - class Meta: - verbose_name = 'Erratum' - verbose_name_plural = 'Errata' - - def __str__(self): - text = f'{self.name!s} {self.issue_date!s} ({self.etype!s}) : ' - text += f'{self.packages.count()!s} packages, ' - text += f'{self.arches.count()!s} arches, ' - text += 
f'{self.releases.count()!s} releases' - return text diff --git a/packages/serializers.py b/packages/serializers.py index 8664a40f..902cb3e0 100644 --- a/packages/serializers.py +++ b/packages/serializers.py @@ -16,8 +16,7 @@ from rest_framework import serializers -from packages.models import PackageName, Package, PackageUpdate, \ - Erratum, ErratumReference +from packages.models import PackageName, Package, PackageUpdate class PackageNameSerializer(serializers.HyperlinkedModelSerializer): @@ -36,16 +35,3 @@ class PackageUpdateSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = PackageUpdate fields = ('id', 'oldpackage', 'newpackage', 'security') - - -class ErratumSerializer(serializers.HyperlinkedModelSerializer): - class Meta: - model = Erratum - fields = ('id', 'name', 'etype', 'issue_date', 'synopsis', 'arches', - 'releases', 'references') - - -class ErratumReferenceSerializer(serializers.HyperlinkedModelSerializer): - class Meta: - model = ErratumReference - fields = ('id', 'url') diff --git a/packages/templates/packages/package_detail.html b/packages/templates/packages/package_detail.html index 631ac113..112eacf4 100644 --- a/packages/templates/packages/package_detail.html +++ b/packages/templates/packages/package_detail.html @@ -9,30 +9,27 @@ {% block content %}
    - {% if allversions %} - - - - - - - - - - - {% for version in allversions %} - - - - - - - - {% endfor %} -
    EpochVersionReleaseArchTypeRepositoriesHosts
    {{ version.epoch }} {{ version.version }} {{ version.release }} {{ version.arch }} {{ version.get_packagetype_display }} Available from {{ version.repo_count }} Repositories Installed on {{ version.host_set.count }} Hosts
    - {% else %} - No versions of this Package exist. - {% endif %} + + + + + + + + + + + + + + + + + + + + +
    EpochVersionReleaseArchTypeRepositoriesHostsErrata
    {{ package.epoch }} {{ package.version }} {{ package.release }} {{ package.arch }} {{ package.get_packagetype_display }} Available from {{ package.repo_count }} Repositories Installed on {{ package.host_set.count }} Hosts Affected by {{ package.erratum_set.count }} Errata
    See All Versions of this Package
    {% endblock %} diff --git a/packages/templates/packages/package_name_detail.html b/packages/templates/packages/package_name_detail.html new file mode 100644 index 00000000..7bba3306 --- /dev/null +++ b/packages/templates/packages/package_name_detail.html @@ -0,0 +1,40 @@ +{% extends "base.html" %} + +{% block page_title %}Package - {{ package }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Packages
  • {{ package }}
  • {% endblock %} + +{% block content_title %} Package - {{ package }} {% endblock %} + +{% block content %} + +
    + {% if allversions %} + + + + + + + + + + + + {% for version in allversions %} + + + + + + + + + {% endfor %} +
    EpochVersionReleaseArchTypeRepositoriesHostsErrata
    {{ version.epoch }} {{ version.version }} {{ version.release }} {{ version.arch }} {{ version.get_packagetype_display }} Available from {{ version.repo_count }} Repositories Installed on {{ version.host_set.count }} Hosts Affected by {{ version.erratum_set.count }} Errata
    + {% else %} + No versions of this Package exist. + {% endif %} +
    + +{% endblock %} diff --git a/packages/templates/packages/package_name_list.html b/packages/templates/packages/package_name_list.html new file mode 100644 index 00000000..7288b519 --- /dev/null +++ b/packages/templates/packages/package_name_list.html @@ -0,0 +1,7 @@ +{% extends "objectlist.html" %} + +{% block page_title %}Packages{% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Packages
  • {% endblock %} + +{% block content_title %} Packages {% endblock %} diff --git a/packages/templates/packages/package_name_table.html b/packages/templates/packages/package_name_table.html new file mode 100644 index 00000000..39977d96 --- /dev/null +++ b/packages/templates/packages/package_name_table.html @@ -0,0 +1,17 @@ +{% load common %} + + + + + + + + + {% for packagename in object_list %} + + + + + {% endfor %} + +
    PackageVersions available
    {{ packagename }}{{ packagename.package_set.count }}
    diff --git a/packages/templates/packages/package_table.html b/packages/templates/packages/package_table.html index 39977d96..9163130b 100644 --- a/packages/templates/packages/package_table.html +++ b/packages/templates/packages/package_table.html @@ -2,15 +2,27 @@ - - + + + + + + + + + - {% for packagename in object_list %} + {% for package in object_list %} - - + + + + + + + {% endfor %} diff --git a/packages/urls.py b/packages/urls.py index c2f136dc..bc027807 100644 --- a/packages/urls.py +++ b/packages/urls.py @@ -22,6 +22,9 @@ app_name = 'packages' urlpatterns = [ - path('', views.package_list, name='package_list'), - path('/', views.package_detail, name='package_detail'), + path('', views.package_name_list, name='package_name_list'), + path('name/', views.package_name_list, name='package_name_list'), + path('name//', views.package_name_detail, name='package_name_detail'), + path('id/', views.package_list, name='package_list'), + path('id//', views.package_detail, name='package_detail'), ] diff --git a/packages/utils.py b/packages/utils.py index cb408993..bc7ed35b 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -16,17 +16,13 @@ # along with Patchman. 
If not, see import re -from defusedxml.lxml import _etree as etree -from django.conf import settings from django.core.exceptions import MultipleObjectsReturned from django.db import IntegrityError, DatabaseError, transaction -from util import bunzip2, get_url, download_url, get_sha1 -from packages.models import ErratumReference, PackageName, \ - Package, PackageUpdate -from arch.models import MachineArchitecture, PackageArchitecture -from patchman.signals import error_message, progress_info_s, progress_update_s +from arch.models import PackageArchitecture +from packages.models import PackageName, Package, PackageUpdate +from patchman.signals import error_message def find_evr(s): @@ -74,15 +70,24 @@ def find_version(s, epoch, release): return s[e:r] -def parse_package_string(pkg_str): - """ Parse a package string and return - name, epoch, ver, release, dist, arch +def parse_debian_package_string(pkg_str): + """ Parse a debian package string and return + name, epoch, ver, release, arch """ + parts = pkg_str.split('_') + name = parts[0] + full_version = parts[1] + arch = parts[2] + epoch, ver, rel = find_evr(full_version) + return name, epoch, ver, rel, None, arch - for suffix in ['rpm', 'deb']: - pkg_str = re.sub(f'.{suffix}$', '', pkg_str) - pkg_re = re.compile('(\S+)-(?:(\d*):)?(.*)-(~?\w+)[.+]?(~?\S+)?\.(\S+)$') # noqa - m = pkg_re.match(pkg_str) + +def parse_redhat_package_string(pkg_str): + """ Parse a redhat package string and return + name, epoch, ver, release, dist, arch + """ + rpm_pkg_re = re.compile(r'(\S+)-(?:(\d*):)?(.*)-(~?\w+)[.+]?(~?\S+)?\.(\S+)$') # noqa + m = rpm_pkg_re.match(pkg_str) if m: name, epoch, ver, rel, dist, arch = m.groups() else: @@ -94,162 +99,16 @@ def parse_package_string(pkg_str): return name, epoch, ver, rel, dist, arch -def update_errata(force=False): - """ Update CentOS errata from https://cefs.steve-meier.de/ - and mark packages that are security updates - """ - data = download_errata_checksum() - expected_checksum = 
parse_errata_checksum(data) - data = download_errata() - actual_checksum = get_sha1(data) - if actual_checksum != expected_checksum: - e = 'CEFS checksum did not match, skipping errata parsing' - error_message.send(sender=None, text=e) - else: - if data: - parse_errata(bunzip2(data), force) - - -def download_errata_checksum(): - """ Download CentOS errata checksum from https://cefs.steve-meier.de/ - """ - res = get_url('https://cefs.steve-meier.de/errata.latest.sha1') - return download_url(res, 'Downloading Errata Checksum:') - - -def download_errata(): - """ Download CentOS errata from https://cefs.steve-meier.de/ - """ - res = get_url('https://cefs.steve-meier.de/errata.latest.xml.bz2') - return download_url(res, 'Downloading CentOS Errata:') - - -def parse_errata_checksum(data): - """ Parse the errata checksum and return the bz2 checksum - """ - for line in data.decode('utf-8').splitlines(): - if line.endswith('errata.latest.xml.bz2'): - return line.split()[0] - - -def parse_errata(data, force): - """ Parse CentOS errata from https://cefs.steve-meier.de/ - """ - result = etree.XML(data) - errata_xml = result.findall('*') - elen = len(errata_xml) - ptext = f'Processing {elen!s} Errata:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) - for i, child in enumerate(errata_xml): - progress_update_s.send(sender=None, index=i + 1) - if not check_centos_release(child.findall('os_release')): - continue - e = parse_errata_tag(child.tag, child.attrib, force) - if e is not None: - parse_errata_children(e, child.getchildren()) - - -def parse_errata_tag(name, attribs, force): - """ Parse all tags that contain errata. If the erratum already exists, - we assume that it already has all refs, packages, releases and arches. 
- """ - e = None - if name.startswith('CE'): - issue_date = attribs['issue_date'] - references = attribs['references'] - synopsis = attribs['synopsis'] - if name.startswith('CEBA'): - etype = 'bugfix' - elif name.startswith('CESA'): - etype = 'security' - elif name.startswith('CEEA'): - etype = 'enhancement' - e = create_erratum(name=name, - etype=etype, - issue_date=issue_date, - synopsis=synopsis, - force=force) - if e is not None: - add_erratum_refs(e, references) - return e - - -def parse_errata_children(e, children): - """ Parse errata children to obtain architecture, release and packages - """ - for c in children: - if c.tag == 'os_arch': - m_arches = MachineArchitecture.objects.all() - with transaction.atomic(): - m_arch, c = m_arches.get_or_create(name=c.text) - e.arches.add(m_arch) - elif c.tag == 'os_release': - from operatingsystems.models import OSGroup - osgroups = OSGroup.objects.all() - osgroup_name = f'CentOS {c.text!s}' - with transaction.atomic(): - osgroup, c = osgroups.get_or_create(name=osgroup_name) - e.releases.add(osgroup) - elif c.tag == 'packages': - pkg_str = c.text.replace('.rpm', '') - pkg_re = re.compile('(\S+)-(?:(\d*):)?(.*)-(~?\w+)[.+]?(~?\S+)?\.(\S+)$') # noqa - m = pkg_re.match(pkg_str) - if m: - name, epoch, ver, rel, dist, arch = m.groups() - else: - e = 'Error parsing errata: ' - e += f'could not parse package "{pkg_str!s}"' - error_message.send(sender=None, text=e) - continue - if dist: - rel = f'{rel!s}.{dist!s}' - p_type = Package.RPM - pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) - e.packages.add(pkg) - - -def check_centos_release(releases_xml): - """ Check if we care about the release that the erratum affects +def parse_package_string(pkg_str): + """ Parse a package string and return + name, epoch, ver, release, dist, arch """ - releases = set() - for release in releases_xml: - releases.add(int(release.text)) - if hasattr(settings, 'MIN_CENTOS_RELEASE') and \ - isinstance(settings.MIN_CENTOS_RELEASE, 
int): - min_release = settings.MIN_CENTOS_RELEASE + if pkg_str.endswith('.deb'): + return parse_debian_package_string(pkg_str.removesuffix('.deb')) + elif pkg_str.endswith('.rpm'): + return parse_redhat_package_string(pkg_str.removesuffix('.rpm')) else: - # defaults to CentOS 6 - min_release = 6 - wanted_release = False - for release in releases: - if release >= min_release: - wanted_release = True - return wanted_release - - -def create_erratum(name, etype, issue_date, synopsis, force=False): - """ Create an Erratum object. Returns the object or None if it already - exists. To force update the erratum, set force=True - """ - from packages.models import Erratum - errata = Erratum.objects.all() - with transaction.atomic(): - e, c = errata.get_or_create(name=name, - etype=etype, - issue_date=issue_date, - synopsis=synopsis) - if c or force: - return e - - -def add_erratum_refs(e, references): - """ Add references to an Erratum object - """ - for reference in references.split(' '): - erratarefs = ErratumReference.objects.all() - with transaction.atomic(): - er, c = erratarefs.get_or_create(url=reference) - e.references.add(er) + return parse_redhat_package_string(pkg_str) def get_or_create_package(name, epoch, version, release, arch, p_type): @@ -359,32 +218,19 @@ def get_or_create_package_update(oldpackage, newpackage, security): return update -def mark_errata_security_updates(): - """ For each set of erratum packages, modify any PackageUpdate that - should be marked as a security update. 
+def get_matching_packages(name, epoch, version, release, p_type): + """ Get packges matching certain criteria + Returns the matching packages or None """ - package_updates = PackageUpdate.objects.all() - from packages.models import Erratum - errata = Erratum.objects.all() - elen = Erratum.objects.count() - ptext = f'Scanning {elen!s} Errata:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) - for i, erratum in enumerate(errata): - progress_update_s.send(sender=None, index=i + 1) - if erratum.etype == 'security': - for package in erratum.packages.all(): - affected_updates = package_updates.filter( - newpackage=package, - security=False - ) - for affected_update in affected_updates: - if not affected_update.security: - affected_update.security = True - try: - with transaction.atomic(): - affected_update.save() - except IntegrityError as e: - error_message.send(sender=None, text=e) - # a version of this update already exists that is - # marked as a security update, so delete this one - affected_update.delete() + try: + package_name = PackageName.objects.get(name=name) + except PackageName.DoesNotExist: + return + if package_name: + packages = Package.objects.filter( + name=package_name, + version=version, + release=release, + packagetype=p_type, + ) + return packages diff --git a/packages/views.py b/packages/views.py index 06c7106e..be1b228f 100644 --- a/packages/views.py +++ b/packages/views.py @@ -23,17 +23,74 @@ from rest_framework import viewsets from util.filterspecs import Filter, FilterBar -from packages.models import PackageName, Package, PackageUpdate, \ - Erratum, ErratumReference +from packages.models import PackageName, Package, PackageUpdate from arch.models import PackageArchitecture -from packages.serializers import PackageNameSerializer, \ - PackageSerializer, PackageUpdateSerializer, ErratumSerializer, \ - ErratumReferenceSerializer +from packages.serializers import PackageNameSerializer, PackageSerializer, PackageUpdateSerializer 
@login_required def package_list(request): + packages = Package.objects.select_related() + if 'arch' in request.GET: + packages = packages.filter(arch=int(request.GET['arch'])).distinct() + + if 'packagetype' in request.GET: + packages = packages.filter(packagetype=request.GET['packagetype']).distinct() + + if 'erratum_id' in request.GET: + packages = packages.filter(erratum=request.GET['erratum_id']).distinct() + + if 'host' in request.GET: + packages = packages.filter(host__hostname=request.GET['host']).distinct() + + if 'cve_id' in request.GET: + packages = packages.filter(erratum__cves__cve_id=request.GET['cve_id']).distinct() + + if 'mirror_id' in request.GET: + packages = packages.filter(mirror=request.GET['mirror_id']).distinct() + + if 'module_id' in request.GET: + packages = packages.filter(module=request.GET['module_id']).distinct() + + if 'search' in request.GET: + terms = request.GET['search'].lower() + query = Q() + for term in terms.split(' '): + q = Q(name__name__icontains=term) + query = query & q + packages = packages.filter(query) + else: + terms = '' + + page_no = request.GET.get('page') + paginator = Paginator(packages, 50) + + try: + page = paginator.page(page_no) + except PageNotAnInteger: + page = paginator.page(1) + except EmptyPage: + page = paginator.page(paginator.num_pages) + + filter_list = [] + filter_list.append( + Filter(request, 'arch', PackageArchitecture.objects.all())) +# Disabled due to being a huge slowdown +# filter_list.append( +# Filter( +# request, 'packagetype', +# Package.objects.values_list('packagetype', flat=True).distinct())) + filter_bar = FilterBar(request, filter_list) + + return render(request, + 'packages/package_list.html', + {'page': page, + 'filter_bar': filter_bar, + 'terms': terms}) + +@login_required +def package_name_list(request): packages = PackageName.objects.select_related() if 'arch' in request.GET: @@ -75,22 +132,29 @@ def package_list(request): filter_bar = FilterBar(request, filter_list) return 
render(request, - 'packages/package_list.html', + 'packages/package_name_list.html', {'page': page, 'filter_bar': filter_bar, - 'terms': terms}, ) + 'terms': terms, + 'table_template': 'packages/package_name_table.html'}) @login_required -def package_detail(request, packagename): +def package_detail(request, package_id): + package = get_object_or_404(Package, id=package_id) + return render(request, + 'packages/package_detail.html', + {'package': package}) + +@login_required +def package_name_detail(request, packagename): package = get_object_or_404(PackageName, name=packagename) allversions = Package.objects.select_related().filter(name=package.id) - return render(request, - 'packages/package_detail.html', + 'packages/package_name_detail.html', {'package': package, - 'allversions': allversions}, ) + 'allversions': allversions}) class PackageNameViewSet(viewsets.ModelViewSet): @@ -125,19 +189,3 @@ class PackageUpdateViewSet(viewsets.ModelViewSet): queryset = PackageUpdate.objects.all() serializer_class = PackageUpdateSerializer filterset_fields = ['oldpackage', 'newpackage', 'security'] - - -class ErratumViewSet(viewsets.ModelViewSet): - """ - API endpoint that allows errata to be viewed or edited. - """ - queryset = Erratum.objects.all() - serializer_class = ErratumSerializer - - -class ErratumReferenceViewSet(viewsets.ModelViewSet): - """ - API endpoint that allows erratum references to be viewed or edited. 
- """ - queryset = ErratumReference.objects.all() - serializer_class = ErratumReferenceSerializer diff --git a/patchman/settings.py b/patchman/settings.py index 6bf681db..27198cc6 100644 --- a/patchman/settings.py +++ b/patchman/settings.py @@ -87,11 +87,13 @@ LOCAL_APPS = [ 'arch.apps.ArchConfig', 'domains.apps.DomainsConfig', + 'errata.apps.ErrataConfig', 'hosts.apps.HostsConfig', 'modules.apps.ModulesConfig', 'operatingsystems.apps.OperatingsystemsConfig', 'packages.apps.PackagesConfig', 'repos.apps.ReposConfig', + 'security.apps.SecurityConfig', 'reports.apps.ReportsConfig', 'util.apps.UtilConfig', ] diff --git a/patchman/static/js/expandable-text.js b/patchman/static/js/expandable-text.js new file mode 100644 index 00000000..0f5861ce --- /dev/null +++ b/patchman/static/js/expandable-text.js @@ -0,0 +1,8 @@ +document.addEventListener('DOMContentLoaded', function() { + const expandableTexts = document.querySelectorAll('.expandable-text'); + expandableTexts.forEach(text => { + text.addEventListener('click', function() { + this.textContent = this.dataset.fullText; + }); + }); +}); diff --git a/patchman/urls.py b/patchman/urls.py index c5ffa847..e8576c7a 100644 --- a/patchman/urls.py +++ b/patchman/urls.py @@ -25,10 +25,12 @@ from arch import views as arch_views from domains import views as domain_views +from errata import views as errata_views from hosts import views as host_views from operatingsystems import views as os_views from packages import views as package_views from repos import views as repo_views +from security import views as security_views router = routers.DefaultRouter() router.register(r'package-architecture', arch_views.PackageArchitectureViewSet) @@ -36,13 +38,14 @@ router.register(r'domain', domain_views.DomainViewSet) router.register(r'host', host_views.HostViewSet) router.register(r'host-repo', host_views.HostRepoViewSet) -router.register(r'os', os_views.OSViewSet) -router.register(r'os-group', os_views.OSGroupViewSet) 
+router.register(r'os-variant', os_views.OSVariantViewSet) +router.register(r'os-release', os_views.OSReleaseViewSet) router.register(r'package-name', package_views.PackageNameViewSet) router.register(r'package', package_views.PackageViewSet) router.register(r'package-update', package_views.PackageUpdateViewSet) -router.register(r'erratum', package_views.ErratumViewSet) -router.register(r'erratum-reference', package_views.ErratumReferenceViewSet) +router.register(r'cve', security_views.CVEViewSet) +router.register(r'erratum', errata_views.ErratumViewSet) +router.register(r'erratum-reference', errata_views.ErratumReferenceViewSet) router.register(r'repo', repo_views.RepositoryViewSet) router.register(r'mirror', repo_views.MirrorViewSet) router.register(r'mirror-package', repo_views.MirrorPackageViewSet) @@ -55,11 +58,13 @@ path('api/', include(router.urls)), path('api-auth/', include('rest_framework.urls', namespace='rest_framework')), # noqa path('', include('util.urls', namespace='util')), + path('errata/', include('errata.urls', namespace='errata')), path('reports/', include('reports.urls', namespace='reports')), path('hosts/', include('hosts.urls', namespace='hosts')), path('packages/', include('packages.urls', namespace='packages')), path('modules/', include('modules.urls', namespace='modules')), path('repos/', include('repos.urls', namespace='repos')), + path('security/', include('security.urls', namespace='security')), path('os/', include('operatingsystems.urls', namespace='operatingsystems')), # noqa ] diff --git a/reports/models.py b/reports/models.py index c7aecb7b..95b954e2 100644 --- a/reports/models.py +++ b/reports/models.py @@ -22,7 +22,7 @@ from hosts.models import Host from arch.models import MachineArchitecture -from operatingsystems.models import OS, OSGroup +from operatingsystems.models import OSVariant, OSRelease from domains.models import Domain from patchman.signals import error_message, info_message @@ -105,27 +105,25 @@ def parse(self, data, 
meta): def process(self, find_updates=True, verbose=False): """ Process a report and extract os, arch, domain, packages, repos etc """ - if self.os and self.kernel and self.arch and not self.processed: - osgroup_codename = None + osrelease_codename = None match = re.match(r'(.*) \((.*)\)', self.os) if match: os_name = match.group(1) - osgroup_codename = match.group(2) + osrelease_codename = match.group(2) else: os_name = self.os - oses = OS.objects.all() + with transaction.atomic(): - os, c = oses.get_or_create(name=os_name) - if osgroup_codename: - osgroups = OSGroup.objects.filter(codename=osgroup_codename) - if osgroups.count() == 1: - os.osgroup = osgroups[0] + m_arch, created = MachineArchitecture.objects.get_or_create(name=self.arch) - machine_arches = MachineArchitecture.objects.all() with transaction.atomic(): - arch, c = machine_arches.get_or_create(name=self.arch) - os.arch = arch + osvariant, created = OSVariant.objects.get_or_create(name=os_name, arch=m_arch) + + if osrelease_codename: + osreleases = OSRelease.objects.filter(codename=osrelease_codename) + if osreleases.count() == 1: + osvariant.osrelease = osreleases[0] if not self.domain: self.domain = 'unknown' @@ -139,14 +137,13 @@ def process(self, find_updates=True, verbose=False): except herror: self.host = self.report_ip - hosts = Host.objects.all() with transaction.atomic(): - host, c = hosts.get_or_create( + host, c = Hosts.objects.get_or_create( hostname=self.host, defaults={ 'ipaddress': self.report_ip, 'arch': arch, - 'os': os, + 'osvariant': osvariant, 'domain': domain, 'lastreport': self.created, }) @@ -154,7 +151,7 @@ def process(self, find_updates=True, verbose=False): host.ipaddress = self.report_ip host.kernel = self.kernel host.arch = arch - host.os = os + host.osvariant = osvariant host.domain = domain host.lastreport = self.created host.tags = self.tags diff --git a/repos/templates/repos/mirror_table.html b/repos/templates/repos/mirror_table.html index 85ea4011..32e9b57c 100644 --- 
a/repos/templates/repos/mirror_table.html +++ b/repos/templates/repos/mirror_table.html @@ -9,7 +9,7 @@ - + @@ -20,13 +20,17 @@ - + - + diff --git a/repos/templates/repos/repo_detail.html b/repos/templates/repos/repo_detail.html index be42f3cf..175f2178 100644 --- a/repos/templates/repos/repo_detail.html +++ b/repos/templates/repos/repo_detail.html @@ -14,7 +14,7 @@
  • Details
  • Mirrors
  • Hosts with this Repository
  • -
  • OS Groups with this Repository
  • +
  • OS Releases with this Repository
  • Modules in this Repository
  • @@ -60,9 +60,9 @@ -
    +
    - {% gen_table repo.osgroup_set.all.distinct %} + {% gen_table repo.osrelease_set.all.distinct %}
    diff --git a/repos/views.py b/repos/views.py index e999f954..f566a4f7 100644 --- a/repos/views.py +++ b/repos/views.py @@ -29,7 +29,7 @@ from util.filterspecs import Filter, FilterBar from hosts.models import HostRepo from repos.models import Repository, Mirror, MirrorPackage -from operatingsystems.models import OSGroup +from operatingsystems.models import OSRelease from arch.models import MachineArchitecture from repos.forms import EditRepoForm, LinkRepoForm, CreateRepoForm, \ EditMirrorForm @@ -48,8 +48,8 @@ def repo_list(request): if 'arch' in request.GET: repos = repos.filter(arch=request.GET['arch']) - if 'osgroup' in request.GET: - repos = repos.filter(osgroup=request.GET['osgroup']) + if 'osrelease' in request.GET: + repos = repos.filter(osrelease=request.GET['osrelease']) if 'security' in request.GET: security = request.GET['security'] == 'True' @@ -96,7 +96,7 @@ def repo_list(request): MachineArchitecture.objects.all())) filter_list.append(Filter(request, 'enabled', {False: 'No', True: 'Yes'})) filter_list.append(Filter(request, 'security', {False: 'No', True: 'Yes'})) - filter_list.append(Filter(request, 'osgroup', OSGroup.objects.all())) + filter_list.append(Filter(request, 'osrelease', OSRelease.objects.all())) filter_bar = FilterBar(request, filter_list) return render(request, diff --git a/sbin/patchman b/sbin/patchman index 6fb90768..c947b393 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -34,16 +34,15 @@ from taggit.models import TaggedItem from hosts.models import Host from packages.models import Package, PackageName, PackageUpdate -from packages.utils import update_errata, mark_errata_security_updates +from errata.utils import update_errata, mark_errata_security_updates from repos.models import Repository from modules.models import Module from arch.models import PackageArchitecture, MachineArchitecture from reports.models import Report -from util import print_nocr, create_pbar, update_pbar, \ - set_verbosity, get_verbosity -from 
patchman.signals import \ - info_message, warning_message, error_message, debug_message, \ - progress_info_s, progress_update_s +from security.utils import update_cves, update_cwes +from util import print_nocr, create_pbar, update_pbar, set_verbosity, get_verbosity, tz_aware_datetime +from patchman.signals import info_message, warning_message, error_message, debug_message, progress_info_s, \ + progress_update_s def get_host(host=None, action='Performing action'): @@ -180,9 +179,10 @@ def clean_arches(): a.delete() update_pbar(i + 1) - marches = MachineArchitecture.objects.filter(host__isnull=True, - repository__isnull=True, - erratum__isnull=True) + marches = MachineArchitecture.objects.filter( + host__isnull=True, + repository__isnull=True, + ) mlen = marches.count() if mlen == 0: @@ -293,7 +293,7 @@ def host_updates_alt(host=None): """ updated_hosts = [] hosts = get_hosts(host, 'Finding updates') - ts = datetime.now().replace(microsecond=0) + ts = tz_aware_datetime(datetime.now().replace(microsecond=0)) for host in hosts: info_message.send(sender=None, text=str(host)) if host not in updated_hosts: @@ -596,14 +596,11 @@ def collect_args(): '-D', '--diff', metavar=('hostA', 'hostB'), nargs=2, help='Show differences between two Hosts in diff-like output') parser.add_argument( - '-ue', '--update-errata', action='store_true', - help='Update CentOS errata from https://cefs.steve-meier.de/') + '-e', '--update-errata', action='store_true', + help='Update Errata') parser.add_argument( - '-me', '--mark-errata-security-updates', action='store_true', - help='Mark updates as security updates based on downloaded errata') - parser.add_argument( - '-e', '--errata', action='store_true', - help='Download CentOS errata from https://cefs.steve-meier.de/') + '-v', '--update-cves', action='store_true', + help='Update CVEs from https://cve.org') return parser @@ -678,16 +675,14 @@ def process_args(args): if args.dns_checks: dns_checks(args.host) showhelp = False - if args.errata: - 
update_errata(args.force) - mark_errata_security_updates() - showhelp = False if args.update_errata: - update_errata(args.force) - showhelp = False - if args.mark_errata_security_updates: + update_errata() mark_errata_security_updates() showhelp = False + if args.update_cves: + update_cves() + update_cwes() + showhelp = False return showhelp diff --git a/security/admin.py b/security/admin.py new file mode 100644 index 00000000..9c90a8fe --- /dev/null +++ b/security/admin.py @@ -0,0 +1,23 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.contrib import admin +from security.models import CWE, CVSS, CVE + + +admin.site.register(CWE) +admin.site.register(CVSS) +admin.site.register(CVE) diff --git a/security/apps.py b/security/apps.py new file mode 100644 index 00000000..6e578334 --- /dev/null +++ b/security/apps.py @@ -0,0 +1,21 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.apps import AppConfig + + +class SecurityConfig(AppConfig): + name = 'security' diff --git a/security/managers.py b/security/managers.py new file mode 100644 index 00000000..4dfcffaf --- /dev/null +++ b/security/managers.py @@ -0,0 +1,22 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.db import models + + +class CVEManager(models.Manager): + def get_queryset(self): + return super().get_queryset().select_related() diff --git a/security/migrations/0001_initial.py b/security/migrations/0001_initial.py new file mode 100644 index 00000000..5655f8b0 --- /dev/null +++ b/security/migrations/0001_initial.py @@ -0,0 +1,48 @@ +# Generated by Django 4.2.18 on 2025-02-08 20:40 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='CVSS', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('score', models.DecimalField(decimal_places=1, max_digits=3, null=True)), + ('severity', models.CharField(blank=True, max_length=255, null=True)), + ('version', models.DecimalField(decimal_places=1, max_digits=2)), + ('vector_string', models.CharField(blank=True, max_length=255, null=True)), + ], + ), + 
migrations.CreateModel( + name='CWE', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('cwe_id', models.CharField(max_length=255, unique=True)), + ('name', models.CharField(blank=True, max_length=255, null=True)), + ('description', models.CharField(blank=True, max_length=65535, null=True)), + ], + ), + migrations.CreateModel( + name='CVE', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('cve_id', models.CharField(max_length=255, unique=True)), + ('title', models.CharField(blank=True, max_length=255, null=True)), + ('description', models.CharField(max_length=65535)), + ('reserved_date', models.DateTimeField(blank=True, null=True)), + ('published_date', models.DateTimeField(blank=True, null=True)), + ('rejected_date', models.DateTimeField(blank=True, null=True)), + ('updated_date', models.DateTimeField(blank=True, null=True)), + ('cvss_scores', models.ManyToManyField(blank=True, to='security.cvss')), + ('cwes', models.ManyToManyField(blank=True, to='security.cwe')), + ], + ), + ] diff --git a/security/migrations/0002_alter_cve_options.py b/security/migrations/0002_alter_cve_options.py new file mode 100644 index 00000000..91e6b620 --- /dev/null +++ b/security/migrations/0002_alter_cve_options.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-02-11 03:51 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0001_initial'), + ] + + operations = [ + migrations.AlterModelOptions( + name='cve', + options={'ordering': ('cve_id',)}, + ), + ] diff --git a/security/migrations/__init__.py b/security/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/security/models.py b/security/models.py new file mode 100644 index 00000000..a957adb9 --- /dev/null +++ b/security/models.py @@ -0,0 +1,162 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part 
of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +import json +import re + +from django.db import models +from django.urls import reverse + +from security.managers import CVEManager +from util import get_url, download_url, tz_aware_datetime, error_message + + +class CWE(models.Model): + + cwe_id = models.CharField(max_length=255, unique=True) + name = models.CharField(max_length=255, blank=True, null=True) + description = models.CharField(max_length=65535, blank=True, null=True) + + def __str__(self): + return f'{self.cwe_id} - {self.name}' + + def get_absolute_url(self): + return reverse('security:cwe_detail', args=[self.cwe_id]) + + @property + def int_id(self): + return int(self.cwe_id.split('-')[1]) + + def download_cwe_data(self): + int_id = self.int_id + cwe_url = f'https://cwe-api.mitre.org/api/v1/cwe/{int_id}' + res = get_url(cwe_url) + data = download_url(res, f'Downloading {self.cwe_id} data') + cwe_json = json.loads(data) + if cwe_json == 'at least one CWE not found': + return + cwe = cwe_json[0] + if cwe.get('Type').endswith('weakness'): + weakness_url = f'https://cwe-api.mitre.org/api/v1/cwe/weakness/{int_id}' + res = get_url(weakness_url) + data = download_url(res, f'Downloading {self.cwe_id} weakness data') + weakness_json = json.loads(data) + for weakness in weakness_json.get('Weaknesses'): + if int(weakness.get('ID')) == int_id: + self.name = weakness.get('Name') + self.description = weakness.get('Description') + self.save() + + +class 
CVSS(models.Model): + + score = models.DecimalField(max_digits=3, decimal_places=1, null=True) + severity = models.CharField(max_length=255, blank=True, null=True) + version = models.DecimalField(max_digits=2, decimal_places=1) + vector_string = models.CharField(max_length=255, blank=True, null=True) + + def __str__(self): + return f'{self.score} ({self.severity}) [{self.vector_string}]' + + +class CVE(models.Model): + + cve_id = models.CharField(max_length=255, unique=True) + title = models.CharField(max_length=255, blank=True, null=True) + description = models.CharField(max_length=65535) + reserved_date = models.DateTimeField(blank=True, null=True) + published_date = models.DateTimeField(blank=True, null=True) + rejected_date = models.DateTimeField(blank=True, null=True) + updated_date = models.DateTimeField(blank=True, null=True) + cwes = models.ManyToManyField(CWE, blank=True) + cvss_scores = models.ManyToManyField(CVSS, blank=True) + + objects = CVEManager() + + class Meta: + ordering = ('cve_id',) + + def __str__(self): + return self.cve_id + + def get_absolute_url(self): + return reverse('security:cve_detail', args=[self.cve_id]) + + def download_cve_data(self): + cve_url = f'https://cveawg.mitre.org/api/cve/{self.cve_id}' + res = get_url(cve_url) + if res.status_code == 404: + error_message.send(sender=None, text=f'404 - Skipping {self.cve_id}') + return + data = download_url(res, f'Downloading {self.cve_id} data') + cve_json = json.loads(data) + self.parse_cve_data(cve_json) + + def parse_cve_data(self, cve_json): + cve_metadata = cve_json.get('cveMetadata') + reserved_date = cve_metadata.get('dateReserved') + if reserved_date: + self.reserved_date = tz_aware_datetime(cve_metadata.get('dateReserved')) + rejected_date = cve_metadata.get('dateRejected') + if rejected_date: + self.rejected_date = tz_aware_datetime(rejected_date) + published_date = cve_metadata.get('datePublished') + if published_date: + self.published_date = 
tz_aware_datetime(cve_metadata.get('datePublished')) + updated_date = cve_metadata.get('dateUpdated') + if updated_date: + self.updated_date = tz_aware_datetime(cve_metadata.get('dateUpdated')) + cna_container = cve_json.get('containers').get('cna') + title = cna_container.get('title') + if not title: + product = cna_container.get('product') + descriptions = cna_container.get('descriptions') + if descriptions: + self.description = descriptions[0].get('value') + problem_types = cna_container.get('problemTypes', []) + for problem_type in problem_types: + descriptions = problem_type.get('descriptions') + if descriptions: + for description in descriptions: + cwe_description = description.get('description') + if description.get('type') == 'CWE': + cwe_id = description.get('cweId') + if cwe_id: + cwe, created = CWE.objects.get_or_create(cwe_id=cwe_id) + self.cwes.add(cwe) + cwe_ids = re.findall(r'CWE-\d+', cwe_description) + for cwe_id in cwe_ids: + cwe, created = CWE.objects.get_or_create(cwe_id=cwe_id) + self.cwes.add(cwe) + if not title: + if product and cwe_description: + self.title = f'{product} - {cwe_description}' + else: + self.title = '' + metrics = cna_container.get('metrics') + if metrics: + for metric in metrics: + if metric.get('format') == 'CVSS': + for key, value in metric.items(): + if key.startswith('cvss'): + cvss_score, created = CVSS.objects.get_or_create( + score=value.get('baseScore'), + severity=value.get('baseSeverity'), + version=value.get('version'), + vector_string=value.get('vectorString'), + ) + self.cvss_scores.add(cvss_score) + self.save() diff --git a/security/serializers.py b/security/serializers.py new file mode 100644 index 00000000..c66a72ad --- /dev/null +++ b/security/serializers.py @@ -0,0 +1,32 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. 
+# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from rest_framework import serializers + +from security.models import CVE, CWE + + +class CWESerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = CWE + fields = ('cwe_id', 'title', 'description') + + +class CVESerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = CVE + fields = ('cve_id', 'title', 'description', 'cvss_score', 'cwe', + 'registered_date', 'published_date', 'updated_date') diff --git a/security/templates/security/cve_detail.html b/security/templates/security/cve_detail.html new file mode 100644 index 00000000..27323976 --- /dev/null +++ b/security/templates/security/cve_detail.html @@ -0,0 +1,83 @@ +{% extends "base.html" %} + +{% block page_title %}CVE - {{ cve }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Security
  • CVEs
  • {{ cve }}
  • {% endblock %} + +{% block content_title %} CVE - {{ cve }} {% endblock %} + +{% block content %} + + + +
    +
    +
    +
    PackageVersions availablePackageEpochVersionReleaseArchTypeRepositoriesHostsErrata
    {{ packagename }}{{ packagename.package_set.count }}{{ package }} {{ package.epoch }} {{ package.version }} {{ package.release }} {{ package.arch }} {{ package.get_packagetype_display }} Available from {{ package.repo_count }} Repositories Installed on {{ package.host_set.count }} Hosts Affected by {{ package.erratum_set.count }} Errata
    Refresh Mirrorlist/Metalink Last Access OKTimestampTimestamp Checksum Delete Edit
    {{ mirror.id }} {{ mirror.url|truncatechars:25 }}{{ mirror.packages.count }} + {% if not mirror.mirrorlist %} + {{ mirror.packages.count }} + {% endif %} + {% yes_no_img mirror.enabled 'Enabled' 'Not Enabled' %} {% yes_no_img mirror.refresh 'Yes' 'No' %} {% yes_no_img mirror.mirrorlist 'Yes' 'No' %} {% yes_no_img mirror.last_access_ok 'Yes' 'No' %} {{ mirror.timestamp }}{{ mirror.file_checksum|truncatechars:16 }}{% if not mirror.mirrorlist %}{{ mirror.file_checksum|truncatechars:16 }}{% endif %} {% bootstrap_icon "trash" %} Delete this Mirror {% bootstrap_icon "edit" %} Edit this Mirror
    + + + + + + + + + + + + + + + + + + + + + + + + + +
    CVE ID{{ cve.cve_id }}
    Title{{ cve.title }}
    Description{{ cve.description }}
    Reserved{{ cve.reserved_date|default_if_none:'' }}
    Rejected{{ cve.rejected_date|default_if_none:'' }}
    Published{{ cve.published_date|default_if_none:'' }}
    Updated{{ cve.updated_date|default_if_none:'' }}
    CVSS Scores + {% for score in cve.cvss_scores.all %} + {{ score.score }} - {{ score.severity }} (CVSS {{ score.version }})
    + {% endfor %} +
    CWEs + {% for cwe in cve.cwes.all %} + {{ cwe.cwe_id }} - {{ cwe.name }}
    + {% endfor %} +
    Affected Packages{{ packages|length }}
    Errata{{ cve.erratum_set.count }}
    OSes Affected + {% for osrelease in osreleases %} + {{ osrelease }}
    + {% endfor %} +
    URLs + + + + + {% for reference in cve.erratum_set.references.all %} + + + + + {% endfor %} +
    NIST
    MITRE
    osv.dev
    {{ reference.er_type }}{{reference.url}}
    +
    + + +
    +
    + {% for package in packages %} + + {{ package }} + + {% endfor %} +
    +
    + + +{% endblock %} diff --git a/security/templates/security/cve_list.html b/security/templates/security/cve_list.html new file mode 100644 index 00000000..a9027525 --- /dev/null +++ b/security/templates/security/cve_list.html @@ -0,0 +1,7 @@ +{% extends "objectlist.html" %} + +{% block page_title %}CVEs{% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Security
  • CVEs
  • {% endblock %} + +{% block content_title %} CVEs {% endblock %} diff --git a/security/templates/security/cve_table.html b/security/templates/security/cve_table.html new file mode 100644 index 00000000..81259636 --- /dev/null +++ b/security/templates/security/cve_table.html @@ -0,0 +1,37 @@ +{% load common %} + + + + + + + + + + + + + + + + + {% for cve in object_list %} + + + + + + + + + + + + + {% endfor %} + +
    CVE IDLinksDescriptionCVSS ScoresCWEsReservedRejectedPublishedUpdatedErrata
    {{ cve.cve_id }} + [NIST] + [MITRE] + [osv.dev] + {{ cve.description|truncatechars:60 }}{% for score in cve.cvss_scores.all %} {{ score.score }} {% endfor %}{% for cwe in cve.cwes.all %} {{ cwe.cwe_id }} {% endfor %}{{ cve.reserved_date|date|default_if_none:'' }}{{ cve.rejected_date|date|default_if_none:'' }}{{ cve.published_date|date|default_if_none:'' }}{{ cve.updated_date|date|default_if_none:'' }}{{ cve.erratum_set.count }}
    diff --git a/security/templates/security/cwe_detail.html b/security/templates/security/cwe_detail.html new file mode 100644 index 00000000..5bab2a0d --- /dev/null +++ b/security/templates/security/cwe_detail.html @@ -0,0 +1,20 @@ +{% extends "base.html" %} + +{% block page_title %}CWE - {{ cwe }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Security
  • CWEs
  • {{ cwe }}
  • {% endblock %} + +{% block content_title %} CWE - {{ cwe }} {% endblock %} + +{% block content %} + +
    + + + + + +
    CWE ID{{ cwe.cwe_id }}
    Name{{ cwe.name }}
    Description{{ cwe.description }}
    Affected CVEs{{ cwe.cve_set.count }}
    +
    + +{% endblock %} diff --git a/security/templates/security/cwe_list.html b/security/templates/security/cwe_list.html new file mode 100644 index 00000000..de74683e --- /dev/null +++ b/security/templates/security/cwe_list.html @@ -0,0 +1,7 @@ +{% extends "objectlist.html" %} + +{% block page_title %}CWEs{% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Security
  • CWEs
  • {% endblock %} + +{% block content_title %} CWEs {% endblock %} diff --git a/security/templates/security/cwe_table.html b/security/templates/security/cwe_table.html new file mode 100644 index 00000000..85ccd118 --- /dev/null +++ b/security/templates/security/cwe_table.html @@ -0,0 +1,21 @@ +{% load common %} + + + + + + + + + + + {% for cwe in object_list %} + + + + + + + {% endfor %} + +
    CWE IDNameDescriptionCVEs
    {{ cwe.cwe_id }}{{ cwe.name }}{{ cwe.description|truncatechars:120 }}{{ cwe.cve_set.count }}
    diff --git a/security/templates/security/security_landing.html b/security/templates/security/security_landing.html new file mode 100644 index 00000000..ad0f175a --- /dev/null +++ b/security/templates/security/security_landing.html @@ -0,0 +1,19 @@ +{% extends "base.html" %} + +{% block page_title %} Security {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Security
  • {% endblock %} + +{% block content_title %} Security {% endblock %} + +{% block content %} + + + +{% endblock %} diff --git a/security/urls.py b/security/urls.py new file mode 100644 index 00000000..b3bf3506 --- /dev/null +++ b/security/urls.py @@ -0,0 +1,29 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.urls import path + +from security import views + +app_name = 'security' + +urlpatterns = [ + path('', views.security_landing, name='security_landing'), + path('cves', views.cve_list, name='cve_list'), + path('cves/', views.cve_detail, name='cve_detail'), + path('cwes', views.cwe_list, name='cwe_list'), + path('cwes/', views.cwe_detail, name='cwe_detail'), +] diff --git a/security/utils.py b/security/utils.py new file mode 100644 index 00000000..c224f5d3 --- /dev/null +++ b/security/utils.py @@ -0,0 +1,48 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from security.models import CVE, CWE + + +def get_cve_reference(cve_id): + """ Given a CVE ID, return a dictionary with the URL to the CVE record. + """ + url = f'https://www.cve.org/CVERecord?id={cve_id}' + return {'er_type': 'CVE', 'url': url} + + +def get_or_create_cve(cve_id): + """ Given a CVE ID, get or create a CVE object. + """ + cve, created = CVE.objects.get_or_create(cve_id=cve_id) + return cve + + +def update_cves(): + """ Download the latest CVE data from the CVE API. + e.g. https://cveawg.mitre.org/api/cve/CVE-2024-1234 + """ + for cve in CVE.objects.all(): + cve.download_cve_data() + + +def update_cwes(): + """ Download the latest CWEs from the CWE API. + e.g. https://cwe-api.mitre.org/api/v1/cwe/74,79 + https://cwe-api.mitre.org/api/v1/cwe/weakness/79 + """ + for cwe in CWE.objects.all(): + cwe.download_cwe_data() diff --git a/security/views.py b/security/views.py new file mode 100644 index 00000000..3ab25cc9 --- /dev/null +++ b/security/views.py @@ -0,0 +1,140 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from django.shortcuts import get_object_or_404, render +from django.contrib.auth.decorators import login_required +from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger +from django.db.models import Q + +from rest_framework import viewsets + +from packages.models import Package +from operatingsystems.models import OSRelease +from security.models import CVE, CWE +from security.serializers import CVESerializer, CWESerializer + + +@login_required +def cwe_list(request): + cwes = CWE.objects.select_related() + + if 'search' in request.GET: + terms = request.GET['search'].lower() + query = Q() + for term in terms.split(' '): + q = Q(cwe_id__icontains=term) | \ + Q(name__icontains=term) | \ + Q(description__icontains=term) + query = query & q + cwes = cwes.filter(query) + else: + terms = '' + + page_no = request.GET.get('page') + paginator = Paginator(cwes, 50) + + try: + page = paginator.page(page_no) + except PageNotAnInteger: + page = paginator.page(1) + except EmptyPage: + page = paginator.page(paginator.num_pages) + + return render(request, + 'security/cwe_list.html', + {'page': page, + 'terms': terms}) + + +@login_required +def cwe_detail(request, cwe_id): + cwe = get_object_or_404(CWE, cwe_id=cwe_id) + return render(request, + 'security/cwe_detail.html', + {'cwe': cwe}) + + +@login_required +def cve_list(request): + cves = CVE.objects.select_related() + + if 'erratum_id' in request.GET: + cves = cves.filter(erratum=int(request.GET['erratum_id'])) + + if 'reference_id' in request.GET: + cves = cves.filter(references=int(request.GET['reference_id'])) + + if 'package_id' in request.GET: + cves = cves.filter(packages=int(request.GET['package_id'])) + + if 'cwe_id' in request.GET: + cves = cves.filter(cwes__cwe_id=request.GET['cwe_id']) + + if 'search' in request.GET: + terms = request.GET['search'].lower() + query = Q() + for term in terms.split(' '): + q = Q(cve_id__icontains=term) + query = query & q + cves = cves.filter(query) + 
else: + terms = '' + + page_no = request.GET.get('page') + paginator = Paginator(cves, 50) + + try: + page = paginator.page(page_no) + except PageNotAnInteger: + page = paginator.page(1) + except EmptyPage: + page = paginator.page(paginator.num_pages) + + return render(request, + 'security/cve_list.html', + {'page': page, + 'terms': terms}) + + +@login_required +def cve_detail(request, cve_id): + cve = get_object_or_404(CVE, cve_id=cve_id) + packages = Package.objects.filter(erratum__in=cve.erratum_set.all()).distinct() + osreleases = OSRelease.objects.filter(erratum__in=cve.erratum_set.all()).distinct() + return render(request, + 'security/cve_detail.html', + {'cve': cve, + 'packages': packages, + 'osreleases': osreleases}) + + +@login_required +def security_landing(request): + return render(request, 'security/security_landing.html') + + +class CWEViewSet(viewsets.ModelViewSet): + """ API endpoint that allows CWEs to be viewed or edited. + """ + queryset = CWE.objects.all() + serializer_class = CWESerializer + + +class CVEViewSet(viewsets.ModelViewSet): + """ API endpoint that allows CVEs to be viewed or edited. 
+ """ + queryset = CVE.objects.all() + serializer_class = CVESerializer diff --git a/util/__init__.py b/util/__init__.py index 3cdfe7d3..aad5410e 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -26,7 +26,7 @@ from enum import Enum from hashlib import md5, sha1, sha256, sha512 from progressbar import Bar, ETA, Percentage, ProgressBar -from patchman.signals import error_message, info_message +from patchman.signals import error_message, info_message, debug_message from django.utils.timezone import make_aware from django.utils.dateparse import parse_datetime @@ -86,7 +86,7 @@ def update_pbar(index, **kwargs): pmax = pbar.maxval if index >= pmax: pbar.finish() - print_nocr(Fore.RESET) + print_nocr(Fore.RESET + Style.RESET_ALL) pbar = None @@ -129,7 +129,8 @@ def get_url(url, headers={}, params={}): """ res = None try: - res = requests.get(url, headers=headers, params=params, stream=True) + res = requests.get(url, headers=headers, params=params, stream=True, timeout=30) + debug_message.send(sender=None, text=f'{res.status_code}: {res.headers}') except requests.exceptions.Timeout: error_message.send(sender=None, text=f'Timeout - {url!s}') except requests.exceptions.TooManyRedirects: @@ -263,8 +264,10 @@ def tz_aware_datetime(date): """ if isinstance(date, int): parsed_date = datetime.fromtimestamp(date) - else: + elif isinstance(date, str): parsed_date = parse_datetime(date) + else: + parsed_date = date if not parsed_date.tzinfo: parsed_date = make_aware(parsed_date) return parsed_date diff --git a/util/templates/base.html b/util/templates/base.html index 25dc0578..d732263c 100644 --- a/util/templates/base.html +++ b/util/templates/base.html @@ -10,6 +10,7 @@ {% block page_title %}{% endblock %} + {% block extrahead %}{% endblock %} diff --git a/util/templates/dashboard.html b/util/templates/dashboard.html index 6b983aa6..b8dd8e96 100644 --- a/util/templates/dashboard.html +++ b/util/templates/dashboard.html @@ -8,34 +8,34 @@ {% block content %} -{% with 
count=lonely_oses.count %} - {% if lonely_oses.count > 0 %} +{% with count=lonely_osvariants.count %} + {% if lonely_osvariants.count > 0 %}
    - -
    - {% gen_table lonely_oses %} + +
    + {% gen_table lonely_osvariants %}
    {% endif %} {% endwith %} -{% with count=nohost_oses.count %} - {% if nohost_oses.count > 0 %} +{% with count=nohost_osvariants.count %} + {% if nohost_osvariants.count > 0 %}
    - -
    - {% gen_table nohost_oses %} + +
    + {% gen_table nohost_osvariants %}
    {% endif %} {% endwith %} -{% with count=norepo_osgroups.count %} - {% if count > 0 and norepo_osgroups != None %} +{% with count=norepo_osreleases.count %} + {% if count > 0 and norepo_osreleases != None %}
    - -
    - {% gen_table norepo_osgroups %} + +
    + {% gen_table norepo_osreleases %}
    {% endif %} diff --git a/util/templates/navbar.html b/util/templates/navbar.html index 10391b3c..2a2edc0b 100644 --- a/util/templates/navbar.html +++ b/util/templates/navbar.html @@ -13,8 +13,10 @@
  • Dashboard
  • Hosts
  • Repositories
  • -
  • Packages
  • -
  • Operating Systems
  • +
  • Packages
  • +
  • Errata
  • +
  • CVEs
  • +
  • Operating Systems
  • Reports
  • {% if user.is_superuser %}
  • Django Admin
  • diff --git a/util/templates/objectlist.html b/util/templates/objectlist.html index edb31271..c7b61c92 100644 --- a/util/templates/objectlist.html +++ b/util/templates/objectlist.html @@ -7,7 +7,7 @@
    {% searchform terms %} - {% gen_table page.object_list %} + {% gen_table page.object_list table_template %}
    {% object_count page %}
    diff --git a/util/views.py b/util/views.py index 8de578e5..bdc5687c 100644 --- a/util/views.py +++ b/util/views.py @@ -24,7 +24,7 @@ from django.db.models import F from hosts.models import Host -from operatingsystems.models import OS, OSGroup +from operatingsystems.models import OSVariant, OSRelease from repos.models import Repository, Mirror from packages.models import Package from reports.models import Report @@ -40,8 +40,8 @@ def dashboard(request): site = {'name': '', 'domainname': ''} hosts = Host.objects.all() - oses = OS.objects.all() - osgroups = OSGroup.objects.all() + osvariants = OSVariant.objects.all() + osreleases = OSRelease.objects.all() repos = Repository.objects.all() packages = Package.objects.all() @@ -52,20 +52,20 @@ def dashboard(request): days = 14 last_report_delta = datetime.now() - timedelta(days=days) stale_hosts = hosts.filter(lastreport__lt=last_report_delta) - norepo_hosts = hosts.filter(repos__isnull=True, os__osgroup__repos__isnull=True) # noqa + norepo_hosts = hosts.filter(repos__isnull=True, osvariant__osrelease__repos__isnull=True) # noqa reboot_hosts = hosts.filter(reboot_required=True) secupdate_hosts = hosts.filter(updates__security=True, updates__isnull=False).distinct() # noqa bugupdate_hosts = hosts.exclude(updates__security=True, updates__isnull=False).distinct().filter(updates__security=False, updates__isnull=False).distinct() # noqa diff_rdns_hosts = hosts.exclude(reversedns=F('hostname')).filter(check_dns=True) # noqa - # os issues - lonely_oses = oses.filter(osgroup__isnull=True) - nohost_oses = oses.filter(host__isnull=True) + # os variant issues + lonely_osvariants = osvariants.filter(osrelease__isnull=True) + nohost_osvariants = osvariants.filter(host__isnull=True) - # osgroup issues - norepo_osgroups = None + # os release issues + norepo_osreleases = None if hosts.filter(host_repos_only=False).exists(): - norepo_osgroups = osgroups.filter(repos__isnull=True) + norepo_osreleases = 
osreleases.filter(repos__isnull=True) # mirror issues failed_mirrors = repos.filter(auth_required=False).filter(mirror__last_access_ok=False).filter(mirror__last_access_ok=True).distinct() # noqa @@ -74,7 +74,7 @@ def dashboard(request): # repo issues failed_repos = repos.filter(auth_required=False).filter(mirror__last_access_ok=False).exclude(id__in=[x.id for x in failed_mirrors]).distinct() # noqa - unused_repos = repos.filter(host__isnull=True, osgroup__isnull=True) + unused_repos = repos.filter(host__isnull=True, osrelease__isnull=True) nomirror_repos = repos.filter(mirror__isnull=True) nohost_repos = repos.filter(host__isnull=True) @@ -110,18 +110,23 @@ def dashboard(request): request, 'dashboard.html', {'site': site, - 'lonely_oses': lonely_oses, 'norepo_hosts': norepo_hosts, - 'nohost_oses': nohost_oses, 'diff_rdns_hosts': diff_rdns_hosts, - 'stale_hosts': stale_hosts, 'possible_mirrors': possible_mirrors, + 'lonely_osvariants': lonely_osvariants, + 'norepo_hosts': norepo_hosts, + 'nohost_osvariants': nohost_osvariants, + 'diff_rdns_hosts': diff_rdns_hosts, + 'stale_hosts': stale_hosts, + 'possible_mirrors': possible_mirrors, 'norepo_packages': norepo_packages, 'nohost_repos': nohost_repos, 'secupdate_hosts': secupdate_hosts, 'bugupdate_hosts': bugupdate_hosts, - 'norepo_osgroups': norepo_osgroups, 'unused_repos': unused_repos, + 'norepo_osreleases': norepo_osreleases, + 'unused_repos': unused_repos, 'disabled_mirrors': disabled_mirrors, 'norefresh_mirrors': norefresh_mirrors, 'failed_mirrors': failed_mirrors, 'orphaned_packages': orphaned_packages, - 'failed_repos': failed_repos, 'nomirror_repos': nomirror_repos, + 'failed_repos': failed_repos, + 'nomirror_repos': nomirror_repos, 'reboot_hosts': reboot_hosts, - 'unprocessed_reports': unprocessed_reports}, ) + 'unprocessed_reports': unprocessed_reports}) From 25a8540c2ee9ab966fcd4ef2b4bbbb4438f08a60 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 10 Feb 2025 22:39:32 -0500 Subject: [PATCH 027/199] 
gentoo client support --- client/patchman-client | 34 ++ hosts/models.py | 11 +- .../migrations/0002_auto_20250207_1319.py | 36 ++ .../migrations/0003_auto_20250207_1746.py | 18 + packages/models.py | 52 ++- packages/utils.py | 58 ++- reports/utils.py | 43 ++- .../0002_alter_repository_repotype.py | 18 + repos/models.py | 6 +- repos/utils.py | 330 ++++++++++++++---- requirements.txt | 1 + 11 files changed, 502 insertions(+), 105 deletions(-) create mode 100644 packages/migrations/0002_auto_20250207_1319.py create mode 100644 packages/migrations/0003_auto_20250207_1746.py create mode 100644 repos/migrations/0002_alter_repository_repotype.py diff --git a/client/patchman-client b/client/patchman-client index 6b6a0943..3f06969b 100755 --- a/client/patchman-client +++ b/client/patchman-client @@ -297,10 +297,20 @@ get_installed_archlinux_packages() { fi } +get_installed_gentoo_packages() { + if check_command_exists qkeyword ; then + gentoo_package_arch=$(qkeyword -A) + fi + if check_command_exists qlist ; then + qlist -Ic -F "'%{PN}' '%{SLOT}' '%{PV}' REL'%{PR}' '${gentoo_package_arch}' 'gentoo' '%{CAT}' '%{REPO}'" | sed -e "s/REL'r/'/g" >> "${tmpfile_pkg}" + fi +} + get_packages() { get_installed_rpm_packages get_installed_deb_packages get_installed_archlinux_packages + get_installed_gentoo_packages } get_modules() { @@ -346,6 +356,8 @@ get_host_data() { os="${PRETTY_NAME}" elif [ "${ID}" == "arch" ] ; then os="${NAME}" + elif [ "${ID}" == "gentoo" ] ; then + os="${PRETTY_NAME} ${VERSION_ID}" elif [[ "${ID}" =~ "suse" ]] ; then os="${PRETTY_NAME}" elif [ "${ID}" == "astra" ] ; then @@ -557,6 +569,28 @@ get_repos() { done fi + # Gentoo + if [ "${os}" == "Gentoo" ] ; then + if [ ${verbose} == 1 ] ; then + echo 'Finding portage repos...' 
+ fi + declare -A repos + repos[gentoo]='-1000' + repos_conf=$(awk '/\[/{prefix=$0; next} $1{print prefix $0}' /etc/portage/repos.conf/*.conf | grep '^\[') + for stanza in ${repos_conf} ; do + repo=$(echo ${stanza} | cut -d ']' -f 1 | sed -e 's/\[//') + rhs=$(echo ${stanza} | cut -d ']' -f 2 | grep -v '^#') + if [[ ${rhs} =~ "priority" ]] ; then + priority=$(echo ${rhs} | sed -e 's/^ *priority *= *//') + repos[${repo}]+=${priority} + unset priority + fi + done + for r in "${!repos[@]}"; do + echo "'gentoo' 'Gentoo Linux ${r} ${host_arch}' '${r}' '${repos[${r}]}'" >> "${tmpfile_rep}" + done + fi + IFS=${FULL_IFS} sed -i -e '/^$/d' "${tmpfile_rep}" diff --git a/hosts/models.py b/hosts/models.py index 6f812727..237e4e54 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -229,10 +229,13 @@ def find_host_repo_updates(self, host_packages, repo_packages): priority = best_repo.priority # find the packages that are potential updates - pu_q = Q(name=package.name, - arch=package.arch, - packagetype=package.packagetype) - potential_updates = repo_packages.filter(pu_q) + pu_q = Q( + name=package.name, + arch=package.arch, + packagetype=package.packagetype, + category=package.category, + ) + potential_updates = repo_packages.filter(pu_q).exclude(version__startswith='9999') for pu in potential_updates: pu_is_module_package = False pu_in_enabled_modules = False diff --git a/packages/migrations/0002_auto_20250207_1319.py b/packages/migrations/0002_auto_20250207_1319.py new file mode 100644 index 00000000..1563d139 --- /dev/null +++ b/packages/migrations/0002_auto_20250207_1319.py @@ -0,0 +1,36 @@ +# Generated by Django 3.2.25 on 2025-02-07 13:19 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('packages', '0002_delete_erratum_delete_erratumreference'), + ] + + operations = [ + migrations.CreateModel( + name='PackageCategory', + fields=[ + ('id', models.AutoField(auto_created=True, 
primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=255, unique=True)), + ], + options={ + 'verbose_name': 'Package Category', + 'verbose_name_plural': 'Package Categories', + 'ordering': ('name',), + }, + ), + migrations.AlterField( + model_name='package', + name='packagetype', + field=models.CharField(blank=True, choices=[('R', 'rpm'), ('D', 'deb'), ('A', 'arch'), ('G', 'gentoo'), ('U', 'unknown')], max_length=1, null=True), + ), + migrations.AddField( + model_name='package', + name='category', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='packages.packagecategory'), + ), + ] diff --git a/packages/migrations/0003_auto_20250207_1746.py b/packages/migrations/0003_auto_20250207_1746.py new file mode 100644 index 00000000..bf97bde7 --- /dev/null +++ b/packages/migrations/0003_auto_20250207_1746.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.25 on 2025-02-07 17:46 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('arch', '0001_initial'), + ('packages', '0002_auto_20250207_1319'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='package', + unique_together={('name', 'epoch', 'version', 'release', 'arch', 'packagetype', 'category')}, + ), + ] diff --git a/packages/models.py b/packages/models.py index 45cb6656..51005aeb 100644 --- a/packages/models.py +++ b/packages/models.py @@ -44,17 +44,32 @@ def get_absolute_url(self): return reverse('packages:package_name_detail', args=[self.name]) +class PackageCategory(models.Model): + + name = models.CharField(unique=True, max_length=255) + + class Meta: + verbose_name = 'Package Category' + verbose_name_plural = 'Package Categories' + ordering = ('name',) + + def __str__(self): + return self.name + + class Package(models.Model): RPM = 'R' DEB = 'D' ARCH = 'A' + GENTOO = 'G' UNKNOWN = 'U' PACKAGE_TYPES = ( (RPM, 'rpm'), (DEB, 'deb'), (ARCH, 'arch'), + (GENTOO, 
'gentoo'), (UNKNOWN, 'unknown'), ) @@ -63,10 +78,8 @@ class Package(models.Model): version = models.CharField(max_length=255) release = models.CharField(max_length=255, blank=True, null=True) arch = models.ForeignKey(PackageArchitecture, on_delete=models.CASCADE) - packagetype = models.CharField(max_length=1, - choices=PACKAGE_TYPES, - blank=True, - null=True) + packagetype = models.CharField(max_length=1, choices=PACKAGE_TYPES, blank=True, null=True) + category = models.ForeignKey(PackageCategory, blank=True, null=True, on_delete=models.SET_NULL) description = models.TextField(blank=True, null=True) url = models.URLField(max_length=255, blank=True, null=True) @@ -74,8 +87,7 @@ class Package(models.Model): class Meta: ordering = ('name', 'epoch', 'version', 'release', 'arch') - unique_together = ( - 'name', 'epoch', 'version', 'release', 'arch', 'packagetype',) + unique_together = ('name', 'epoch', 'version', 'release', 'arch', 'packagetype', 'category') def __str__(self): if self.epoch: @@ -86,14 +98,16 @@ def __str__(self): rel = f'-{self.release!s}' else: rel = '' - return f'{self.name!s}-{epo!s}{self.version!s}{rel!s}-{self.arch!s}' + if self.packagetype == 'G': + return f'{self.category!s}/{self.name!s}-{epo!s}{self.version!s}{rel!s}-{self.arch!s}' + else: + return f'{self.name!s}-{epo!s}{self.version!s}{rel!s}-{self.arch!s}' def get_absolute_url(self): return reverse('packages:package_detail', args=[self.id]) def __key(self): - return (self.name, self.epoch, self.version, self.release, self.arch, - self.packagetype) + return (self.name, self.epoch, self.version, self.release, self.arch, self.packagetype, self.category) def __eq__(self, other): return self.__key() == other.__key() @@ -122,7 +136,7 @@ def _version_string_deb_arch(self): return (epoch + version + release) def get_version_string(self): - if self.packagetype == 'R': + if self.packagetype == 'R' or self.packagetype == 'G': return self._version_string_rpm() elif self.packagetype == 'D' or 
self.packagetype == 'A': return self._version_string_deb_arch() @@ -143,6 +157,9 @@ def compare_version(self, other): vs = Version(self.get_version_string()) vo = Version(other.get_version_string()) return version_compare(vs, vo) + elif self.packagetype == 'G' and other.packagetype == 'G': + return labelCompare(self.get_version_string(), + other.get_version_string()) def repo_count(self): from repos.models import Repository @@ -152,18 +169,19 @@ def repo_count(self): class PackageString(models.Model): - class Meta: - managed = False - name = models.CharField(max_length=255) version = models.CharField(max_length=255) epoch = models.CharField(max_length=255, blank=True, null=True) release = models.CharField(max_length=255, blank=True, null=True) arch = models.CharField(max_length=255) packagetype = models.CharField(max_length=1, blank=True, null=True) + category = models.CharField(max_length=255, blank=True, null=True) description = models.TextField(blank=True, null=True) url = models.URLField(max_length=255, blank=True, null=True) + class Meta: + managed = False + def __str__(self): if self.epoch: epo = f'{self.epoch!s}:' @@ -173,11 +191,13 @@ def __str__(self): rel = f'-{self.release!s}' else: rel = '' - return f'{self.name!s}-{epo!s}{self.version!s}{rel!s}-{self.arch!s}' + if self.packagetype == 'G': + return f'{self.category!s}/{self.name!s}-{epo!s}{self.version!s}{rel!s}-{self.arch!s}' + else: + return f'{self.name!s}-{epo!s}{self.version!s}{rel!s}-{self.arch!s}' def __key(self): - return (self.name, self.epoch, self.version, self.release, self.arch, - self.packagetype) + return (self.name, self.epoch, self.version, self.release, self.arch, self.packagetype, self.category) def __eq__(self, other): return self.__key() == other.__key() diff --git a/packages/utils.py b/packages/utils.py index bc7ed35b..e10df89f 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -20,9 +20,61 @@ from django.core.exceptions import MultipleObjectsReturned from django.db import 
IntegrityError, DatabaseError, transaction -from arch.models import PackageArchitecture -from packages.models import PackageName, Package, PackageUpdate -from patchman.signals import error_message +from arch.models import MachineArchitecture, PackageArchitecture +from packages.models import PackageName, Package, PackageUpdate, PackageCategory, PackageString +from patchman.signals import error_message, progress_info_s, progress_update_s +from util import bunzip2, get_url, download_url, get_sha1 + + +def convert_package_to_packagestring(package): + """ Convert a Package object to a PackageString object + """ + name = package.name.name + arch = package.arch.name + if package.category: + category = package.category.name + else: + category = None + + string_package = PackageString( + name=name, + epoch=package.epoch, + version=package.version, + release=package.release, + arch=arch, + packagetype=package.packagetype, + category=category, + ) + return string_package + + +def convert_packagestring_to_package(strpackage): + """ Convert a PackageString object to a Package object + """ + with transaction.atomic(): + name, created = PackageName.objects.get_or_create(name=strpackage.name.lower()) + epoch = strpackage.epoch + version = strpackage.version + release = strpackage.release + with transaction.atomic(): + arch, created = PackageArchitecture.objects.get_or_create(name=strpackage.arch) + packagetype = strpackage.packagetype + if strpackage.category: + with transaction.atomic(): + category, created = PackageCategory.objects.get_or_create(name=strpackage.category) + else: + category = None + + package, created = Package.objects.get_or_create( + name=name, + epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype=packagetype, + category=category, + ) + return package def find_evr(s): diff --git a/reports/utils.py b/reports/utils.py index 7b39aec5..4c10226d 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -23,9 +23,10 @@ from arch.models import 
MachineArchitecture, PackageArchitecture from repos.models import Repository, Mirror, MirrorPackage from modules.models import Module -from packages.models import Package +from packages.models import Package, PackageCategory from packages.utils import find_evr, get_or_create_package, \ get_or_create_package_update, parse_package_string +from repos.utils import get_or_create_repo from patchman.signals import progress_info_s, progress_update_s, \ error_message, info_message @@ -246,6 +247,10 @@ def process_repo(repo, arch): r_type = Repository.ARCH r_id = repo[2] r_priority = 0 + elif repo[0] == 'gentoo': + r_type = Repository.GENTOO + r_id = repo.pop(2) + r_priority = repo[2] if repo[1]: r_name = repo[1] @@ -266,19 +271,7 @@ def process_repo(repo, arch): else: repository = mirror.repo if not repository: - repositories = Repository.objects.all() - try: - with transaction.atomic(): - repository, c = repositories.get_or_create(name=r_name, - arch=r_arch, - repotype=r_type) - except IntegrityError as e: - error_message.send(sender=None, text=e) - repository = repositories.get(name=r_name, - arch=r_arch, - repotype=r_type) - except DatabaseError as e: - error_message.send(sender=None, text=e) + repository = get_or_create_repo(r_name, r_arch, r_type) if r_id and repository.repo_id != r_id: repository.repo_id = r_id @@ -389,6 +382,7 @@ def process_package(pkg, protocol): arch = 'unknown' name = pkg[0] + p_category = p_repo = None if pkg[1]: epoch = pkg[1] if pkg[2]: @@ -404,8 +398,29 @@ def process_package(pkg, protocol): p_type = Package.RPM elif pkg[5] == 'arch': p_type = Package.ARCH + elif pkg[5] == 'gentoo': + p_type = Package.GENTOO + p_category = pkg[6] + p_repo = pkg[7] else: p_type = Package.UNKNOWN package = get_or_create_package(name, epoch, ver, rel, arch, p_type) + if p_type == Package.GENTOO: + category, created = PackageCategory.objects.get_or_create(name=p_category) + package.category = category + + machine_arches = MachineArchitecture.objects.all() + with 
transaction.atomic(): + repo_arch, created = machine_arches.get_or_create(name='any') + + repo_name = 'Gentoo Linux' + repo = get_or_create_repo(repo_name, repo_arch, Repository.GENTOO) + + with transaction.atomic(): + url = f'gentoo virtual for {p_repo}' + mirror, c = Mirror.objects.get_or_create(repo=repo, url=url, mirrorlist=True) + MirrorPackage.objects.create(mirror=mirror, package=package) + + package.save() return package diff --git a/repos/migrations/0002_alter_repository_repotype.py b/repos/migrations/0002_alter_repository_repotype.py new file mode 100644 index 00000000..ec8dd33e --- /dev/null +++ b/repos/migrations/0002_alter_repository_repotype.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.25 on 2025-02-07 13:19 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('repos', '0001_initial'), + ] + + operations = [ + migrations.AlterField( + model_name='repository', + name='repotype', + field=models.CharField(choices=[('R', 'rpm'), ('D', 'deb'), ('A', 'arch'), ('G', 'gentoo')], max_length=1), + ), + ] diff --git a/repos/models.py b/repos/models.py index 888fd686..9f6c219c 100644 --- a/repos/models.py +++ b/repos/models.py @@ -24,7 +24,7 @@ from util import has_setting_of_type from repos.utils import refresh_deb_repo, refresh_rpm_repo, \ - refresh_arch_repo, update_mirror_packages + refresh_arch_repo, refresh_gentoo_repo, update_mirror_packages from patchman.signals import info_message, warning_message, error_message @@ -33,11 +33,13 @@ class Repository(models.Model): RPM = 'R' DEB = 'D' ARCH = 'A' + GENTOO = 'G' REPO_TYPES = ( (RPM, 'rpm'), (DEB, 'deb'), (ARCH, 'arch'), + (GENTOO, 'gentoo'), ) name = models.CharField(max_length=255, unique=True) @@ -91,6 +93,8 @@ def refresh(self, force=False): refresh_rpm_repo(self) elif self.repotype == Repository.ARCH: refresh_arch_repo(self) + elif self.repotype == Repository.GENTOO: + refresh_gentoo_repo(self) else: text = 'Error: unknown repo type for repo ' 
text += f'{self.id!s}: {self.repotype!s}' diff --git a/repos/utils.py b/repos/utils.py index 35a96ce1..5a381da8 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -15,21 +15,28 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see +import io +import os +import git import re +import shutil import tarfile +import tempfile import yaml from datetime import datetime from io import BytesIO from defusedxml.lxml import _etree as etree from debian.debian_support import Version from debian.deb822 import Packages +from fnmatch import fnmatch from django.conf import settings from django.db import IntegrityError, DatabaseError, transaction from django.db.models import Q -from packages.models import Package, PackageName, PackageString -from packages.utils import parse_package_string, get_or_create_package +from packages.models import Package, PackageString +from packages.utils import parse_package_string, get_or_create_package, find_evr, \ + convert_package_to_packagestring, convert_packagestring_to_package from arch.models import PackageArchitecture from util import get_url, download_url, response_is_valid, extract, \ get_checksum, Checksum, has_setting_of_type @@ -37,94 +44,63 @@ info_message, warning_message, error_message, debug_message +def get_or_create_repo(r_name, r_arch, r_type): + """ Get or create a Repository object. Returns the object. Returns None if + it cannot get or create the object. 
+ """ + from repos.models import Repository + repositories = Repository.objects.all() + try: + with transaction.atomic(): + repository, c = repositories.get_or_create(name=r_name, + arch=r_arch, + repotype=r_type) + except IntegrityError as e: + error_message.send(sender=None, text=e) + repository = repositories.get(name=r_name, + arch=r_arch, + repotype=r_type) + except DatabaseError as e: + error_message.send(sender=None, text=e) + if repository: + return repository + + def update_mirror_packages(mirror, packages): """ Updates the packages contained on a mirror, and removes obsolete packages. """ - new = set() - old = set() - removals = set() + from repos.models import MirrorPackage # noqa + old = set() mirror_packages = mirror.packages.all() mlen = mirror_packages.count() - ptext = 'Fetching existing packages:' progress_info_s.send(sender=None, ptext=ptext, plen=mlen) for i, package in enumerate(mirror_packages): progress_update_s.send(sender=None, index=i + 1) - name = str(package.name) - arch = str(package.arch) - strpackage = PackageString(name=name, - epoch=package.epoch, - version=package.version, - release=package.release, - arch=arch, - packagetype=package.packagetype) + strpackage = convert_package_to_packagestring(package) old.add(strpackage) - new = packages.difference(old) removals = old.difference(packages) - - nlen = len(new) rlen = len(removals) - ptext = f'Removing {rlen!s} obsolete packages:' progress_info_s.send(sender=None, ptext=ptext, plen=rlen) - for i, package in enumerate(removals): + for i, strpackage in enumerate(removals): progress_update_s.send(sender=None, index=i + 1) - package_id = PackageName.objects.get(name=package.name) - epoch = package.epoch - version = package.version - release = package.release - arch = PackageArchitecture.objects.get(name=package.arch) - packagetype = package.packagetype - p = Package.objects.get(name=package_id, - epoch=epoch, - version=version, - arch=arch, - release=release, - packagetype=packagetype) - 
from repos.models import MirrorPackage - mirror_packages = MirrorPackage.objects.filter(mirror=mirror, package=p) - for mirror_package in mirror_packages: - with transaction.atomic(): - mirror_package.delete() + package = convert_packagestring_to_package(strpackage) + MirrorPackage.objects.filter(mirror=mirror, package=package).delete() + new = packages.difference(old) + nlen = len(new) ptext = f'Adding {nlen!s} new packages:' progress_info_s.send(sender=None, ptext=ptext, plen=nlen) - for i, package in enumerate(new): + for i, strpackage in enumerate(new): progress_update_s.send(sender=None, index=i + 1) - - package_names = PackageName.objects.all() - with transaction.atomic(): - package_id, c = package_names.get_or_create(name=package.name) - - epoch = package.epoch - version = package.version - release = package.release - packagetype = package.packagetype - - package_arches = PackageArchitecture.objects.all() + package = convert_packagestring_to_package(strpackage) with transaction.atomic(): - arch, c = package_arches.get_or_create(name=package.arch) - - all_packages = Package.objects.all() - with transaction.atomic(): - p, c = all_packages.get_or_create(name=package_id, - epoch=epoch, - version=version, - arch=arch, - release=release, - packagetype=packagetype) - # This fixes a subtle bug where a stored package name with uppercase - # letters will not match until it is lowercased. 
- if package_id.name != package.name: - package_id.name = package.name - with transaction.atomic(): - package_id.save() - from repos.models import MirrorPackage # noqa - with transaction.atomic(): - mirror_package, c = MirrorPackage.objects.get_or_create(mirror=mirror, package=p) + mirror_package, c = MirrorPackage.objects.get_or_create(mirror=mirror, package=package) + mirror.save() def get_primary_url(mirror_url, data): @@ -189,6 +165,62 @@ def find_mirror_url(stored_mirror_url, formats): return res +def get_gentoo_mirror_urls(): + """ Use the Gentoo API to find http(s) mirrors + """ + res = get_url('https://api.gentoo.org/mirrors/distfiles.xml') + if not res: + return + mirrors = {} + tree = etree.parse(BytesIO(res.content)) + root = tree.getroot() + for child in root: + if child.tag == 'mirrorgroup': + for k, v in child.attrib.items(): + if k == 'region': + region = v + elif k == 'country': + country = v + for mirror in child: + for element in mirror: + if element.tag == 'name': + name = element.text + mirrors[name] = {} + mirrors[name]['region'] = region + mirrors[name]['country'] = country + mirrors[name]['urls'] = [] + elif element.tag == 'uri': + if element.get('protocol') == 'http': + mirrors[name]['urls'].append(element.text) + mirror_urls = [] + # for now, ignore region data and choose MAX_MIRRORS mirrors at random + for _, v in mirrors.items(): + for url in v['urls']: + mirror_urls.append(url.rstrip('/') + '/snapshots/gentoo-latest.tar.xz') + return mirror_urls + + +def get_gentoo_overlay_mirrors(repo_name): + """Get the gentoo overlay repos that match repo.id + """ + res = get_url('https://api.gentoo.org/overlays/repositories.xml') + if not res: + return + tree = etree.parse(BytesIO(res.content)) + root = tree.getroot() + mirrors = [] + for child in root: + if child.tag == 'repo': + found = False + for element in child: + if element.tag == 'name' and element.text == repo_name: + found = True + if found and element.tag == 'source': + if 
element.text.startswith('http'): + mirrors.append(element.text) + return mirrors + + def is_metalink(url): """ Checks if a given url is a metalink url """ @@ -615,6 +647,170 @@ def refresh_arch_repo(repo): mirror.save() +def refresh_gentoo_main_repo(repo): + """ Refresh all mirrors of the main gentoo repo + """ + mirrors = get_gentoo_mirror_urls() + add_mirrors_from_urls(repo, mirrors) + + +def refresh_gentoo_overlay_repo(repo): + """ Refresh all mirrors of a Gentoo overlay repo + """ + mirrors = get_gentoo_overlay_mirrors(repo.repo_id) + add_mirrors_from_urls(repo, mirrors) + + +def get_gentoo_ebuild_keywords(content): + keywords = set() + default_keywords = { + 'alpha', + 'amd64', + 'arm', + 'arm64', + 'hppa', + 'loong', + 'm68k', + 'mips', + 'ppc', + 'ppc64', + 'riscv', + 's390', + 'sparc', + 'x86', + } + for line in content.decode().splitlines(): + if not line.startswith('KEYWORDS='): + continue + all_keywords = line.split('=')[1].split('#')[0].strip(' "').split() + if len(all_keywords) == 0 or '*' in all_keywords: + all_keywords = default_keywords + for keyword in all_keywords: + if keyword.startswith('~'): + continue + if keyword.startswith('-'): + keyword = keyword.replace('-', '') + if keyword in all_keywords: + all_keywords.remove(keyword) + continue + keywords.add(keyword) + break + return keywords + + +def extract_gentoo_packages(mirror, data): + extracted_files = {} + with tarfile.open(fileobj=io.BytesIO(data), mode='r') as tar: + for member in tar.getmembers(): + if member.isfile(): + file_content = tar.extractfile(member).read() + extracted_files[member.name] = file_content + packages = set() + for path, content in extracted_files.items(): + if fnmatch(path, '*.ebuild'): + components = path.split(os.sep) + if len(components) < 4: + continue + category = components[1] + name = components[2] + evr = components[3].replace(f'{name}-', '').replace('.ebuild', '') + epoch, version, release = find_evr(evr) + arches = get_gentoo_ebuild_keywords(content) + for 
arch in arches: + package = PackageString( + name=name.lower(), + epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype='G', + category=category, + ) + packages.add(package) + return packages + + +def extract_gentoo_overlay_packages(mirror): + from packages.utils import find_evr + t = tempfile.mkdtemp() + git.Repo.clone_from(mirror.url, t, branch='master', depth=1) + packages = set() + with transaction.atomic(): + arch, c = PackageArchitecture.objects.get_or_create(name='any') + for root, dirs, files in os.walk(t): + for name in files: + if fnmatch(name, '*.ebuild'): + full_name = root.replace(t + '/', '') + p_category, p_name = full_name.split('/') + m = re.match(fr'{p_name}-(.*)\.ebuild', name) + if m: + p_evr = m.group(1) + epoch, version, release = find_evr(p_evr) + package = PackageString( + name=p_name.lower(), + epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype='G', + category=p_category, + ) + packages.add(package) + shutil.rmtree(t) + return packages + + +def refresh_gentoo_repo(repo): + """ Refresh a Gentoo repo + """ + if repo.repo_id == 'gentoo': + repo_type = 'main' + refresh_gentoo_main_repo(repo) + else: + refresh_gentoo_overlay_repo(repo) + repo_type = 'overlay' + ts = datetime.now().replace(microsecond=0) + for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True): + res = get_url(mirror.url + '.md5sum') + data = download_url(res, 'Downloading repo info (1/2):') + if data is None: + mirror.fail() + continue + checksum = data.decode().split()[0] + if checksum is None: + mirror.fail() + continue + if mirror.file_checksum == checksum: + text = 'Mirror checksum has not changed, not refreshing package metadata' + warning_message.send(sender=None, text=text) + continue + res = get_url(mirror.url) + mirror.last_access_ok = response_is_valid(res) + if mirror.last_access_ok: + data = download_url(res, 'Downloading repo info (2/2):') + if data is None: + mirror.fail() + continue + extracted = 
extract(data, mirror.url) + text = f'Found gentoo repo - {mirror.url}' + info_message.send(sender=None, text=text) + computed_checksum = get_checksum(data, Checksum.md5) + if not mirror_checksum_is_valid(computed_checksum, checksum, mirror, 'package'): + continue + else: + mirror.file_checksum = checksum + if repo_type == 'main': + packages = extract_gentoo_packages(mirror, extracted) + elif repo_type == 'overlay': + packages = extract_gentoo_overlay_packages(mirror) + mirror.timestamp = ts + if packages: + update_mirror_packages(mirror, packages) + else: + mirror.fail() + mirror.save() + + def refresh_yast_repo(mirror, data): """ Refresh package metadata for a yast-style rpm mirror and add the packages to the mirror diff --git a/requirements.txt b/requirements.txt index a6970774..4d6a598f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -17,3 +17,4 @@ humanize==3.13.1 version-utils==0.3.0 python-magic==0.4.25 pymemcache==4.0.0 +gitpython==3.1.44 From f4d86f61170c5b8187915db2718cac731d2fa33e Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 10 Feb 2025 23:03:39 -0500 Subject: [PATCH 028/199] operatingsystem updates --- .../0005_rename_os_host_osvariant.py | 18 +++++ .../migrations/0003_osgroup_codename.py | 2 +- ..._osrelease_rename_os_osvariant_and_more.py | 36 +++++++++ operatingsystems/models.py | 1 - .../operatingsystemrelease_table.html | 17 ++++ .../operatingsystemvariant_table.html | 21 +++++ .../templates/operatingsystems/os_delete.html | 77 ------------------- .../operatingsystems/os_landing.html | 18 +++++ .../templates/operatingsystems/os_list.html | 19 ----- .../operatingsystems/osgroup_detail.html | 72 ----------------- .../operatingsystems/osgroup_list.html | 7 -- ...roup_delete.html => osrelease_delete.html} | 16 ++-- .../operatingsystems/osrelease_detail.html | 70 +++++++++++++++++ .../operatingsystems/osrelease_list.html | 7 ++ .../operatingsystems/osvariant_delete.html | 77 +++++++++++++++++++ .../{os_detail.html => 
osvariant_detail.html} | 26 +++---- .../operatingsystems/osvariant_list.html | 19 +++++ operatingsystems/views.py | 3 +- repos/views.py | 2 +- 19 files changed, 308 insertions(+), 200 deletions(-) create mode 100644 hosts/migrations/0005_rename_os_host_osvariant.py create mode 100644 operatingsystems/migrations/0005_rename_osgroup_osrelease_rename_os_osvariant_and_more.py create mode 100644 operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html create mode 100644 operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html delete mode 100644 operatingsystems/templates/operatingsystems/os_delete.html create mode 100644 operatingsystems/templates/operatingsystems/os_landing.html delete mode 100644 operatingsystems/templates/operatingsystems/os_list.html delete mode 100644 operatingsystems/templates/operatingsystems/osgroup_detail.html delete mode 100644 operatingsystems/templates/operatingsystems/osgroup_list.html rename operatingsystems/templates/operatingsystems/{osgroup_delete.html => osrelease_delete.html} (58%) create mode 100644 operatingsystems/templates/operatingsystems/osrelease_detail.html create mode 100644 operatingsystems/templates/operatingsystems/osrelease_list.html create mode 100644 operatingsystems/templates/operatingsystems/osvariant_delete.html rename operatingsystems/templates/operatingsystems/{os_detail.html => osvariant_detail.html} (51%) create mode 100644 operatingsystems/templates/operatingsystems/osvariant_list.html diff --git a/hosts/migrations/0005_rename_os_host_osvariant.py b/hosts/migrations/0005_rename_os_host_osvariant.py new file mode 100644 index 00000000..821c3224 --- /dev/null +++ b/hosts/migrations/0005_rename_os_host_osvariant.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.18 on 2025-02-08 20:36 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('hosts', '0004_remove_host_tags_host_tags'), + ] + + operations = [ + 
migrations.RenameField( + model_name='host', + old_name='os', + new_name='osvariant', + ), + ] diff --git a/operatingsystems/migrations/0003_osgroup_codename.py b/operatingsystems/migrations/0003_osgroup_codename.py index 426c7a15..97496e01 100644 --- a/operatingsystems/migrations/0003_osgroup_codename.py +++ b/operatingsystems/migrations/0003_osgroup_codename.py @@ -6,7 +6,7 @@ class Migration(migrations.Migration): dependencies = [ - ('operatingsystems', '0002_initial'), + ('operatingsystems', '0003_os_arch'), ] operations = [ diff --git a/operatingsystems/migrations/0005_rename_osgroup_osrelease_rename_os_osvariant_and_more.py b/operatingsystems/migrations/0005_rename_osgroup_osrelease_rename_os_osvariant_and_more.py new file mode 100644 index 00000000..b5381c1f --- /dev/null +++ b/operatingsystems/migrations/0005_rename_osgroup_osrelease_rename_os_osvariant_and_more.py @@ -0,0 +1,36 @@ +# Generated by Django 4.2.18 on 2025-02-08 20:36 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('repos', '0001_initial'), + ('hosts', '0005_rename_os_host_osvariant'), + ('operatingsystems', '0004_alter_osgroup_unique_together'), + ] + + operations = [ + migrations.RenameModel( + old_name='OSGroup', + new_name='OSRelease', + ), + migrations.RenameModel( + old_name='OS', + new_name='OSVariant', + ), + migrations.AlterModelOptions( + name='osrelease', + options={'ordering': ('name',), 'verbose_name': 'Operating System Release', 'verbose_name_plural': 'Operating System Releases'}, + ), + migrations.AlterModelOptions( + name='osvariant', + options={'ordering': ('name',), 'verbose_name': 'Operating System Variant', 'verbose_name_plural': 'Operating System Variants'}, + ), + migrations.RenameField( + model_name='osvariant', + old_name='osgroup', + new_name='osrelease', + ), + ] diff --git a/operatingsystems/models.py b/operatingsystems/models.py index 1b4c4245..0cde601d 100644 --- a/operatingsystems/models.py +++ 
b/operatingsystems/models.py @@ -20,7 +20,6 @@ from arch.models import MachineArchitecture from repos.models import Repository -from arch.models import MachineArchitecture class OSRelease(models.Model): diff --git a/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html b/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html new file mode 100644 index 00000000..16544aa7 --- /dev/null +++ b/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html @@ -0,0 +1,17 @@ +{% load common %} + + + + + + + + + {% for osrelease in object_list %} + + + + + {% endfor %} + +
    OS ReleaseRepos
    {{ osrelease }}{% if osrelease.repos.count != None %}{{ osrelease.repos.count }}{% else %}0{% endif %}
    diff --git a/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html b/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html new file mode 100644 index 00000000..50627579 --- /dev/null +++ b/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html @@ -0,0 +1,21 @@ +{% load common %} + + + + + + + + + + + {% for osvariant in object_list %} + + + + + + + {% endfor %} + +
    NameHostsOS ReleaseRepos (OS Release)
    {{ osvariant }}{{ osvariant.host_set.count }}{% if osvariant.osrelease != None %}{{ osvariant.osrelease }}{% endif %}{% if osvariant.osrelease.repos.count != None %}{{ osvariant.osrelease.repos.count }}{% else %}0{% endif %}
    diff --git a/operatingsystems/templates/operatingsystems/os_delete.html b/operatingsystems/templates/operatingsystems/os_delete.html deleted file mode 100644 index 5367c9c2..00000000 --- a/operatingsystems/templates/operatingsystems/os_delete.html +++ /dev/null @@ -1,77 +0,0 @@ -{% extends "base.html" %} - -{% load common bootstrap3 %} - -{% block page_title %}Operating System - {{ os }} {% endblock %} - -{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • - -{% if os %} - {{ os }} -{% endif %} - -{% if oses %} - Multiple -{% endif %} - -
  • {% endblock %} - -{% block content_title %}Operating System - {{ os }}{% endblock %} - -{% block content %} - -{% if os %} -
    - - - - -
    Name {{ os.name }}
    Hosts{% if os.host_set.count != None %} {{ os.host_set.count }} {% else %} 0 {% endif %}
    OS Group{% if os.osgroup != None %} {{ os.osgroup }} {% else %}No OS Group{% endif %}
    -{% endif %} - -{% if oses %} -
    - - - - - {% for os in oses %} - - - - - - {% endfor %} -
    NameHostsOS Group
    {{ os }} {% if os.host_set.count != None %} {{ os.host_set.count }} {% else %} 0 {% endif %}{% if os.osgroup != None %} {{ os.osgroup }} {% else %}No OS Group{% endif %}
    -{% endif %} - -
    - {% if user.is_authenticated and perms.is_admin %} -
    - Are you sure you want to delete - {% if os %} - this Operating System? - {% endif %} - {% if oses %} - these Operating Systems? - {% endif %} -
    -
    -
    - {% csrf_token %} - - -
    -
    - {% else %} -
    - You do not have permission to delete Operating Systems. -
    - - {% endif %} -
    -
    - -{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/os_landing.html b/operatingsystems/templates/operatingsystems/os_landing.html new file mode 100644 index 00000000..1acf3dcb --- /dev/null +++ b/operatingsystems/templates/operatingsystems/os_landing.html @@ -0,0 +1,18 @@ +{% extends "base.html" %} + +{% block page_title %} Operating Systems {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • {% endblock %} + +{% block content_title %} Operating Systems {% endblock %} + +{% block content %} + + + +{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/os_list.html b/operatingsystems/templates/operatingsystems/os_list.html deleted file mode 100644 index 28ea5ae1..00000000 --- a/operatingsystems/templates/operatingsystems/os_list.html +++ /dev/null @@ -1,19 +0,0 @@ -{% extends "objectlist.html" %} - -{% load common bootstrap3 %} - -{% block page_title %}Operating Systems{% endblock %} - -{% block content_title %} Operating Systems {% endblock %} - -{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • {% endblock %} - -{% block objectlist_actions %} - -{% if user.is_authenticated and perms.is_admin and empty_oses %} - -{% endif %} - -{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osgroup_detail.html b/operatingsystems/templates/operatingsystems/osgroup_detail.html deleted file mode 100644 index 881003a3..00000000 --- a/operatingsystems/templates/operatingsystems/osgroup_detail.html +++ /dev/null @@ -1,72 +0,0 @@ -{% extends "base.html" %} - -{% load common bootstrap3 %} - -{% block extrahead %} {{ repos_form.media }} {% endblock %} - -{% block page_title %}OS Group - {{ osgroup }} {% endblock %} - -{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Groups
  • {{ osgroup }}
  • {% endblock %} - -{% block content_title %} OS Group - {{ osgroup }}{% endblock %} - -{% block content %} - - - -
    - -
    -
    - - - - - - - -
    Name{{ osgroup }}
    Operating Systems in this OS Group{{ osgroup.os_set.select_related.count }}
    - {% if user.is_authenticated and perms.is_admin %} - {% bootstrap_icon "trash" %} Delete this OS Group - {% endif %} -
    -
    - -
    -
    - - {% for repo in osgroup.os_set.select_related %} - - - - {% endfor %} -
    {{ repo }}
    -
    -
    - -
    -
    - {% if osgroup.repos.count == 0 %} - {{ osgroup }} has no Repositories
    - {% else %} - {% gen_table osgroup.repos.select_related %} - {% endif %} - {% if user.is_authenticated and perms.is_admin %} -
    -
    - {% csrf_token %} - {% bootstrap_form repos_form size='small' %} - -
    -
    - {% endif %} -
    -
    - -
    - -{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osgroup_list.html b/operatingsystems/templates/operatingsystems/osgroup_list.html deleted file mode 100644 index 22f39938..00000000 --- a/operatingsystems/templates/operatingsystems/osgroup_list.html +++ /dev/null @@ -1,7 +0,0 @@ -{% extends "objectlist.html" %} - -{% block page_title %}OS Groups{% endblock %} - -{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Groups
  • {% endblock %} - -{% block content_title %} OS Groups {% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osgroup_delete.html b/operatingsystems/templates/operatingsystems/osrelease_delete.html similarity index 58% rename from operatingsystems/templates/operatingsystems/osgroup_delete.html rename to operatingsystems/templates/operatingsystems/osrelease_delete.html index ded9292c..7022ee25 100644 --- a/operatingsystems/templates/operatingsystems/osgroup_delete.html +++ b/operatingsystems/templates/operatingsystems/osrelease_delete.html @@ -2,28 +2,28 @@ {% load common bootstrap3 %} -{% block page_title %}OS Group - {{ osgroup }} {% endblock %} +{% block page_title %}OS Release - {{ osrelease }} {% endblock %} -{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Groups
  • {{ osgroup }}
  • {% endblock %} +{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Releases
  • {{ osrelease }}
  • {% endblock %} -{% block content_title %} OS Group - {{ osgroup }} {% endblock %} +{% block content_title %} OS Release - {{ osrelease }} {% endblock %} {% block content %}
    - + - +
    Name{{ osgroup }}Name{{ osrelease }}
    Operating Systems in this OS Group{{ osgroup.os_set.select_related.count }}Variants{{ osrelease.osvariant_set.count }}
    {% if user.is_authenticated and perms.is_admin %}
    - Are you sure you want to delete this OS Group? + Are you sure you want to delete this OS Release?
    @@ -34,10 +34,10 @@
    {% else %}
    - You do not have permission to delete this OS Group. + You do not have permission to delete this OS Release.
    {% endif %}
    diff --git a/operatingsystems/templates/operatingsystems/osrelease_detail.html b/operatingsystems/templates/operatingsystems/osrelease_detail.html new file mode 100644 index 00000000..48677cc1 --- /dev/null +++ b/operatingsystems/templates/operatingsystems/osrelease_detail.html @@ -0,0 +1,70 @@ +{% extends "base.html" %} + +{% load common bootstrap3 %} + +{% block extrahead %} {{ repos_form.media }} {% endblock %} + +{% block page_title %}OS Release - {{ osrelease }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Releases
  • {{ osrelease }}
  • {% endblock %} + +{% block content_title %} OS Release - {{ osrelease }}{% endblock %} + +{% block content %} + + + +
    + +
    +
    + + + + + + + +
    Name{{ osrelease }}
    Variants{{ osrelease.osvariant_set.count }}
    + {% if user.is_authenticated and perms.is_admin %} + {% bootstrap_icon "trash" %} Delete this OS Release + {% endif %} +
    +
    + +
    +
    + {% if osrelease.osvariant_set.count == 0 %} + {{ osrelease }} has no Variants
    + {% else %} + {% gen_table osrelease.osvariant_set.select_related %} + {% endif %} +
    +
    + +
    +
    + {% if osrelease.repos.count == 0 %} + {{ osrelease }} has no Repositories
    + {% else %} + {% gen_table osrelease.repos.select_related %} + {% endif %} + {% if user.is_authenticated and perms.is_admin %} +
    + + {% csrf_token %} + {% bootstrap_form repos_form size='large' %} + + +
    + {% endif %} +
    +
    + +
    + +{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osrelease_list.html b/operatingsystems/templates/operatingsystems/osrelease_list.html new file mode 100644 index 00000000..1dfc80e1 --- /dev/null +++ b/operatingsystems/templates/operatingsystems/osrelease_list.html @@ -0,0 +1,7 @@ +{% extends "objectlist.html" %} + +{% block page_title %}OS Releases{% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Releases
  • {% endblock %} + +{% block content_title %} OS Releases {% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osvariant_delete.html b/operatingsystems/templates/operatingsystems/osvariant_delete.html new file mode 100644 index 00000000..d8f60d2e --- /dev/null +++ b/operatingsystems/templates/operatingsystems/osvariant_delete.html @@ -0,0 +1,77 @@ +{% extends "base.html" %} + +{% load common bootstrap3 %} + +{% block page_title %}OS Variant - {{ osvariant }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Variants
  • + +{% if osvariant %} + {{ osvariant }} +{% endif %} + +{% if osvariants %} + Multiple +{% endif %} + +
  • {% endblock %} + +{% block content_title %}OS Variant - {{ osvariant }}{% endblock %} + +{% block content %} + +{% if osvariant %} +
    + + + + +
    Name {{ osvariant.name }}
    Hosts{% if osvariant.host_set.count != None %} {{ osvariant.host_set.count }} {% else %} 0 {% endif %}
    OS Release{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %}
    +{% endif %} + +{% if osvariants %} +
    + + + + + {% for osvariant in osvariants %} + + + + + + {% endfor %} +
    NameHostsOS Release
    {{ osvariant }} {% if osvariant.host_set.count != None %} {{ osvariant.host_set.count }} {% else %} 0 {% endif %}{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %}
    +{% endif %} + +
    + {% if user.is_authenticated and perms.is_admin %} +
    + Are you sure you want to delete + {% if osvariant %} + this OS Variant? + {% endif %} + {% if osvariants %} + these OS Variants? + {% endif %} +
    +
    +
    + {% csrf_token %} + + +
    +
    + {% else %} +
    + You do not have permission to delete OS Variants. +
    + + {% endif %} +
    +
    + +{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/os_detail.html b/operatingsystems/templates/operatingsystems/osvariant_detail.html similarity index 51% rename from operatingsystems/templates/operatingsystems/os_detail.html rename to operatingsystems/templates/operatingsystems/osvariant_detail.html index dc743770..9ff7721a 100644 --- a/operatingsystems/templates/operatingsystems/os_detail.html +++ b/operatingsystems/templates/operatingsystems/osvariant_detail.html @@ -2,18 +2,18 @@ {% load common bootstrap3 %} -{% block page_title %}Operating System - {{ os }} {% endblock %} +{% block page_title %}OS Variant - {{ osvariant }} {% endblock %} -{% block content_title %} Operating System - {{ os }} {% endblock %} +{% block content_title %} OS Variant - {{ osvariant }} {% endblock %} -{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • {{ os }}
  • {% endblock %} +{% block breadcrumbs %} {{ block.super }}
  • OS Variants
  • {{ osvariant }}
  • {% endblock %} {% block content %}
    @@ -21,37 +21,37 @@
    - - - + + +
    Name {{ os.name }}
    Hosts{% if os.host_set.count != None %} {{ os.host_set.count }} {% else %} 0 {% endif %}
    OS Group{% if os.osgroup != None %} {{ os.osgroup }} {% else %}No OS Group{% endif %}
    Name {{ osvariant.name }}
    Hosts{% if osvariant.host_set.count != None %} {{ osvariant.host_set.count }} {% else %} 0 {% endif %}
    OS Release{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %}
    {% if user.is_authenticated and perms.is_admin %} - {% bootstrap_icon "trash" %} Delete this Operating System + {% bootstrap_icon "trash" %} Delete this OS Variant {% endif %}
    - {% gen_table os.host_set.all %} + {% gen_table osvariant.host_set.all %}
    -
    +
    {% if user.is_authenticated and perms.is_admin %}
    {% csrf_token %} {% bootstrap_form add_form size='small' %} - +
    {% csrf_token %} {% bootstrap_form create_form size='small' %} - +
    {% endif %} diff --git a/operatingsystems/templates/operatingsystems/osvariant_list.html b/operatingsystems/templates/operatingsystems/osvariant_list.html new file mode 100644 index 00000000..3b866208 --- /dev/null +++ b/operatingsystems/templates/operatingsystems/osvariant_list.html @@ -0,0 +1,19 @@ +{% extends "objectlist.html" %} + +{% load common bootstrap3 %} + +{% block page_title %}OS Variants{% endblock %} + +{% block content_title %} OS Variants {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Variants
  • {% endblock %} + +{% block objectlist_actions %} + +{% if user.is_authenticated and perms.is_admin and empty_osvariants %} + +{% endif %} + +{% endblock %} diff --git a/operatingsystems/views.py b/operatingsystems/views.py index 7873e627..f4f4c963 100644 --- a/operatingsystems/views.py +++ b/operatingsystems/views.py @@ -134,7 +134,7 @@ def osrelease_list(request): osreleases = OSRelease.objects.select_related() if 'erratum_id' in request.GET: - osrelease = osreleases.filter(erratum=int(request.GET['erratum_id'])) + osreleases = osreleases.filter(erratum=int(request.GET['erratum_id'])) if 'search' in request.GET: terms = request.GET['search'].lower() @@ -198,6 +198,7 @@ def osrelease_delete(request, osrelease_id): 'operatingsystems/osrelease_delete.html', {'osrelease': osrelease}) + @login_required def os_landing(request): return render(request, 'operatingsystems/os_landing.html') diff --git a/repos/views.py b/repos/views.py index f566a4f7..329f9aca 100644 --- a/repos/views.py +++ b/repos/views.py @@ -49,7 +49,7 @@ def repo_list(request): repos = repos.filter(arch=request.GET['arch']) if 'osrelease' in request.GET: - repos = repos.filter(osrelease=request.GET['osrelease']) + repos = repos.filter(osrelease=int(request.GET['osrelease'])) if 'security' in request.GET: security = request.GET['security'] == 'True' From 2e1ff8855f8d63f77e905095deaa7251a31d917d Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 10 Feb 2025 23:12:08 -0500 Subject: [PATCH 029/199] template, filter and view updates --- hosts/models.py | 12 +++---- hosts/templates/hosts/host_detail.html | 2 +- hosts/views.py | 13 ++++---- operatingsystems/forms.py | 2 +- packages/views.py | 45 +++++++++++++++++--------- reports/models.py | 8 ++--- reports/views.py | 5 ++- repos/templates/repos/repo_detail.html | 2 +- repos/views.py | 20 ++++++------ util/filterspecs.py | 22 ++++++------- util/templatetags/common.py | 18 +++++------ 11 files changed, 77 insertions(+), 72 deletions(-) diff --git 
a/hosts/models.py b/hosts/models.py index 237e4e54..ec81c5ee 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -197,14 +197,11 @@ def find_updates(self): kernel_packages = self.packages.filter(kernels_q) if self.host_repos_only: - update_ids = self.find_host_repo_updates(host_packages, - repo_packages) + update_ids = self.find_host_repo_updates(host_packages, repo_packages) else: - update_ids = self.find_osgrelease_repo_updates(host_packages, - repo_packages) + update_ids = self.find_osrelease_repo_updates(host_packages, repo_packages) - kernel_update_ids = self.find_kernel_updates(kernel_packages, - repo_packages) + kernel_update_ids = self.find_kernel_updates(kernel_packages, repo_packages) for ku_id in kernel_update_ids: update_ids.append(ku_id) @@ -276,7 +273,8 @@ def find_osrelease_repo_updates(self, host_packages, repo_packages): highest_package = package # find the packages that are potential updates - pu_q = Q(name=package.name, arch=package.arch, + pu_q = Q(name=package.name, + arch=package.arch, packagetype=package.packagetype) potential_updates = repo_packages.filter(pu_q) for pu in potential_updates: diff --git a/hosts/templates/hosts/host_detail.html b/hosts/templates/hosts/host_detail.html index da199d36..73871d1f 100644 --- a/hosts/templates/hosts/host_detail.html +++ b/hosts/templates/hosts/host_detail.html @@ -42,7 +42,7 @@ Last Report {{ host.lastreport }} Updates Available {{ host.updates.count }} Reboot Required {{ host.reboot_required }} - Packages Installed {{ host.packages.count}} + Packages Installed {{ host.packages.count}} Repos In Use{% if host.host_repos_only %}Host Repos{% else %}Host and OS Release Repos{% endif %} Last 3 reports diff --git a/hosts/views.py b/hosts/views.py index e0a90584..db72da05 100644 --- a/hosts/views.py +++ b/hosts/views.py @@ -91,13 +91,12 @@ def host_list(request): tags = {} for tag in Tag.objects.all(): tags[tag.name] = tag.name - filter_list.append(Filter(request, 'tag', tags)) - 
filter_list.append(Filter(request, 'domain', Domain.objects.all())) - filter_list.append(Filter(request, 'osvariant', OSVariant.objects.all())) - filter_list.append(Filter(request, 'osrelease', OSRelease.objects.all())) - filter_list.append(Filter(request, 'arch', MachineArchitecture.objects.all())) - filter_list.append(Filter(request, 'reboot_required', - {False: 'No', True: 'Yes'})) + filter_list.append(Filter(request, 'Tag', 'tag', tags)) + filter_list.append(Filter(request, 'Domain', 'domain', Domain.objects.all())) + filter_list.append(Filter(request, 'OS Variant', 'osvariant', OSVariant.objects.filter(host__in=hosts))) + filter_list.append(Filter(request, 'OS Release', 'osrelease', OSRelease.objects.filter(osvariant__host__in=hosts))) + filter_list.append(Filter(request, 'Architecture', 'arch', MachineArchitecture.objects.filter(host__in=hosts))) + filter_list.append(Filter(request, 'Reboot Required', 'reboot_required', {False: 'No', True: 'Yes'})) filter_bar = FilterBar(request, filter_list) return render(request, diff --git a/operatingsystems/forms.py b/operatingsystems/forms.py index 8dcd35b9..548a7d88 100644 --- a/operatingsystems/forms.py +++ b/operatingsystems/forms.py @@ -50,7 +50,7 @@ class AddReposToOSReleaseForm(ModelForm): queryset=Repository.objects.select_related(), required=False, label=None, - widget=FilteredSelectMultiple('Repos', False, attrs={'size':'30'})) + widget=FilteredSelectMultiple('Repos', False, attrs={'size': '30'})) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) diff --git a/packages/views.py b/packages/views.py index be1b228f..aaf51238 100644 --- a/packages/views.py +++ b/packages/views.py @@ -33,7 +33,7 @@ def package_list(request): packages = Package.objects.select_related() if 'arch' in request.GET: - packages = packages.filter(arch=int(request.GET['arch'])).distinct() + packages = packages.filter(arch=request.GET['arch']).distinct() if 'packagetype' in request.GET: packages = 
packages.filter(packagetype=request.GET['packagetype']).distinct() @@ -53,6 +53,27 @@ def package_list(request): if 'module_id' in request.GET: packages = packages.filter(module=request.GET['module_id']).distinct() + if 'affected_by_errata' in request.GET: + affected_by_errata = request.GET['affected_by_errata'] == 'True' + if affected_by_errata: + packages = packages.filter(erratum__isnull=False) + else: + packages = packages.filter(erratum__isnull=True) + + if 'installed_on_hosts' in request.GET: + installed_on_hosts = request.GET['installed_on_hosts'] == 'True' + if installed_on_hosts: + packages = packages.filter(host__isnull=False) + else: + packages = packages.filter(host__isnull=True) + + if 'available_in_repos' in request.GET: + available_in_repos = request.GET['available_in_repos'] == 'True' + if available_in_repos: + packages = packages.filter(mirror__isnull=False) + else: + packages = packages.filter(mirror__isnull=True) + if 'search' in request.GET: terms = request.GET['search'].lower() query = Q() @@ -74,13 +95,11 @@ def package_list(request): page = paginator.page(paginator.num_pages) filter_list = [] - filter_list.append( - Filter(request, 'arch', PackageArchitecture.objects.all())) -# Disabled due to being a huge slowdown -# filter_list.append( -# Filter( -# request, 'packagetype', -# Package.objects.values_list('packagetype', flat=True).distinct())) + filter_list.append(Filter(request, 'Affected by Errata', 'affected_by_errata', {False: 'No', True: 'Yes'})) + filter_list.append(Filter(request, 'Installed on Hosts', 'installed_on_hosts', {False: 'No', True: 'Yes'})) + filter_list.append(Filter(request, 'Available in Repos', 'available_in_repos', {False: 'No', True: 'Yes'})) + filter_list.append(Filter(request, 'Package Type', 'packagetype', Package.PACKAGE_TYPES)) + filter_list.append(Filter(request, 'Architecture', 'arch', PackageArchitecture.objects.all())) filter_bar = FilterBar(request, filter_list) return render(request, @@ -89,6 +108,7 @@ def 
package_list(request): 'filter_bar': filter_bar, 'terms': terms}) + @login_required def package_name_list(request): packages = PackageName.objects.select_related() @@ -122,13 +142,8 @@ def package_name_list(request): page = paginator.page(paginator.num_pages) filter_list = [] - filter_list.append( - Filter(request, 'arch', PackageArchitecture.objects.all())) -# Disabled due to being a huge slowdown -# filter_list.append( -# Filter( -# request, 'packagetype', -# Package.objects.values_list('packagetype', flat=True).distinct())) + filter_list.append(Filter(request, 'Package Type', 'packagetype', Package.PACKAGE_TYPES)) + filter_list.append(Filter(request, 'Architecture', 'arch', PackageArchitecture.objects.all())) filter_bar = FilterBar(request, filter_list) return render(request, diff --git a/reports/models.py b/reports/models.py index 95b954e2..911c27ed 100644 --- a/reports/models.py +++ b/reports/models.py @@ -20,8 +20,8 @@ from django.db import models, IntegrityError, DatabaseError, transaction from django.urls import reverse -from hosts.models import Host from arch.models import MachineArchitecture +from hosts.models import Host from operatingsystems.models import OSVariant, OSRelease from domains.models import Domain from patchman.signals import error_message, info_message @@ -138,11 +138,11 @@ def process(self, find_updates=True, verbose=False): self.host = self.report_ip with transaction.atomic(): - host, c = Hosts.objects.get_or_create( + host, c = Host.objects.get_or_create( hostname=self.host, defaults={ 'ipaddress': self.report_ip, - 'arch': arch, + 'arch': m_arch, 'osvariant': osvariant, 'domain': domain, 'lastreport': self.created, @@ -150,7 +150,7 @@ def process(self, find_updates=True, verbose=False): host.ipaddress = self.report_ip host.kernel = self.kernel - host.arch = arch + host.arch = m_arch host.osvariant = osvariant host.domain = domain host.lastreport = self.created diff --git a/reports/views.py b/reports/views.py index e3d672de..b9a18f2a 
100644 --- a/reports/views.py +++ b/reports/views.py @@ -102,8 +102,7 @@ def report_list(request): page = paginator.page(paginator.num_pages) filter_list = [] - filter_list.append(Filter(request, 'processed', - {False: 'No', True: 'Yes'})) + filter_list.append(Filter(request, 'Processed', 'processed', {False: 'No', True: 'Yes'})) filter_bar = FilterBar(request, filter_list) return render(request, @@ -142,7 +141,7 @@ def report_delete(request, report_id): if request.method == 'POST': if 'delete' in request.POST: report.delete() - text = f'Report {report!s} has been deleted' + text = f'Report {report} has been deleted' messages.info(request, text) return redirect(reverse('reports:report_list')) elif 'cancel' in request.POST: diff --git a/repos/templates/repos/repo_detail.html b/repos/templates/repos/repo_detail.html index 175f2178..7cd2a1b3 100644 --- a/repos/templates/repos/repo_detail.html +++ b/repos/templates/repos/repo_detail.html @@ -28,7 +28,7 @@ Architecture {{ repo.arch }} Security {% yes_no_img repo.security 'Security' 'Not Security' %} Enabled {% yes_no_img repo.enabled 'Enabled' 'Not Enabled' %} - Mirrors {{ repo.mirror_set.count }} + Mirrors {{ repo.mirror_set.count }} Requires Authentication {{ repo.auth_required }} {% if user.is_authenticated and perms.is_admin %} diff --git a/repos/views.py b/repos/views.py index 329f9aca..2886239a 100644 --- a/repos/views.py +++ b/repos/views.py @@ -86,17 +86,12 @@ def repo_list(request): page = paginator.page(paginator.num_pages) filter_list = [] - filter_list.append( - Filter( - request, - 'repotype', - Repository.objects.values_list('repotype', flat=True).distinct())) - filter_list.append(Filter(request, - 'arch', - MachineArchitecture.objects.all())) - filter_list.append(Filter(request, 'enabled', {False: 'No', True: 'Yes'})) - filter_list.append(Filter(request, 'security', {False: 'No', True: 'Yes'})) - filter_list.append(Filter(request, 'osrelease', OSRelease.objects.all())) + filter_list.append(Filter(request, 
'OS Release', 'osrelease', OSRelease.objects.filter(repos__in=repos))) + filter_list.append(Filter(request, 'Enabled', 'enabled', {False: 'No', True: 'Yes'})) + filter_list.append(Filter(request, 'Security', 'security', {False: 'No', True: 'Yes'})) + filter_list.append(Filter(request, 'Repo Type', 'repotype', Repository.REPO_TYPES)) + filter_list.append(Filter(request, 'Architecture', 'arch', + MachineArchitecture.objects.filter(repository__in=repos))) filter_bar = FilterBar(request, filter_list) return render(request, @@ -152,6 +147,9 @@ def move_mirrors(repo): if checksum is not None: mirrors = mirrors.filter(file_checksum=checksum) + if 'repo_id' in request.GET: + mirrors = mirrors.filter(repo=request.GET['repo_id']) + if 'search' in request.GET: terms = request.GET['search'].lower() query = Q() diff --git a/util/filterspecs.py b/util/filterspecs.py index c8b30840..1c845ff3 100644 --- a/util/filterspecs.py +++ b/util/filterspecs.py @@ -22,17 +22,14 @@ def get_query_string(qs): - newqs = [f'{k!s}={v!s}' for k, v in list(qs.items())] - return '?' + '&'.join(newqs).replace(' ', '%20') + new_qs = [f'{k}={v}' for k, v in list(qs.items())] + return '?' + '&'.join(new_qs).replace(' ', '%20') class Filter: - def __init__(self, request, name, filters, header=''): - if header == '': - self.header = name - else: - self.header = header + def __init__(self, request, header, name, filters): + self.header = header if isinstance(filters, tuple): filters = dict(filters) @@ -57,15 +54,14 @@ def output(self, qs): del qs[self.name] output = '
    \n' - output += '
    ' - output += f"{self.header.replace('_', ' ')!s}
    \n" + output += f'
    {self.header}
    \n' output += '
    \n' output += '
    \n' - output += f'{v!s}\n' + output += f'{v}\n' output += '
    ' return output diff --git a/util/templatetags/common.py b/util/templatetags/common.py index b177ca33..497c4714 100644 --- a/util/templatetags/common.py +++ b/util/templatetags/common.py @@ -16,6 +16,8 @@ # You should have received a copy of the GNU General Public License # along with Patchman If not, see . +import re + from humanize import naturaltime from datetime import datetime, timedelta from urllib.parse import urlencode @@ -34,9 +36,7 @@ @register.simple_tag def active(request, pattern): - import re - if re.search(f"^{request.META['SCRIPT_NAME']!s}/{pattern!s}", - request.path): + if re.search(fr"^{request.META['SCRIPT_NAME']}/{pattern}", request.path): return 'active' return '' @@ -46,9 +46,9 @@ def yes_no_img(boolean, alt_yes='Active', alt_no='Not Active'): yes_icon = static('img/icon-yes.gif') no_icon = static('img/icon-no.gif') if boolean: - html = f'{alt_yes!s}' + html = f'{alt_yes}' else: - html = f'{alt_no!s}' + html = f'{alt_no}' return format_html(html) @@ -57,9 +57,9 @@ def no_yes_img(boolean, alt_yes='Not Required', alt_no='Required'): yes_icon = static('img/icon-yes.gif') no_icon = static('img/icon-no.gif') if not boolean: - html = f'{alt_yes!s}' + html = f'{alt_yes}' else: - html = f'{alt_no!s}' + html = f'{alt_no}' return format_html(html) @@ -70,7 +70,7 @@ def gen_table(object_list, template_name=None): if not template_name: app_label = object_list.model._meta.app_label model_name = object_list.model._meta.verbose_name.replace(' ', '') - template_name = f'{app_label!s}/{model_name.lower()!s}_table.html' + template_name = f'{app_label}/{model_name.lower()}_table.html' template = get_template(template_name) html = template.render({'object_list': object_list}) return html @@ -83,7 +83,7 @@ def object_count(page): name = page.paginator.object_list.model._meta.verbose_name else: name = page.paginator.object_list.model._meta.verbose_name_plural - return f'{page.paginator.count!s} {name!s}' + return f'{page.paginator.count} {name}' 
@register.simple_tag From 1812ff784804ab5ea290e5f47ea4029a90d45b21 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 10 Feb 2025 23:13:21 -0500 Subject: [PATCH 030/199] flake8 fixes --- reports/utils.py | 8 ++++---- reports/views.py | 2 +- util/__init__.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/reports/utils.py b/reports/utils.py index 4c10226d..746d5042 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -224,9 +224,9 @@ def parse_repos(repos_string): """ repos = [] for r in [s for s in repos_string.splitlines() if s]: - repodata = re.findall('\'.*?\'', r) + repodata = re.findall(r"'.*?'", r) for i, rs in enumerate(repodata): - repodata[i] = rs.replace('\'', '') + repodata[i] = rs.replace("'", '') repos.append(repodata) return repos @@ -301,7 +301,7 @@ def parse_modules(modules_string): """ modules = [] for module in modules_string.splitlines(): - module_string = [m for m in module.replace('\'', '').split(' ') if m] + module_string = [m for m in module.replace("'", '').split(' ') if m] if module_string: modules.append(module_string) return modules @@ -369,7 +369,7 @@ def parse_packages(packages_string): """ packages = [] for p in packages_string.splitlines(): - packages.append(p.replace('\'', '').split(' ')) + packages.append(p.replace("'", '').split(' ')) return packages diff --git a/reports/views.py b/reports/views.py index b9a18f2a..0bf8367e 100644 --- a/reports/views.py +++ b/reports/views.py @@ -49,7 +49,7 @@ def upload(request): packages = [] if 'packages' in data: for p in data['packages'].splitlines(): - packages.append(p.replace('\'', '').split(' ')) + packages.append(p.replace("'", '').split(' ')) repos = data.get('repos') modules = data.get('modules') sec_updates = data.get('sec_updates') diff --git a/util/__init__.py b/util/__init__.py index aad5410e..05e87ccf 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -182,7 +182,7 @@ def bunzip2(contents): if e == 'invalid data stream': 
error_message.send(sender=None, text='bunzip2: ' + e) except ValueError as e: - if e == 'couldn\'t find end of stream': + if e == "couldn't find end of stream": error_message.send(sender=None, text='bunzip2: ' + e) From e95765a8051c2aa035d7c659ad70b341f936543d Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 10 Feb 2025 23:16:49 -0500 Subject: [PATCH 031/199] improve requests error handling --- repos/utils.py | 26 +++++++++++++-------- requirements.txt | 5 ++-- util/__init__.py | 60 +++++++++++++++++++++++++++--------------------- 3 files changed, 53 insertions(+), 38 deletions(-) diff --git a/repos/utils.py b/repos/utils.py index 5a381da8..c9254806 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -29,6 +29,7 @@ from debian.debian_support import Version from debian.deb822 import Packages from fnmatch import fnmatch +from tenacity import RetryError from django.conf import settings from django.db import IntegrityError, DatabaseError, transaction @@ -151,16 +152,17 @@ def get_modules_url(mirror_url, data): def find_mirror_url(stored_mirror_url, formats): """ Find the actual URL of the mirror by trying predefined paths """ - for fmt in formats: mirror_url = stored_mirror_url for f in formats: if mirror_url.endswith(f): mirror_url = mirror_url[:-len(f)] mirror_url = mirror_url.rstrip('/') + '/' + fmt - debug_message.send(sender=None, - text=f'Checking {mirror_url!s}') - res = get_url(mirror_url) + debug_message.send(sender=None, text=f'Checking for mirror at {mirror_url}') + try: + res = get_url(mirror_url) + except RetryError: + return if res is not None and res.ok: return res @@ -230,7 +232,10 @@ def is_metalink(url): def get_metalink_urls(url): """ Parses a metalink and returns a list of mirrors """ - res = get_url(url) + try: + res = get_url(url) + except RetryError: + return if response_is_valid(res): if 'content-type' in res.headers and \ res.headers['content-type'] == 'application/metalink+xml': @@ -251,15 +256,16 @@ def get_mirrorlist_urls(url): type 
text/plain and contains a list of urls. Returns a list of mirrors if it is a mirrorlist. """ - res = get_url(url) + try: + res = get_url(url) + except RetryError: + return if response_is_valid(res): - if 'content-type' in res.headers and \ - 'text/plain' in res.headers['content-type']: + if res.headers.get('content-type') == 'text/plain': data = download_url(res, 'Downloading repo info:') if data is None: return - mirror_urls = re.findall('^http[s]*://.*$|^ftp://.*$', - data.decode('utf-8'), re.MULTILINE) + mirror_urls = re.findall(r'^http[s]*://.*$|^ftp://.*$', data.decode('utf-8'), re.MULTILINE) if mirror_urls: return mirror_urls diff --git a/requirements.txt b/requirements.txt index 4d6a598f..0d53876f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==3.2.25 +Django==4.2.19 django-tagging==0.5.0 django-taggit==4.0.0 django-extensions==3.2.1 @@ -11,10 +11,11 @@ PyYAML==6.0.1 chardet==4.0.0 requests==2.32.3 colorama==0.4.4 -djangorestframework==3.13.1 +djangorestframework==3.14.0 django-filter==21.1 humanize==3.13.1 version-utils==0.3.0 python-magic==0.4.25 pymemcache==4.0.0 gitpython==3.1.44 +tenacity==8.2.3 diff --git a/util/__init__.py b/util/__init__.py index 05e87ccf..1c138230 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -25,7 +25,10 @@ from datetime import datetime from enum import Enum from hashlib import md5, sha1, sha256, sha512 +from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential from progressbar import Bar, ETA, Percentage, ProgressBar +from requests.exceptions import HTTPError, Timeout, ConnectionError + from patchman.signals import error_message, info_message, debug_message from django.utils.timezone import make_aware @@ -43,6 +46,13 @@ Checksum = Enum('Checksum', 'md5 sha sha1 sha256 sha512') +def print_nocr(text): + """ Print text without a carriage return + """ + print(text, end='') + sys.stdout.softspace = False + + def get_verbosity(): """ Get the global verbosity level 
""" @@ -90,21 +100,22 @@ def update_pbar(index, **kwargs): pbar = None -def download_url(res, text='', ljust=35): +def download_url(response, text='', ljust=35): """ Display a progress bar to download the request content if verbose is True. Otherwise, just return the request content """ global verbose + if not response: + return if verbose: - content_length = res.headers.get('content-length') + content_length = response.headers.get('content-length') if content_length: clen = int(content_length) create_pbar(text, clen, ljust) chunk_size = 16384 i = 0 data = b'' - for chunk in res.iter_content(chunk_size=chunk_size, - decode_unicode=False): + for chunk in response.iter_content(chunk_size=chunk_size, decode_unicode=False): i += len(chunk) if i > clen: update_pbar(clen) @@ -114,39 +125,36 @@ def download_url(res, text='', ljust=35): return data else: info_message.send(sender=None, text=text) - return res.content - - -def print_nocr(text): - """ Print text without a carriage return - """ - print(text, end='') - sys.stdout.softspace = False + return response.content +@retry( + retry=retry_if_exception_type(HTTPError | Timeout | ConnectionError | ConnectionResetError), + stop=stop_after_attempt(5), + wait=wait_exponential(multiplier=1, min=2, max=15), + reraise=False, +) def get_url(url, headers={}, params={}): """ Perform a http GET on a URL. Return None on error. 
""" - res = None + response = None try: - res = requests.get(url, headers=headers, params=params, stream=True, timeout=30) - debug_message.send(sender=None, text=f'{res.status_code}: {res.headers}') - except requests.exceptions.Timeout: - error_message.send(sender=None, text=f'Timeout - {url!s}') + debug_message.send(sender=None, text=f'Trying {url} headers:{headers} params:{params}') + response = requests.get(url, headers=headers, params=params, stream=True, timeout=30) + debug_message.send(sender=None, text=f'{response.status_code}: {response.headers}') + if response.status_code == 404: + return None + response.raise_for_status() except requests.exceptions.TooManyRedirects: - error_message.send(sender=None, - text=f'Too many redirects - {url!s}') - except requests.exceptions.RequestException as e: - error_message.send(sender=None, - text=f'Error ({e!s}) - {url!s}') - return res + error_message.send(sender=None, text=f'Too many redirects - {url}') + return response -def response_is_valid(res): +def response_is_valid(response): """ Check if a http response is valid """ - if res is not None: - return res.ok + if response: + return response.ok else: return False From 697b83f739cb41e69c11de95363ea6eb2a45fcf9 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 10 Feb 2025 23:17:52 -0500 Subject: [PATCH 032/199] clean up repo and mirror handling --- repos/models.py | 2 + repos/utils.py | 326 ++++++++++++++++++++++++++---------------------- 2 files changed, 180 insertions(+), 148 deletions(-) diff --git a/repos/models.py b/repos/models.py index 9f6c219c..fb8b926a 100644 --- a/repos/models.py +++ b/repos/models.py @@ -181,6 +181,8 @@ def fail(self): self.refresh = False text = f'Mirror has failed {self.fail_count} times (max={max_mirror_failures}), disabling refresh' error_message.send(sender=None, text=text) + self.last_access_ok = False + self.save() def update_packages(self, packages): """ Update the packages associated with a mirror diff --git a/repos/utils.py 
b/repos/utils.py index c9254806..da5f069f 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -85,7 +85,7 @@ def update_mirror_packages(mirror, packages): removals = old.difference(packages) rlen = len(removals) - ptext = f'Removing {rlen!s} obsolete packages:' + ptext = f'Removing {rlen} obsolete packages:' progress_info_s.send(sender=None, ptext=ptext, plen=rlen) for i, strpackage in enumerate(removals): progress_update_s.send(sender=None, index=i + 1) @@ -94,7 +94,7 @@ def update_mirror_packages(mirror, packages): new = packages.difference(old) nlen = len(new) - ptext = f'Adding {nlen!s} new packages:' + ptext = f'Adding {nlen} new packages:' progress_info_s.send(sender=None, ptext=ptext, plen=nlen) for i, strpackage in enumerate(new): progress_update_s.send(sender=None, index=i + 1) @@ -277,17 +277,16 @@ def add_mirrors_from_urls(repo, mirror_urls): for mirror_url in mirror_urls: mirror_url = mirror_url.replace('$ARCH', repo.arch.name) mirror_url = mirror_url.replace('$basearch', repo.arch.name) - q = Q(mirrorlist=False, refresh=True) + q = Q(mirrorlist=False, refresh=True, enabled=True) existing = repo.mirror_set.filter(q).count() if existing >= max_mirrors: - text = f'{max_mirrors!s} mirrors already ' - text += f'exist, not adding {mirror_url!s}' + text = f'{existing} mirrors already exist (max={max_mirrors}), not adding any more' warning_message.send(sender=None, text=text) - continue + break from repos.models import Mirror m, c = Mirror.objects.get_or_create(repo=repo, url=mirror_url) if c: - text = f'Added mirror - {mirror_url!s}' + text = f'Added mirror - {mirror_url}' info_message.send(sender=None, text=text) @@ -301,7 +300,7 @@ def check_for_mirrorlists(repo): mirror.mirrorlist = True mirror.last_access_ok = True mirror.save() - text = f'Found mirrorlist - {mirror.url!s}' + text = f'Found mirrorlist - {mirror.url}' info_message.send(sender=None, text=text) add_mirrors_from_urls(repo, mirror_urls) @@ -319,7 +318,7 @@ def check_for_metalinks(repo): 
mirror.mirrorlist = True mirror.last_access_ok = True mirror.save() - text = f'Found metalink - {mirror.url!s}' + text = f'Found metalink - {mirror.url}' info_message.send(sender=None, text=text) add_mirrors_from_urls(repo, mirror_urls) @@ -332,7 +331,7 @@ def extract_module_metadata(data, url, repo): try: modules_yaml = yaml.safe_load_all(extracted) except yaml.YAMLError as e: - print(e) + error_message.send(sender=None, text=e) for doc in modules_yaml: document = doc['document'] modulemd = doc['data'] @@ -378,10 +377,10 @@ def extract_yum_packages(data, url): extracted = extract(data, url) ns = 'http://linux.duke.edu/metadata/common' m_context = etree.iterparse(BytesIO(extracted), - tag=f'{{{ns!s}}}metadata') + tag=f'{{{ns}}}metadata') plen = int(next(m_context)[1].get('packages')) p_context = etree.iterparse(BytesIO(extracted), - tag=f'{{{ns!s}}}package') + tag=f'{{{ns}}}package') packages = set() if plen > 0: @@ -538,63 +537,35 @@ def extract_arch_packages(data): return packages -def refresh_yum_repo(mirror, data, mirror_url, ts): - """ Refresh package metadata for a yum-style rpm mirror - and add the packages to the mirror - """ - primary_url, primary_checksum, primary_checksum_type = get_primary_url(mirror_url, data) - modules_url, modules_checksum, modules_checksum_type = get_modules_url(mirror_url, data) +def fetch_mirror_data(mirror, url, text, checksum=None, checksum_type=None, metadata_type=None): + if not url: + mirror.fail() + return - if not primary_url: + try: + res = get_url(url) + except RetryError: mirror.fail() return - res = get_url(primary_url) mirror.last_access_ok = response_is_valid(res) - if not mirror.last_access_ok: mirror.fail() return - package_data = download_url(res, 'Downloading package info:') - if package_data is None: + data = download_url(res, text) + if not data: mirror.fail() return - computed_checksum = get_checksum(package_data, Checksum[primary_checksum_type]) - if not mirror_checksum_is_valid(computed_checksum, 
primary_checksum, mirror, 'package'): - return - - if mirror.file_checksum == primary_checksum: - text = 'Mirror checksum has not changed, ' - text += 'not refreshing package metadata' - warning_message.send(sender=None, text=text) - return - - mirror.file_checksum = primary_checksum - - if modules_url: - res = get_url(modules_url) - module_data = download_url(res, 'Downloading module info:') - computed_checksum = get_checksum(module_data, Checksum[modules_checksum_type]) - if not mirror_checksum_is_valid(computed_checksum, modules_checksum, mirror, 'module'): + if checksum and checksum_type and metadata_type: + computed_checksum = get_checksum(data, Checksum[checksum_type]) + if not mirror_checksum_is_valid(computed_checksum, checksum, mirror, metadata_type): + mirror.fail() return - # only refresh X mirrors, where X = max_mirrors - max_mirrors = get_max_mirrors() - checksum_q = Q(mirrorlist=False, refresh=True, timestamp=ts, - file_checksum=primary_checksum) - have_checksum = mirror.repo.mirror_set.filter(checksum_q).count() - if have_checksum >= max_mirrors: - text = f'{max_mirrors!s} mirrors already have this ' - text += 'checksum, ignoring refresh to save time' - info_message.send(sender=None, text=text) - else: - packages = extract_yum_packages(package_data, primary_url) - if packages: - update_mirror_packages(mirror, packages) - if modules_url: - extract_module_metadata(module_data, modules_url, mirror.repo) + mirror.save() + return data def mirror_checksum_is_valid(computed, provided, mirror, metadata_type): @@ -602,8 +573,7 @@ def mirror_checksum_is_valid(computed, provided, mirror, metadata_type): Returns True if both match. 
""" if not computed or computed != provided: - text = f'Checksum failed for mirror {mirror.id!s}' - text += f', not refreshing {metadata_type} metadata' + text = f'Checksum failed for mirror {mirror.id}, not refreshing {metadata_type} metadata' error_message.send(sender=None, text=text) text = f'Found checksum: {computed}\nExpected checksum: {provided}' error_message.send(sender=None, text=text) @@ -614,42 +584,97 @@ def mirror_checksum_is_valid(computed, provided, mirror, metadata_type): return True +def refresh_yum_repo(mirror, data, mirror_url, ts): + """ Refresh package metadata for a yum-style rpm mirror + and add the packages to the mirror + """ + primary_url, primary_checksum, primary_checksum_type = get_primary_url(mirror_url, data) + package_data = fetch_mirror_data( + mirror=mirror, + url=primary_url, + checksum=primary_checksum, + checksum_type=primary_checksum_type, + text='Downloading package info:', + metadata_type='package') + if not package_data: + return + + if mirror.file_checksum == primary_checksum: + text = 'Mirror checksum has not changed, not refreshing package metadata' + warning_message.send(sender=None, text=text) + return + else: + mirror.file_checksum = primary_checksum + + # only refresh X mirrors, where X = max_mirrors + max_mirrors = get_max_mirrors() + mirrors_q = Q(mirrorlist=False, refresh=True, enabled=True, timestamp=ts, file_checksum=primary_checksum) + have_checksum = mirror.repo.mirror_set.filter(mirrors_q).count() + if have_checksum >= max_mirrors: + text = f'{max_mirrors} mirrors already have this checksum, skipping refresh' + info_message.send(sender=None, text=text) + return + + packages = extract_yum_packages(package_data, primary_url) + if packages: + update_mirror_packages(mirror, packages) + packages.clear() + + modules_url, modules_checksum, modules_checksum_type = get_modules_url(mirror_url, data) + if modules_url: + module_data = fetch_mirror_data( + mirror=mirror, + url=modules_url, + checksum=modules_checksum, + 
checksum_type=modules_checksum_type, + text='Downloading module info:', + metadata_type='module') + if module_data: + extract_module_metadata(module_data, modules_url, mirror.repo) + + mirror.save() + + def refresh_arch_repo(repo): """ Refresh all mirrors of an arch linux repo """ max_mirrors = get_max_mirrors() - fname = f'{repo.arch!s}/{repo.repo_id!s}.db' + fname = f'{repo.arch}/{repo.repo_id}.db' ts = datetime.now().astimezone().replace(microsecond=0) - for i, mirror in enumerate(repo.mirror_set.filter(refresh=True)): + + enabled_mirrors = repo.mirror_set.filter(refresh=True, enabled=True) + for i, mirror in enumerate(enabled_mirrors): + if i >= max_mirrors: + text = f'{max_mirrors} mirrors already refreshed (max={max_mirrors}), skipping further refreshes' + warning_message.send(sender=None, text=text) + break + res = find_mirror_url(mirror.url, [fname]) - mirror.last_access_ok = response_is_valid(res) - if mirror.last_access_ok: - if i >= max_mirrors: - text = f'{max_mirrors!s} mirrors already refreshed, ' - text += f' not refreshing {mirror.url!s}' - warning_message.send(sender=None, text=text) - continue - mirror_url = res.url - text = f'Found arch repo - {mirror_url!s}' - info_message.send(sender=None, text=text) - data = download_url(res, 'Downloading repo info:') - if data is None: - mirror.fail() - return - computed_checksum = get_checksum(data, Checksum.sha1) - if mirror.file_checksum == computed_checksum: - text = 'Mirror checksum has not changed, ' - text += 'not refreshing package metadata' - warning_message.send(sender=None, text=text) - else: - packages = extract_arch_packages(data) - mirror.last_access_ok = True - mirror.timestamp = ts - update_mirror_packages(mirror, packages) - mirror.file_checksum = computed_checksum - packages.clear() + if not res: + continue + mirror_url = res.url + text = f'Found arch repo - {mirror_url}' + info_message.send(sender=None, text=text) + + package_data = fetch_mirror_data( + mirror=mirror, + url=mirror_url, + 
text='Downloading repo info:') + if not package_data: + continue + + computed_checksum = get_checksum(package_data, Checksum.sha1) + if mirror.file_checksum == computed_checksum: + text = 'Mirror checksum has not changed, not refreshing package metadata' + warning_message.send(sender=None, text=text) + continue else: - mirror.fail() + mirror.file_checksum = computed_checksum + + packages = extract_arch_packages(package_data) + update_mirror_packages(mirror, packages) + packages.clear() + mirror.timestamp = ts mirror.save() @@ -822,20 +847,20 @@ def refresh_yast_repo(mirror, data): and add the packages to the mirror """ package_dir = re.findall('DESCRDIR *(.*)', data.decode('utf-8'))[0] - package_url = f'{mirror.url!s}/{package_dir!s}/packages.gz' - res = get_url(package_url) - mirror.last_access_ok = response_is_valid(res) - if mirror.last_access_ok: - data = download_url(res, 'Downloading yast repo info:') - if data is None: - mirror.fail() - return - mirror.file_checksum = 'yast' - packages = extract_yast_packages(data) - if packages: - update_mirror_packages(mirror, packages) - else: - mirror.fail() + package_url = f'{mirror.url}/{package_dir}/packages.gz' + + package_data = fetch_mirror_data( + mirror=mirror, + url=package_url, + text='Downloading yast repo info:') + if not package_data: + return + + mirror.file_checksum = 'yast' + packages = extract_yast_packages(package_data) + if packages: + update_mirror_packages(mirror, packages) + packages.clear() def refresh_rpm_repo(repo): @@ -862,32 +887,34 @@ def refresh_rpm_repo(repo): max_mirrors = get_max_mirrors() ts = datetime.now().astimezone().replace(microsecond=0) - enabled_mirrors = repo.mirror_set.filter(mirrorlist=False, refresh=True) + enabled_mirrors = repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True) for i, mirror in enumerate(enabled_mirrors): + if i >= max_mirrors: + text = f'{max_mirrors} mirrors already refreshed (max={max_mirrors}), skipping further refreshes' + 
warning_message.send(sender=None, text=text) + break + res = find_mirror_url(mirror.url, formats) - mirror.last_access_ok = response_is_valid(res) - if mirror.last_access_ok: - if i >= max_mirrors: - text = f'{max_mirrors!s} mirrors already refreshed, ' - text += f' not refreshing {mirror.url!s}' - warning_message.send(sender=None, text=text) - continue - data = download_url(res, 'Downloading repo info:') - if data is None: - mirror.fail() - return - mirror_url = res.url - if res.url.endswith('content'): - text = f'Found yast rpm repo - {mirror_url!s}' - info_message.send(sender=None, text=text) - refresh_yast_repo(mirror, data) - else: - text = f'Found yum rpm repo - {mirror_url!s}' - info_message.send(sender=None, text=text) - refresh_yum_repo(mirror, data, mirror_url, ts) - mirror.timestamp = ts + if not res: + continue + mirror_url = res.url + + repo_data = fetch_mirror_data( + mirror=mirror, + url=mirror_url, + text='Downloading repo info:') + if not repo_data: + continue + + if mirror_url.endswith('content'): + text = f'Found yast rpm repo - {mirror_url}' + info_message.send(sender=None, text=text) + refresh_yast_repo(mirror, repo_data) else: - mirror.fail() + text = f'Found yum rpm repo - {mirror_url}' + info_message.send(sender=None, text=text) + refresh_yum_repo(mirror, repo_data, mirror_url, ts) + mirror.timestamp = ts mirror.save() @@ -900,35 +927,38 @@ def refresh_deb_repo(repo): formats = ['Packages.xz', 'Packages.bz2', 'Packages.gz', 'Packages'] ts = datetime.now().astimezone().replace(microsecond=0) - for mirror in repo.mirror_set.filter(refresh=True): + enabled_mirrors = repo.mirror_set.filter(refresh=True, enabled=True) + for mirror in enabled_mirrors: res = find_mirror_url(mirror.url, formats) - mirror.last_access_ok = response_is_valid(res) + if not res: + continue + mirror_url = res.url + text = f'Found deb repo - {mirror_url}' + info_message.send(sender=None, text=text) + + package_data = fetch_mirror_data( + mirror=mirror, + url=mirror_url, + 
text='Downloading repo info:') + if not package_data: + continue - if mirror.last_access_ok: - mirror_url = res.url - text = f'Found deb repo - {mirror_url!s}' - info_message.send(sender=None, text=text) - data = download_url(res, 'Downloading repo info:') - if data is None: - mirror.fail() - return - computed_checksum = get_checksum(data, Checksum.sha1) - if mirror.file_checksum == computed_checksum: - text = 'Mirror checksum has not changed, ' - text += 'not refreshing package metadata' - warning_message.send(sender=None, text=text) - else: - packages = extract_deb_packages(data, mirror_url) - if packages is None: - mirror.fail() - else: - mirror.last_access_ok = True - mirror.timestamp = ts - update_mirror_packages(mirror, packages) - mirror.file_checksum = computed_checksum - packages.clear() + computed_checksum = get_checksum(package_data, Checksum.sha1) + if mirror.file_checksum == computed_checksum: + text = 'Mirror checksum has not changed, not refreshing package metadata' + warning_message.send(sender=None, text=text) + continue else: + mirror.file_checksum = computed_checksum + + packages = extract_deb_packages(package_data, mirror_url) + if not packages: mirror.fail() + continue + + update_mirror_packages(mirror, packages) + packages.clear() + mirror.timestamp = ts mirror.save() From 0229e966ca71cbb1b882fc690248e37aff546e6a Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 10 Feb 2025 23:18:21 -0500 Subject: [PATCH 033/199] use timezone-aware datetimes by default --- patchman/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/patchman/settings.py b/patchman/settings.py index 27198cc6..f409517d 100644 --- a/patchman/settings.py +++ b/patchman/settings.py @@ -59,7 +59,7 @@ TIME_ZONE = 'America/NewYork' USE_I18N = True USE_L10N = True -USE_TZ = False +USE_TZ = True DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' From bc62c2bd68947fca9750915736a33015df50fa9e Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 10 Feb 
2025 23:50:05 -0500 Subject: [PATCH 034/199] flake8 fixes --- packages/utils.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index e10df89f..134ffa2e 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -20,10 +20,9 @@ from django.core.exceptions import MultipleObjectsReturned from django.db import IntegrityError, DatabaseError, transaction -from arch.models import MachineArchitecture, PackageArchitecture +from arch.models import PackageArchitecture from packages.models import PackageName, Package, PackageUpdate, PackageCategory, PackageString -from patchman.signals import error_message, progress_info_s, progress_update_s -from util import bunzip2, get_url, download_url, get_sha1 +from patchman.signals import error_message def convert_package_to_packagestring(package): From f6d5ab8d7d3f901c740688353c95174c583db514 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Feb 2025 00:36:27 -0500 Subject: [PATCH 035/199] workaround smart_str usage --- patchman/settings.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/patchman/settings.py b/patchman/settings.py index f409517d..bacf567f 100644 --- a/patchman/settings.py +++ b/patchman/settings.py @@ -4,6 +4,10 @@ import site import sys +import django +from django.utils.encoding import smart_str +django.utils.encoding.smart_text = smart_str + # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) From 6402e70e54b9b916613d9192ab8b7d34a364db82 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Feb 2025 11:39:29 -0500 Subject: [PATCH 036/199] standardize fstrings --- errata/models.py | 2 +- errata/sources/distros/centos.py | 2 +- hooks/yum/patchman.py | 2 +- hooks/zypper/patchman.py | 8 +++---- hosts/models.py | 36 +++++++++++++++--------------- hosts/templatetags/report_alert.py | 2 +- hosts/utils.py | 2 +- hosts/views.py | 4 ++-- packages/models.py | 18 +++++++-------- packages/utils.py | 4 ++-- reports/models.py | 10 ++++----- reports/utils.py | 12 +++++----- repos/models.py | 14 ++++++------ repos/templatetags/repo_buttons.py | 12 +++++----- repos/views.py | 18 +++++++-------- util/__init__.py | 2 +- 16 files changed, 74 insertions(+), 74 deletions(-) diff --git a/errata/models.py b/errata/models.py index 89db7159..d744aadd 100644 --- a/errata/models.py +++ b/errata/models.py @@ -53,7 +53,7 @@ class Meta: verbose_name_plural = 'Errata' def __str__(self): - text = f'{self.name!s} ({self.e_type}), {self.cves.count()} related CVEs, ' + text = f'{self.name} ({self.e_type}), {self.cves.count()} related CVEs, ' text += f'affecting {self.packages.count()} packages and {self.osreleases.count()} OS Releases' return text diff --git a/errata/sources/distros/centos.py b/errata/sources/distros/centos.py index 569f5df3..43f73556 100644 --- a/errata/sources/distros/centos.py +++ b/errata/sources/distros/centos.py @@ -70,7 +70,7 @@ def parse_centos_errata(data): result = etree.XML(data) errata_xml = result.findall('*') elen = len(errata_xml) - ptext = f'Processing {elen!s} Errata:' + ptext = f'Processing {elen} Errata:' progress_info_s.send(sender=None, ptext=ptext, plen=elen) for i, child in enumerate(errata_xml): progress_update_s.send(sender=None, index=i + 1) diff --git a/hooks/yum/patchman.py b/hooks/yum/patchman.py index c59af372..343144eb 100644 --- a/hooks/yum/patchman.py +++ 
b/hooks/yum/patchman.py @@ -27,5 +27,5 @@ def posttrans_hook(conduit): 'servicecmd', '/usr/sbin/patchman-client') args = '-n' - command = f'{servicecmd!s} {args!s}> /dev/null' + command = f'{servicecmd} {args}> /dev/null' os.system(command) diff --git a/hooks/zypper/patchman.py b/hooks/zypper/patchman.py index 3d8f5da9..5ee8aa46 100755 --- a/hooks/zypper/patchman.py +++ b/hooks/zypper/patchman.py @@ -27,22 +27,22 @@ class MyPlugin(Plugin): def PLUGINBEGIN(self, headers, body): logging.info('PLUGINBEGIN') - logging.debug(f'headers: {headers!s}') + logging.debug(f'headers: {headers}') self.ack() def PACKAGESETCHANGED(self, headers, body): logging.info('PACKAGESETCHANGED') - logging.debug(f'headers: {headers!s}') + logging.debug(f'headers: {headers}') print('Sending report to patchman server...') servicecmd = '/usr/sbin/patchman-client' args = '-n' - command = f'{servicecmd!s} {args!s}> /dev/null' + command = f'{servicecmd} {args}> /dev/null' os.system(command) self.ack() def PLUGINEND(self, headers, body): logging.info('PLUGINEND') - logging.debug(f'headers: {headers!s}') + logging.debug(f'headers: {headers}') self.ack() diff --git a/hosts/models.py b/hosts/models.py index ec81c5ee..e1d6d4a8 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -72,21 +72,21 @@ def __str__(self): def show(self): """ Show info about this host """ - text = f'{self!s}:\n' - text += f'IP address : {self.ipaddress!s}\n' - text += f'Reverse DNS : {self.reversedns!s}\n' - text += f'Domain : {self.domain!s}\n' - text += f'OS Variant : {self.osvariant!s}\n' - text += f'Kernel : {self.kernel!s}\n' - text += f'Architecture : {self.arch!s}\n' - text += f'Last report : {self.lastreport!s}\n' - text += f'Packages : {self.get_num_packages()!s}\n' - text += f'Repos : {self.get_num_repos()!s}\n' - text += f'Updates : {self.get_num_updates()!s}\n' - text += f'Tags : {self.tags!s}\n' - text += f'Needs reboot : {self.reboot_required!s}\n' - text += f'Updated at : {self.updated_at!s}\n' - text += f'Host 
repos : {self.host_repos_only!s}\n' + text = f'{self}:\n' + text += f'IP address : {self.ipaddress}\n' + text += f'Reverse DNS : {self.reversedns}\n' + text += f'Domain : {self.domain}\n' + text += f'OS Variant : {self.osvariant}\n' + text += f'Kernel : {self.kernel}\n' + text += f'Architecture : {self.arch}\n' + text += f'Last report : {self.lastreport}\n' + text += f'Packages : {self.get_num_packages()}\n' + text += f'Repos : {self.get_num_repos()}\n' + text += f'Updates : {self.get_num_updates()}\n' + text += f'Tags : {self.tags}\n' + text += f'Needs reboot : {self.reboot_required}\n' + text += f'Updated at : {self.updated_at}\n' + text += f'Host repos : {self.host_repos_only}\n' info_message.send(sender=None, text=text) @@ -115,7 +115,7 @@ def check_rdns(self): info_message.send(sender=None, text='Reverse DNS matches') else: text = 'Reverse DNS mismatch found: ' - text += f'{self.hostname!s} != {self.reversedns!s}' + text += f'{self.hostname} != {self.reversedns}' info_message.send(sender=None, text=text) else: info_message.send(sender=None, @@ -161,7 +161,7 @@ def process_update(self, package, highest_package): try: with transaction.atomic(): self.updates.add(update) - info_message.send(sender=None, text=f'{update!s}') + info_message.send(sender=None, text=f'{update}') return update.id except IntegrityError as e: error_message.send(sender=None, text=e) @@ -356,4 +356,4 @@ class Meta: unique_together = ('host', 'repo') def __str__(self): - return f'{self.host!s}-{self.repo!s}' + return f'{self.host}-{self.repo}' diff --git a/hosts/templatetags/report_alert.py b/hosts/templatetags/report_alert.py index 72cd2540..b005a056 100644 --- a/hosts/templatetags/report_alert.py +++ b/hosts/templatetags/report_alert.py @@ -37,5 +37,5 @@ def report_alert(lastreport): else: days = 14 if lastreport < (timezone.now() - timedelta(days=days)): - html = f'Outdated Report' + html = f'Outdated Report' return format_html(html) diff --git a/hosts/utils.py b/hosts/utils.py index 
6dae1d77..b0a8e675 100644 --- a/hosts/utils.py +++ b/hosts/utils.py @@ -55,7 +55,7 @@ def remove_reports(host, timestamp): del_reports = Report.objects.filter(host=host).exclude(id__in=report_ids) rlen = del_reports.count() - ptext = f'Cleaning {rlen!s} old reports' + ptext = f'Cleaning {rlen} old reports' progress_info_s.send(sender=None, ptext=ptext, plen=rlen) for i, report in enumerate(del_reports): report.delete() diff --git a/hosts/views.py b/hosts/views.py index db72da05..1acf3726 100644 --- a/hosts/views.py +++ b/hosts/views.py @@ -129,7 +129,7 @@ def host_edit(request, hostname): if edit_form.is_valid(): host = edit_form.save() host.save() - text = f'Saved changes to Host {host!s}' + text = f'Saved changes to Host {host}' messages.info(request, text) return redirect(host.get_absolute_url()) else: @@ -153,7 +153,7 @@ def host_delete(request, hostname): if request.method == 'POST': if 'delete' in request.POST: host.delete() - text = f'Host {hostname!s} has been deleted' + text = f'Host {hostname} has been deleted' messages.info(request, text) return redirect(reverse('hosts:host_list')) elif 'cancel' in request.POST: diff --git a/packages/models.py b/packages/models.py index 51005aeb..30cded29 100644 --- a/packages/models.py +++ b/packages/models.py @@ -91,17 +91,17 @@ class Meta: def __str__(self): if self.epoch: - epo = f'{self.epoch!s}:' + epo = f'{self.epoch}:' else: epo = '' if self.release: - rel = f'-{self.release!s}' + rel = f'-{self.release}' else: rel = '' if self.packagetype == 'G': - return f'{self.category!s}/{self.name!s}-{epo!s}{self.version!s}{rel!s}-{self.arch!s}' + return f'{self.category}/{self.name}-{epo}{self.version}{rel}-{self.arch}' else: - return f'{self.name!s}-{epo!s}{self.version!s}{rel!s}-{self.arch!s}' + return f'{self.name}-{epo}{self.version}{rel}-{self.arch}' def get_absolute_url(self): return reverse('packages:package_detail', args=[self.id]) @@ -184,17 +184,17 @@ class Meta: def __str__(self): if self.epoch: - epo = 
f'{self.epoch!s}:' + epo = f'{self.epoch}:' else: epo = '' if self.release: - rel = f'-{self.release!s}' + rel = f'-{self.release}' else: rel = '' if self.packagetype == 'G': - return f'{self.category!s}/{self.name!s}-{epo!s}{self.version!s}{rel!s}-{self.arch!s}' + return f'{self.category}/{self.name}-{epo}{self.version}{rel}-{self.arch}' else: - return f'{self.name!s}-{epo!s}{self.version!s}{rel!s}-{self.arch!s}' + return f'{self.name}-{epo}{self.version}{rel}-{self.arch}' def __key(self): return (self.name, self.epoch, self.version, self.release, self.arch, self.packagetype, self.category) @@ -229,4 +229,4 @@ def __str__(self): update_type = 'Security' else: update_type = 'Bugfix' - return f'{self.oldpackage!s} -> {self.newpackage!s} ({update_type!s})' + return f'{self.oldpackage} -> {self.newpackage} ({update_type})' diff --git a/packages/utils.py b/packages/utils.py index 134ffa2e..6dc01241 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -109,12 +109,12 @@ def find_version(s, epoch, release): """ Given a package version string, return the version """ try: - es = f'{epoch!s}:' + es = f'{epoch}:' e = s.index(es) + len(epoch) + 1 except ValueError: e = 0 try: - rs = f'-{release!s}' + rs = f'-{release}' r = s.index(rs) except ValueError: r = len(s) diff --git a/reports/models.py b/reports/models.py index 911c27ed..4b966ae2 100644 --- a/reports/models.py +++ b/reports/models.py @@ -56,7 +56,7 @@ class Meta: ordering = ('-created',) def __str__(self): - return f"{self.host!s} {self.created.strftime('%c')!s}" + return f"{self.host} {self.created.strftime('%c')}" def get_absolute_url(self): return reverse('reports:report_detail', args=[str(self.id)]) @@ -170,7 +170,7 @@ def process(self, find_updates=True, verbose=False): if verbose: text = 'Processing report ' - text += f'{self.id!s} - {self.host!s}' + text += f'{self.id} - {self.host}' info_message.send(sender=None, text=text) from reports.utils import process_packages, \ @@ -191,15 +191,15 @@ def 
process(self, find_updates=True, verbose=False): if find_updates: if verbose: text = 'Finding updates for report ' - text += f'{self.id!s} - {self.host!s}' + text += f'{self.id} - {self.host}' info_message.send(sender=None, text=text) host.find_updates() else: if self.processed: - text = f'Report {self.id!s} ' + text = f'Report {self.id} ' text += 'has already been processed' info_message.send(sender=None, text=text) else: text = 'Error: OS, kernel or arch not sent ' - text += f'with report {self.id!s}' + text += f'with report {self.id}' error_message.send(sender=None, text=text) diff --git a/reports/utils.py b/reports/utils.py index 746d5042..a777cfa1 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -40,7 +40,7 @@ def process_repos(report, host): repos = parse_repos(report.repos) progress_info_s.send(sender=None, - ptext=f'{str(host)[0:25]!s} repos', + ptext=f'{str(host)[0:25]} repos', plen=len(repos)) for i, repo_str in enumerate(repos): repo, priority = process_repo(repo_str, report.arch) @@ -75,7 +75,7 @@ def process_modules(report, host): modules = parse_modules(report.modules) progress_info_s.send(sender=None, - ptext=f'{str(host)[0:25]!s} modules', + ptext=f'{str(host)[0:25]} modules', plen=len(modules)) for i, module_str in enumerate(modules): module = process_module(module_str) @@ -103,7 +103,7 @@ def process_packages(report, host): packages = parse_packages(report.packages) progress_info_s.send(sender=None, - ptext=f'{str(host)[0:25]!s} packages', + ptext=f'{str(host)[0:25]} packages', plen=len(packages)) for i, pkg_str in enumerate(packages): package = process_package(pkg_str, report.protocol) @@ -118,7 +118,7 @@ def process_packages(report, host): error_message.send(sender=None, text=e) else: if pkg_str[0].lower() != 'gpg-pubkey': - text = f'No package returned for {pkg_str!s}' + text = f'No package returned for {pkg_str}' info_message.send(sender=None, text=text) progress_update_s.send(sender=None, index=i + 1) @@ -158,7 +158,7 @@ def 
add_updates(updates, host): host.updates.remove(host_update) ulen = len(updates) if ulen > 0: - ptext = f'{str(host)[0:25]!s} updates' + ptext = f'{str(host)[0:25]} updates' progress_info_s.send(sender=None, ptext=ptext, plen=ulen) for i, (u, sec) in enumerate(updates.items()): @@ -175,7 +175,7 @@ def parse_updates(updates_string, security): updates = {} ulist = updates_string.lower().split() while ulist: - name = f'{ulist[0]!s} {ulist[1]!s} {ulist[2]!s}\n' + name = f'{ulist[0]} {ulist[1]} {ulist[2]}\n' del ulist[:3] updates[name] = security return updates diff --git a/repos/models.py b/repos/models.py index fb8b926a..72f8f8d8 100644 --- a/repos/models.py +++ b/repos/models.py @@ -66,9 +66,9 @@ def get_absolute_url(self): def show(self): """ Show info about this repo, including mirrors """ - text = f'{self.id!s} : {self.name!s}\n' - text += f'security: {self.security!s} ' - text += f'arch: {self.arch!s}\n' + text = f'{self.id} : {self.name}\n' + text += f'security: {self.security} ' + text += f'arch: {self.arch}\n' text += 'Mirrors:' info_message.send(sender=None, text=text) @@ -97,7 +97,7 @@ def refresh(self, force=False): refresh_gentoo_repo(self) else: text = 'Error: unknown repo type for repo ' - text += f'{self.id!s}: {self.repotype!s}' + text += f'{self.id}: {self.repotype}' error_message.send(sender=None, text=text) else: text = 'Repo requires certificate authentication, not updating' @@ -156,9 +156,9 @@ def get_absolute_url(self): def show(self): """ Show info about this mirror """ - text = f' {self.id!s} : {self.url!s}\n' + text = f' {self.id} : {self.url}\n' text += ' last updated: ' - text += f'{self.timestamp!s} checksum: {self.file_checksum!s}\n' + text += f'{self.timestamp} checksum: {self.file_checksum}\n' info_message.send(sender=None, text=text) def fail(self): @@ -167,7 +167,7 @@ def fail(self): Set MAX_MIRROR_FAILURES to -1 to disable marking mirrors as failures Default is 28 """ - text = f'No usable mirror found at {self.url!s}' + text = f'No 
usable mirror found at {self.url}' error_message.send(sender=None, text=text) default_max_mirror_failures = 28 if has_setting_of_type('MAX_MIRROR_FAILURES', int): diff --git a/repos/templatetags/repo_buttons.py b/repos/templatetags/repo_buttons.py index 67f0af7f..3689c8b7 100644 --- a/repos/templatetags/repo_buttons.py +++ b/repos/templatetags/repo_buttons.py @@ -29,11 +29,11 @@ def yes_no_button_repo_en(repo): yes_icon = static('img/icon-yes.gif') no_icon = static('img/icon-no.gif') html = '' return format_html(html) @@ -45,10 +45,10 @@ def yes_no_button_repo_sec(repo): yes_icon = static('img/icon-yes.gif') no_icon = static('img/icon-no.gif') html = '' return format_html(html) diff --git a/repos/views.py b/repos/views.py index 2886239a..53c1c848 100644 --- a/repos/views.py +++ b/repos/views.py @@ -189,7 +189,7 @@ def move_mirrors(repo): repo.security = security repo.save() move_mirrors(repo) - text = f'Mirrors linked to new Repository {repo!s}' + text = f'Mirrors linked to new Repository {repo}' messages.info(request, text) return redirect(repo.get_absolute_url()) @@ -197,7 +197,7 @@ def move_mirrors(repo): if link_form.is_valid(): repo = link_form.cleaned_data['name'] move_mirrors(repo) - text = f'Mirrors linked to Repository {repo!s}' + text = f'Mirrors linked to Repository {repo}' messages.info(request, text) return redirect(repo.get_absolute_url()) else: @@ -236,7 +236,7 @@ def mirror_delete(request, mirror_id): if request.method == 'POST': if 'delete' in request.POST: mirror.delete() - text = f'Mirror {mirror!s} has been deleted' + text = f'Mirror {mirror} has been deleted' messages.info(request, text) return redirect(reverse('repos:mirror_list')) elif 'cancel' in request.POST: @@ -258,7 +258,7 @@ def mirror_edit(request, mirror_id): if edit_form.is_valid(): mirror = edit_form.save() mirror.save() - text = f'Saved changes to Mirror {mirror!s}' + text = f'Saved changes to Mirror {mirror}' messages.info(request, text) return redirect(mirror.get_absolute_url()) 
else: @@ -302,7 +302,7 @@ def repo_edit(request, repo_id): repo.enable() else: repo.disable() - text = f'Saved changes to Repository {repo!s}' + text = f'Saved changes to Repository {repo}' messages.info(request, text) return redirect(repo.get_absolute_url()) else: @@ -328,7 +328,7 @@ def repo_delete(request, repo_id): for mirror in repo.mirror_set.all(): mirror.delete() repo.delete() - text = f'Repository {repo!s} has been deleted' + text = f'Repository {repo} has been deleted' messages.info(request, text) return redirect(reverse('repos:repo_list')) elif 'cancel' in request.POST: @@ -353,7 +353,7 @@ def repo_toggle_enabled(request, repo_id): if request.is_ajax(): return HttpResponse(status=204) else: - text = f'Repository {repo!s} has been {status!s}' + text = f'Repository {repo} has been {status}' messages.info(request, text) return redirect(repo.get_absolute_url()) @@ -372,8 +372,8 @@ def repo_toggle_security(request, repo_id): if request.is_ajax(): return HttpResponse(status=204) else: - text = f'Repository {repo!s} has been marked' - text += f' as a {sectype!s} update repo' + text = f'Repository {repo} has been marked' + text += f' as a {sectype} update repo' messages.info(request, text) return redirect(repo.get_absolute_url()) diff --git a/util/__init__.py b/util/__init__.py index 1c138230..7c174d55 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -237,7 +237,7 @@ def get_checksum(data, checksum_type): elif checksum_type == Checksum.md5: checksum = get_md5(data) else: - text = f'Unknown checksum type: {checksum_type!s}' + text = f'Unknown checksum type: {checksum_type}' error_message.send(sender=None, text=text) return checksum From 803045619ce4d2f78b7fd9483e612718d19845fa Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Feb 2025 11:44:34 -0500 Subject: [PATCH 037/199] lowercase boolean querystring args --- hosts/views.py | 4 ++-- packages/views.py | 12 ++++++------ reports/views.py | 4 ++-- repos/views.py | 8 ++++---- 4 files changed, 14 
insertions(+), 14 deletions(-) diff --git a/hosts/views.py b/hosts/views.py index 1acf3726..a6de4366 100644 --- a/hosts/views.py +++ b/hosts/views.py @@ -64,7 +64,7 @@ def host_list(request): hosts = hosts.filter(tags__name__in=[request.GET['tag']]) if 'reboot_required' in request.GET: - reboot_required = request.GET['reboot_required'] == 'True' + reboot_required = request.GET['reboot_required'] == 'true' hosts = hosts.filter(reboot_required=reboot_required) if 'search' in request.GET: @@ -96,7 +96,7 @@ def host_list(request): filter_list.append(Filter(request, 'OS Variant', 'osvariant', OSVariant.objects.filter(host__in=hosts))) filter_list.append(Filter(request, 'OS Release', 'osrelease', OSRelease.objects.filter(osvariant__host__in=hosts))) filter_list.append(Filter(request, 'Architecture', 'arch', MachineArchitecture.objects.filter(host__in=hosts))) - filter_list.append(Filter(request, 'Reboot Required', 'reboot_required', {False: 'No', True: 'Yes'})) + filter_list.append(Filter(request, 'Reboot Required', 'reboot_required', {'true': 'Yes', 'false': 'No'})) filter_bar = FilterBar(request, filter_list) return render(request, diff --git a/packages/views.py b/packages/views.py index aaf51238..66f3720f 100644 --- a/packages/views.py +++ b/packages/views.py @@ -54,21 +54,21 @@ def package_list(request): packages = packages.filter(module=request.GET['module_id']).distinct() if 'affected_by_errata' in request.GET: - affected_by_errata = request.GET['affected_by_errata'] == 'True' + affected_by_errata = request.GET['affected_by_errata'] == 'true' if affected_by_errata: packages = packages.filter(erratum__isnull=False) else: packages = packages.filter(erratum__isnull=True) if 'installed_on_hosts' in request.GET: - installed_on_hosts = request.GET['installed_on_hosts'] == 'True' + installed_on_hosts = request.GET['installed_on_hosts'] == 'true' if installed_on_hosts: packages = packages.filter(host__isnull=False) else: packages = packages.filter(host__isnull=True) if 
'available_in_repos' in request.GET: - available_in_repos = request.GET['available_in_repos'] == 'True' + available_in_repos = request.GET['available_in_repos'] == 'true' if available_in_repos: packages = packages.filter(mirror__isnull=False) else: @@ -95,9 +95,9 @@ def package_list(request): page = paginator.page(paginator.num_pages) filter_list = [] - filter_list.append(Filter(request, 'Affected by Errata', 'affected_by_errata', {False: 'No', True: 'Yes'})) - filter_list.append(Filter(request, 'Installed on Hosts', 'installed_on_hosts', {False: 'No', True: 'Yes'})) - filter_list.append(Filter(request, 'Available in Repos', 'available_in_repos', {False: 'No', True: 'Yes'})) + filter_list.append(Filter(request, 'Affected by Errata', 'affected_by_errata', {'true': 'Yes', 'false': 'No'})) + filter_list.append(Filter(request, 'Installed on Hosts', 'installed_on_hosts', {'true': 'Yes', 'false': 'No'})) + filter_list.append(Filter(request, 'Available in Repos', 'available_in_repos', {'true': 'Yes', 'false': 'No'})) filter_list.append(Filter(request, 'Package Type', 'packagetype', Package.PACKAGE_TYPES)) filter_list.append(Filter(request, 'Architecture', 'arch', PackageArchitecture.objects.all())) filter_bar = FilterBar(request, filter_list) diff --git a/reports/views.py b/reports/views.py index 0bf8367e..39b17d9d 100644 --- a/reports/views.py +++ b/reports/views.py @@ -78,7 +78,7 @@ def report_list(request): reports = reports.filter(hostname=int(request.GET['host_id'])) if 'processed' in request.GET: - processed = request.GET['processed'] == 'True' + processed = request.GET['processed'] == 'true' reports = reports.filter(processed=processed) if 'search' in request.GET: @@ -102,7 +102,7 @@ def report_list(request): page = paginator.page(paginator.num_pages) filter_list = [] - filter_list.append(Filter(request, 'Processed', 'processed', {False: 'No', True: 'Yes'})) + filter_list.append(Filter(request, 'Processed', 'processed', {'true': 'Yes', 'false': 'No'})) filter_bar = 
FilterBar(request, filter_list) return render(request, diff --git a/repos/views.py b/repos/views.py index 53c1c848..9303fcab 100644 --- a/repos/views.py +++ b/repos/views.py @@ -52,11 +52,11 @@ def repo_list(request): repos = repos.filter(osrelease=int(request.GET['osrelease'])) if 'security' in request.GET: - security = request.GET['security'] == 'True' + security = request.GET['security'] == 'true' repos = repos.filter(security=security) if 'enabled' in request.GET: - enabled = request.GET['enabled'] == 'True' + enabled = request.GET['enabled'] == 'true' repos = repos.filter(enabled=enabled) if 'package_id' in request.GET: @@ -87,8 +87,8 @@ def repo_list(request): filter_list = [] filter_list.append(Filter(request, 'OS Release', 'osrelease', OSRelease.objects.filter(repos__in=repos))) - filter_list.append(Filter(request, 'Enabled', 'enabled', {False: 'No', True: 'Yes'})) - filter_list.append(Filter(request, 'Security', 'security', {False: 'No', True: 'Yes'})) + filter_list.append(Filter(request, 'Enabled', 'enabled', {'true': 'Yes', 'false': 'No'})) + filter_list.append(Filter(request, 'Security', 'security', {'true': 'Yes', 'false': 'No'})) filter_list.append(Filter(request, 'Repo Type', 'repotype', Repository.REPO_TYPES)) filter_list.append(Filter(request, 'Architecture', 'arch', MachineArchitecture.objects.filter(repository__in=repos))) From 85ea6bcda8b309ec3460300a930d89efafefab71 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Feb 2025 11:47:35 -0500 Subject: [PATCH 038/199] remove extra comma --- hosts/views.py | 4 ++-- modules/views.py | 4 ++-- reports/views.py | 8 ++++---- repos/views.py | 24 +++++++++++------------- 4 files changed, 19 insertions(+), 21 deletions(-) diff --git a/hosts/views.py b/hosts/views.py index a6de4366..7ff4b66d 100644 --- a/hosts/views.py +++ b/hosts/views.py @@ -143,7 +143,7 @@ def host_edit(request, hostname): 'hosts/host_edit.html', {'host': host, 'reports': reports, - 'edit_form': edit_form}, ) + 'edit_form': 
edit_form}) @login_required @@ -163,7 +163,7 @@ def host_delete(request, hostname): return render(request, 'hosts/host_delete.html', {'host': host, - 'reports': reports}, ) + 'reports': reports}) class HostViewSet(viewsets.ModelViewSet): diff --git a/modules/views.py b/modules/views.py index 87678e6e..b897a709 100644 --- a/modules/views.py +++ b/modules/views.py @@ -53,7 +53,7 @@ def module_list(request): return render(request, 'modules/module_list.html', {'page': page, - 'terms': terms}, ) + 'terms': terms}) @login_required @@ -62,7 +62,7 @@ def module_detail(request, module_id): module = get_object_or_404(Module, id=module_id) return render(request, 'modules/module_detail.html', - {'module': module}, ) + {'module': module}) class ModuleViewSet(viewsets.ModelViewSet): diff --git a/reports/views.py b/reports/views.py index 39b17d9d..6f9b5ffd 100644 --- a/reports/views.py +++ b/reports/views.py @@ -109,7 +109,7 @@ def report_list(request): 'reports/report_list.html', {'page': page, 'filter_bar': filter_bar, - 'terms': terms}, ) + 'terms': terms}) @login_required @@ -119,7 +119,7 @@ def report_detail(request, report_id): return render(request, 'reports/report_detail.html', - {'report': report}, ) + {'report': report}) @login_required @@ -130,7 +130,7 @@ def report_process(request, report_id): return render(request, 'reports/report_detail.html', - {'report': report}, ) + {'report': report}) @login_required @@ -149,4 +149,4 @@ def report_delete(request, report_id): return render(request, 'reports/report_delete.html', - {'report': report}, ) + {'report': report}) diff --git a/repos/views.py b/repos/views.py index 9303fcab..04966c36 100644 --- a/repos/views.py +++ b/repos/views.py @@ -98,7 +98,7 @@ def repo_list(request): 'repos/repo_list.html', {'page': page, 'filter_bar': filter_bar, - 'terms': terms}, ) + 'terms': terms}) @login_required @@ -110,9 +110,7 @@ def pre_reqs(arch, repotype): text = 'Not all mirror architectures are the same,' text += ' cannot link to or 
create repos' messages.info(request, text) - return render(request, - 'repos/mirror_with_repo_list.html', - {'page': page, 'checksum': checksum}, ) + return render(request, 'repos/mirror_with_repo_list.html', {'page': page, 'checksum': checksum}) if mirror.repo.repotype != repotype: text = 'Not all mirror repotypes are the same,' @@ -120,7 +118,7 @@ def pre_reqs(arch, repotype): messages.info(request, text) return render(request, 'repos/mirror_with_repo_list.html', - {'page': page, 'checksum': checksum}, ) + {'page': page, 'checksum': checksum}) return True def move_mirrors(repo): @@ -215,10 +213,10 @@ def move_mirrors(repo): {'page': page, 'link_form': link_form, 'create_form': create_form, - 'checksum': checksum}, ) + 'checksum': checksum}) return render(request, 'repos/mirror_list.html', - {'page': page}, ) + {'page': page}) @login_required @@ -226,7 +224,7 @@ def mirror_detail(request, mirror_id): mirror = get_object_or_404(Mirror, id=mirror_id) return render(request, 'repos/mirror_detail.html', - {'mirror': mirror}, ) + {'mirror': mirror}) @login_required @@ -244,7 +242,7 @@ def mirror_delete(request, mirror_id): return render(request, 'repos/mirror_delete.html', - {'mirror': mirror}, ) + {'mirror': mirror}) @login_required @@ -270,7 +268,7 @@ def mirror_edit(request, mirror_id): return render(request, 'repos/mirror_edit.html', - {'mirror': mirror, 'edit_form': edit_form}, ) + {'mirror': mirror, 'edit_form': edit_form}) @login_required @@ -280,7 +278,7 @@ def repo_detail(request, repo_id): return render(request, 'repos/repo_detail.html', - {'repo': repo}, ) + {'repo': repo}) @login_required @@ -315,7 +313,7 @@ def repo_edit(request, repo_id): return render(request, 'repos/repo_edit.html', - {'repo': repo, 'edit_form': edit_form}, ) + {'repo': repo, 'edit_form': edit_form}) @login_required @@ -336,7 +334,7 @@ def repo_delete(request, repo_id): return render(request, 'repos/repo_delete.html', - {'repo': repo}, ) + {'repo': repo}) @login_required From 
428d71603a51752d5c67bac8bf7e322506b2a547 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Feb 2025 11:48:04 -0500 Subject: [PATCH 039/199] add icon to external links --- security/templates/security/cve_detail.html | 8 ++++---- security/templates/security/cve_table.html | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/security/templates/security/cve_detail.html b/security/templates/security/cve_detail.html index 27323976..81f866f6 100644 --- a/security/templates/security/cve_detail.html +++ b/security/templates/security/cve_detail.html @@ -36,7 +36,7 @@ CWEs {% for cwe in cve.cwes.all %} - {{ cwe.cwe_id }} - {{ cwe.name }}
    + {{ cwe.cwe_id }} - {{ cwe.name }}
    {% endfor %} @@ -54,9 +54,9 @@ URLs - - - + + + {% for reference in cve.erratum_set.references.all %} diff --git a/security/templates/security/cve_table.html b/security/templates/security/cve_table.html index 81259636..493b1b98 100644 --- a/security/templates/security/cve_table.html +++ b/security/templates/security/cve_table.html @@ -19,9 +19,9 @@ From 59b9cea73f1a35fdd7087f44fe1e68b310045085 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Feb 2025 11:48:28 -0500 Subject: [PATCH 040/199] return response for 404 --- util/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/util/__init__.py b/util/__init__.py index 7c174d55..3855ad7a 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -143,7 +143,7 @@ def get_url(url, headers={}, params={}): response = requests.get(url, headers=headers, params=params, stream=True, timeout=30) debug_message.send(sender=None, text=f'{response.status_code}: {response.headers}') if response.status_code == 404: - return None + return response response.raise_for_status() except requests.exceptions.TooManyRedirects: error_message.send(sender=None, text=f'Too many redirects - {url}') From 483a81d74d900ea081a2e6e88a3b282976f4b8cb Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 12 Feb 2025 01:45:24 -0500 Subject: [PATCH 041/199] disable centos updates --- errata/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/errata/utils.py b/errata/utils.py index 16a98289..f33e4546 100644 --- a/errata/utils.py +++ b/errata/utils.py @@ -36,7 +36,7 @@ def update_errata(): if has_setting_of_type('ERRATA_OS_UPDATES', list): errata_os_updates = settings.ERRATA_OS_UPDATES else: - errata_os_updates = ['rocky', 'alma', 'centos', 'arch', 'ubuntu', 'debian', 'rhel', 'suse'] + errata_os_updates = ['rocky', 'alma', 'arch', 'ubuntu', 'debian', 'rhel', 'suse', 'amazon'] if 'arch' in errata_os_updates: update_arch_errata() if 'alma' in errata_os_updates: From 
e55bfce7d2fe55f3f0f62f65b5b6e2b260c2c965 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 12 Feb 2025 01:49:44 -0500 Subject: [PATCH 042/199] switch to redis for caching and add celery tasks --- errata/tasks.py | 48 +++++++++++++++++++++++++++++++++ etc/patchman/local_settings.py | 10 +++---- patchman/__init__.py | 6 +++++ patchman/celery.py | 6 +---- patchman/settings.py | 12 +++------ reports/tasks.py | 28 ++++++++++++++----- reports/views.py | 6 ++--- repos/tasks.py | 45 +++++++++++++++++++++++++++++++ requirements.txt | 5 ++-- security/tasks.py | 49 ++++++++++++++++++++++++++++++++++ 10 files changed, 183 insertions(+), 32 deletions(-) create mode 100644 errata/tasks.py create mode 100644 repos/tasks.py create mode 100644 security/tasks.py diff --git a/errata/tasks.py b/errata/tasks.py new file mode 100644 index 00000000..5b253ffa --- /dev/null +++ b/errata/tasks.py @@ -0,0 +1,48 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from datetime import timedelta + +from django.conf import settings + +from celery import shared_task +from patchman.celery import app + +from errata.models import Erratum +from security.tasks import update_cves, update_cwes + +app.conf.beat_schedule = { + 'update-errata-cves-cwes-every-6-hours': { + 'task': 'tasks.update_errata', + 'schedule': timedelta(hours=6), + }, +} + +@shared_task +def update_erratum(erratum): + """ Task to update an erratum + """ + erratum.update() + + +@shared_task +def update_errata(): + """ Task to update all errata + """ + for e in Erratum.objects.all(): + update_erratum.delay(e) + update_cves.delay() + update_cwes.delay() diff --git a/etc/patchman/local_settings.py b/etc/patchman/local_settings.py index 9b9430e2..c14de3cd 100644 --- a/etc/patchman/local_settings.py +++ b/etc/patchman/local_settings.py @@ -37,13 +37,11 @@ # Whether to run patchman under the gunicorn web server RUN_GUNICORN = False -# Enable memcached +# Enable redis caching for 30 seconds CACHES = { 'default': { - 'BACKEND': 'django.core.cache.backends.memcached.PyMemcacheCache', - 'LOCATION': '127.0.0.1:11211', - 'OPTIONS': { - 'ignore_exc': True, - }, + 'BACKEND': 'django.core.cache.backends.redis.RedisCache', + 'LOCATION': 'redis://127.0.0.1:6379', + 'TIMEOUT': 30, } } diff --git a/patchman/__init__.py b/patchman/__init__.py index 34f6f97e..e78b0cd3 100644 --- a/patchman/__init__.py +++ b/patchman/__init__.py @@ -15,3 +15,9 @@ # along with Patchman. If not, see from .receivers import * # noqa + +# This will make sure the app is always imported when +# Django starts so that shared_task will use this app. 
+from .celery import app as celery_app + +__all__ = ('celery_app',) diff --git a/patchman/celery.py b/patchman/celery.py index f75dd1f0..3c58edc5 100644 --- a/patchman/celery.py +++ b/patchman/celery.py @@ -20,10 +20,6 @@ os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'patchman.settings') # noqa from django.conf import settings # noqa - app = Celery('patchman') app.config_from_object('django.conf:settings', namespace='CELERY') -app.autodiscover_tasks(lambda: settings.INSTALLED_APPS) - -if __name__ == '__main__': - app.start() +app.autodiscover_tasks() diff --git a/patchman/settings.py b/patchman/settings.py index bacf567f..706bda92 100644 --- a/patchman/settings.py +++ b/patchman/settings.py @@ -86,6 +86,8 @@ 'bootstrap3', 'rest_framework', 'django_filters', + 'celery', + 'django_celery_beat' ] LOCAL_APPS = [ @@ -111,15 +113,7 @@ TAGGIT_CASE_INSENSITIVE = True -try: - from celery import Celery # noqa -except ImportError: - USE_ASYNC_PROCESSING = False -else: - THIRD_PARTY_APPS += ['celery'] - CELERY_IMPORTS = ['reports.tasks'] - USE_ASYNC_PROCESSING = True - CELERY_BROKER_URL = 'redis://127.0.0.1:6379/0' +CELERY_BROKER_URL = 'redis://127.0.0.1:6379/0' LOGIN_REDIRECT_URL = '/patchman/' LOGOUT_REDIRECT_URL = '/patchman/login/' diff --git a/reports/tasks.py b/reports/tasks.py index d205d7c5..3fd74645 100755 --- a/reports/tasks.py +++ b/reports/tasks.py @@ -19,11 +19,25 @@ from reports.models import Report -if settings.USE_ASYNC_PROCESSING: - from celery import shared_task - from patchman.celery import app # noqa +from celery import shared_task +from celery.schedules import crontab +from patchman.celery import app - @shared_task - def process_report(report_id): - report = Report.objects.get(id=report_id) - report.process(verbose=True) +app.conf.beat_schedule = { + 'process-reports': { + 'task': 'reports.tasks.process_reports', + 'schedule': crontab(minute='*/5'), + }, +} + +@shared_task +def process_report(report_id): + report = Report.objects.get(report_id) + 
report.process() + + +@shared_task +def process_reports(): + reports = Report.objects.all(processed=False) + for report in reports: + process_report.delay(report.id) diff --git a/reports/views.py b/reports/views.py index 6f9b5ffd..e676970b 100644 --- a/reports/views.py +++ b/reports/views.py @@ -41,9 +41,9 @@ def upload(request): with transaction.atomic(): report = Report.objects.create() report.parse(data, meta) - if settings.USE_ASYNC_PROCESSING: - from reports.tasks import process_report - process_report.delay(report.id) + + from reports.tasks import process_report + process_report.delay(report.id) if 'report' in data and data['report'] == 'true': packages = [] diff --git a/repos/tasks.py b/repos/tasks.py new file mode 100644 index 00000000..077668e9 --- /dev/null +++ b/repos/tasks.py @@ -0,0 +1,45 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from django.conf import settings + +from repos.models import Repository, Mirror + +from celery import shared_task +from celery.schedules import crontab +from patchman.celery import app + +app.conf.beat_schedule = { + 'refresh-repos-every-day': { + 'task': 'tasks.refresh_repos', + 'schedule': crontab(hour=6, minute=00), + }, +} + +@shared_task +def refresh_repo(force=False): + """ Refresh metadata for a single repo + """ + repo.refresh(force) + + +@shared_task +def refresh_repos(force=False): + """ Refresh metadata for all enabled repos + """ + repos = Repository.objects.filter(enabled=True) + for repo in repos: + refresh_repo.delay(repo, force) diff --git a/requirements.txt b/requirements.txt index 0d53876f..5b16ddc2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,4 @@ Django==4.2.19 -django-tagging==0.5.0 django-taggit==4.0.0 django-extensions==3.2.1 django-bootstrap3==23.1 @@ -16,6 +15,8 @@ django-filter==21.1 humanize==3.13.1 version-utils==0.3.0 python-magic==0.4.25 -pymemcache==4.0.0 gitpython==3.1.44 tenacity==8.2.3 +celery==5.4.0 +redis==5.2.1 +django-celery-beat==2.7.0 diff --git a/security/tasks.py b/security/tasks.py new file mode 100644 index 00000000..f3935235 --- /dev/null +++ b/security/tasks.py @@ -0,0 +1,49 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from django.conf import settings + +from security.models import CVE, CWE + +from celery import shared_task +from patchman.celery import app + + +@shared_task +def update_cve(cve): + """ Task to update a CVE + """ + cve.update() + +@shared_task +def update_cves(): + """ Task to update all CVEs + """ + for cve in CVE.objects.all(): + update_cve.delay(cve) + +@shared_task +def update_cwe(cwe): + """ Task to update a CWE + """ + cwe.update() + +@shared_task +def update_cwes(): + """ Task to update all CWEa + """ + for cwe in CWE.objects.all(): + update_cwe.delay(cwe) From 972b62601ca6e30775434391e0bbbc2d09632074 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 12 Feb 2025 01:50:51 -0500 Subject: [PATCH 043/199] standardize f-strings --- sbin/patchman | 74 +++++++++++++++++++++++++-------------------------- 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/sbin/patchman b/sbin/patchman index c947b393..28bce4ad 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -50,7 +50,7 @@ def get_host(host=None, action='Performing action'): """ host_obj = None hostdot = host + '.' 
- text = f'{action!s} for Host {host!s}' + text = f'{action} for Host {host}' try: host_obj = Host.objects.get(hostname__startswith=hostdot) @@ -58,10 +58,10 @@ def get_host(host=None, action='Performing action'): try: host_obj = Host.objects.get(hostname__startswith=host) except Host.DoesNotExist: - text = f'Host {host!s} does not exist' + text = f'Host {host} does not exist' except MultipleObjectsReturned: matches = Host.objects.filter(hostname__startswith=host).count() - text = f'{matches!s} Hosts match hostname "{host!s}"' + text = f'{matches} Hosts match hostname "{host}"' info_message.send(sender=None, text=text) return host_obj @@ -82,7 +82,7 @@ def get_hosts(hosts=None, action='Performing action'): if host_obj is not None: host_objs.append(host_obj) else: - text = f'{action!s} for all Hosts\n' + text = f'{action} for all Hosts\n' info_message.send(sender=None, text=text) host_objs = Host.objects.all() @@ -96,11 +96,11 @@ def get_repos(repo=None, action='Performing action', only_enabled=False): if repo: try: repos.append(Repository.objects.get(id=repo)) - text = f'{action!s} for Repo {repo!s}' + text = f'{action} for Repo {repo}' except Repository.DoesNotExist: - text = f'Repo {repo!s} does not exist' + text = f'Repo {repo} does not exist' else: - text = f'{action!s} for all Repos\n' + text = f'{action} for all Repos\n' if only_enabled: repos = Repository.objects.filter(enabled=True) else: @@ -116,7 +116,7 @@ def refresh_repos(repo=None, force=False): """ repos = get_repos(repo, 'Refreshing metadata', True) for repo in repos: - text = f'Repository {repo.id!s} : {repo!s}' + text = f'Repository {repo.id} : {repo}' info_message.send(sender=None, text=text) repo.refresh(force) info_message.send(sender=None, text='') @@ -151,7 +151,7 @@ def clean_packages(): if plen == 0: info_message.send(sender=None, text='No orphaned Packages found.') else: - create_pbar(f'Removing {plen!s} orphaned Packages:', plen) + create_pbar(f'Removing {plen} orphaned Packages:', plen) 
for i, o in enumerate(packages): p = Package.objects.get(name=o.name, epoch=o.epoch, @@ -173,7 +173,7 @@ def clean_arches(): text = 'No orphaned Package Architectures found.' info_message.send(sender=None, text=text) else: - create_pbar(f'Removing {plen!s} orphaned P Arches:', plen) + create_pbar(f'Removing {plen} orphaned P Arches:', plen) for i, p in enumerate(parches): a = PackageArchitecture.objects.get(name=p.name) a.delete() @@ -189,7 +189,7 @@ def clean_arches(): text = 'No orphaned Machine Architectures found.' info_message.send(sender=None, text=text) else: - create_pbar(f'Removing {mlen!s} orphaned M Arches:', mlen) + create_pbar(f'Removing {mlen} orphaned M Arches:', mlen) for i, m in enumerate(marches): a = MachineArchitecture.objects.get(name=m.name) a.delete() @@ -205,7 +205,7 @@ def clean_package_names(): if nlen == 0: info_message.send(sender=None, text='No orphaned Package names found.') else: - create_pbar(f'Removing {nlen!s} unused Package names:', nlen) + create_pbar(f'Removing {nlen} unused Package names:', nlen) for i, packagename in enumerate(names): packagename.delete() update_pbar(i + 1) @@ -221,7 +221,7 @@ def clean_repos(): text = 'No Repositories with zero Mirrors found.' info_message.send(sender=None, text=text) else: - create_pbar(f'Removing {rlen!s} empty Repos:', rlen) + create_pbar(f'Removing {rlen} empty Repos:', rlen) for i, repo in enumerate(repos): repo.delete() update_pbar(i + 1) @@ -243,7 +243,7 @@ def clean_reports(s_host=None): reports = Report.objects.filter(accessed__lt=timestamp) rlen = reports.count() if rlen != 0: - create_pbar(f'Removing {rlen!s} extraneous Reports:', + create_pbar(f'Removing {rlen} extraneous Reports:', rlen) for i, report in enumerate(reports): report.delete() @@ -260,7 +260,7 @@ def clean_modules(): text = 'No orphaned Modules found.' 
info_message.send(sender=None, text=text) else: - create_pbar(f'Removing {mlen!s} empty Modules:', mlen) + create_pbar(f'Removing {mlen} empty Modules:', mlen) for i, module in enumerate(modules): module.delete() update_pbar(i + 1) @@ -282,7 +282,7 @@ def clean_tags(): tlen = len(to_delete) if tlen != 0: - create_pbar(f'Removing {tlen!s} unused tagged items', tlen) + create_pbar(f'Removing {tlen} unused tagged items', tlen) for i, t in enumerate(to_delete): t.delete() update_pbar(i + 1) @@ -333,7 +333,7 @@ def host_updates_alt(host=None): phost.updated_at = ts phost.save() updated_hosts.append(phost) - text = f'Added the same updates to {phost!s}' + text = f'Added the same updates to {phost}' info_message.send(sender=None, text=text) else: text = 'Updates already added in this run' @@ -370,45 +370,45 @@ def diff_hosts(hosts): repo_diff_AB = reposA.difference(reposB) repo_diff_BA = reposB.difference(reposA) - info_message.send(sender=None, text=f'+ {hostA.hostname!s}') - info_message.send(sender=None, text=f'- {hostB.hostname!s}') + info_message.send(sender=None, text=f'+ {hostA.hostname}') + info_message.send(sender=None, text=f'- {hostB.hostname}') if hostA.os != hostB.os: info_message.send(sender=None, text='\nOperating Systems') - info_message.send(sender=None, text=f'+ {hostA.os!s}') - info_message.send(sender=None, text=f'- {hostB.os!s}') + info_message.send(sender=None, text=f'+ {hostA.os}') + info_message.send(sender=None, text=f'- {hostB.os}') else: info_message.send(sender=None, text='\nNo OS differences') if hostA.arch != hostB.arch: info_message.send(sender=None, text='\nArchitecture') - info_message.send(sender=None, text=f'+ {hostA.arch!s}') - info_message.send(sender=None, text=f'- {hostB.arch!s}') + info_message.send(sender=None, text=f'+ {hostA.arch}') + info_message.send(sender=None, text=f'- {hostB.arch}') else: info_message.send(sender=None, text='\nNo Architecture differences') if hostA.kernel != hostB.kernel: info_message.send(sender=None, 
text='\nKernels') - info_message.send(sender=None, text=f'+ {hostA.kernel!s}') - info_message.send(sender=None, text=f'- {hostB.kernel!s}') + info_message.send(sender=None, text=f'+ {hostA.kernel}') + info_message.send(sender=None, text=f'- {hostB.kernel}') else: info_message.send(sender=None, text='\nNo Kernel differences') if len(package_diff_AB) != 0 or len(package_diff_BA) != 0: info_message.send(sender=None, text='\nPackages') for package in package_diff_AB: - info_message.send(sender=None, text=f'+ {package!s}') + info_message.send(sender=None, text=f'+ {package}') for package in package_diff_BA: - info_message.send(sender=None, text=f'- {package!s}') + info_message.send(sender=None, text=f'- {package}') else: info_message.send(sender=None, text='\nNo Package differences') if len(repo_diff_AB) != 0 or len(repo_diff_BA) != 0: info_message.send(sender=None, text='\nRepositories') for repo in repo_diff_AB: - info_message.send(sender=None, text=f'+ {repo!s}') + info_message.send(sender=None, text=f'+ {repo}') for repo in repo_diff_BA: - info_message.send(sender=None, text=f'- {repo!s}') + info_message.send(sender=None, text=f'- {repo}') else: info_message.send(sender=None, text='\nNo Repo differences') @@ -419,7 +419,7 @@ def delete_hosts(hosts=None): if hosts: matching_hosts = get_hosts(hosts) for host in matching_hosts: - text = f'Deleting host: {host.hostname!s}:' + text = f'Deleting host: {host.hostname}:' info_message.send(sender=None, text=text) host.delete() @@ -432,7 +432,7 @@ def toggle_host_hro(hosts=None, host_repos_only=True): else: toggle = "Unsetting" if hosts: - matching_hosts = get_hosts(hosts, f"{toggle!s} host_repos_only") + matching_hosts = get_hosts(hosts, f"{toggle} host_repos_only") for host in matching_hosts: info_message.send(sender=None, text=str(host)) host.host_repos_only = host_repos_only @@ -447,7 +447,7 @@ def toggle_host_check_dns(hosts=None, check_dns=True): else: toggle = "Unsetting" if hosts: - matching_hosts = get_hosts(hosts, 
f"{toggle!s} check_dns") + matching_hosts = get_hosts(hosts, f"{toggle} check_dns") for host in matching_hosts: info_message.send(sender=None, text=str(host)) host.check_dns = check_dns @@ -461,7 +461,7 @@ def dns_checks(host=None): hosts = get_hosts(host, 'Checking rDNS') for host in hosts: if get_verbosity(): - text = f'{str(host)[0:25].ljust(25)!s}: ' + text = f'{str(host)[0:25].ljust(25)}: ' print_nocr(text) host.check_rdns() @@ -477,9 +477,9 @@ def process_reports(host=None, force=False): try: reports = Report.objects.filter( processed=force, host=host).order_by('created') - text = f'Processing Reports for Host {host!s}' + text = f'Processing Reports for Host {host}' except Report.DoesNotExist: - text = f'No Reports exist for Host {host!s}' + text = f'No Reports exist for Host {host}' else: text = 'Processing Reports for all Hosts' reports = Report.objects.filter(processed=force).order_by('created') @@ -498,7 +498,7 @@ def clean_updates(): for update in package_updates: if update.host_set.count() == 0: - text = f'Removing unused update {update!s}' + text = f'Removing unused update {update}' info_message.send(sender=None, text=text) update.delete() for duplicate in package_updates: @@ -506,7 +506,7 @@ def clean_updates(): update.newpackage == duplicate.newpackage and \ update.security == duplicate.security and \ update.id != duplicate.id: - text = f'Removing duplicate update: {update!s}' + text = f'Removing duplicate update: {update}' info_message.send(sender=None, text=text) for host in duplicate.host_set.all(): host.updates.remove(duplicate) From 39b290bae926fcd319d2f3138b1c40edd02bbf4c Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 12 Feb 2025 01:51:27 -0500 Subject: [PATCH 044/199] remove tagging requirement --- hosts/migrations/0001_initial.py | 35 +++++++++++-------- .../0004_remove_host_tags_host_tags.py | 34 ++++++++++++++---- patchman/settings.py | 7 ++-- 3 files changed, 51 insertions(+), 25 deletions(-) diff --git 
a/hosts/migrations/0001_initial.py b/hosts/migrations/0001_initial.py index f8f8d45c..43366684 100644 --- a/hosts/migrations/0001_initial.py +++ b/hosts/migrations/0001_initial.py @@ -3,7 +3,11 @@ from django.db import migrations, models import django.db.models.deletion import django.utils.timezone -import tagging.fields +try: + import tagging.fields + has_tagging = True +except ImportError: + has_tagging = False class Migration(migrations.Migration): @@ -13,22 +17,25 @@ class Migration(migrations.Migration): dependencies = [ ] + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('hostname', models.CharField(max_length=255, unique=True)), + ('ipaddress', models.GenericIPAddressField()), + ('reversedns', models.CharField(blank=True, max_length=255, null=True)), + ('check_dns', models.BooleanField(default=False)), + ('kernel', models.CharField(max_length=255)), + ('lastreport', models.DateTimeField()), + ('reboot_required', models.BooleanField(default=False)), + ('host_repos_only', models.BooleanField(default=True)), + ('updated_at', models.DateTimeField(default=django.utils.timezone.now)), + ] + if has_tagging: + fields.append(('tags', tagging.fields.TagField(blank=True, max_length=255))) + operations = [ migrations.CreateModel( name='Host', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('hostname', models.CharField(max_length=255, unique=True)), - ('ipaddress', models.GenericIPAddressField()), - ('reversedns', models.CharField(blank=True, max_length=255, null=True)), - ('check_dns', models.BooleanField(default=False)), - ('kernel', models.CharField(max_length=255)), - ('lastreport', models.DateTimeField()), - ('reboot_required', models.BooleanField(default=False)), - ('host_repos_only', models.BooleanField(default=True)), - ('tags', tagging.fields.TagField(blank=True, max_length=255)), - ('updated_at', 
models.DateTimeField(default=django.utils.timezone.now)), - ], + fields=fields, options={ 'verbose_name': 'Host', 'verbose_name_plural': 'Hosts', diff --git a/hosts/migrations/0004_remove_host_tags_host_tags.py b/hosts/migrations/0004_remove_host_tags_host_tags.py index 2f77b14f..053de91a 100644 --- a/hosts/migrations/0004_remove_host_tags_host_tags.py +++ b/hosts/migrations/0004_remove_host_tags_host_tags.py @@ -1,7 +1,21 @@ # Generated by Django 4.2.18 on 2025-02-04 23:37 +from django.apps import apps from django.db import migrations import taggit.managers +try: + import tagging +except ImportError: + pass + + +def check_tagging_tag_field_exists(app_label, model_name, field_name): + Model = apps.get_model(app_label, model_name) + fields = Model._meta.get_fields() + for field in fields: + if field.name == field_name and 'tagging' in str(field.related_model): + return True + return False class Migration(migrations.Migration): @@ -11,14 +25,20 @@ class Migration(migrations.Migration): ('hosts', '0003_host_modules'), ] - operations = [ - migrations.RemoveField( - model_name='host', - name='tags', - ), + if check_tagging_tag_field_exists('hosts', 'Host', 'tags'): + operations = [ + migrations.RemoveField( + model_name='host', + name='tags', + ) + ] + else: + operations = [] + + operations.append( migrations.AddField( model_name='host', name='tags', field=taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'), - ), - ] + ) + ) diff --git a/patchman/settings.py b/patchman/settings.py index 706bda92..90e68151 100644 --- a/patchman/settings.py +++ b/patchman/settings.py @@ -4,9 +4,9 @@ import site import sys -import django -from django.utils.encoding import smart_str -django.utils.encoding.smart_text = smart_str +#import django +#from django.utils.encoding import smart_str +#django.utils.encoding.smart_text = smart_str # Build paths inside the project like this: os.path.join(BASE_DIR, 
...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) @@ -81,7 +81,6 @@ THIRD_PARTY_APPS = [ 'django_extensions', - 'tagging', 'taggit', 'bootstrap3', 'rest_framework', From 314180d5e3ac252ac0866de7e5062991a20a41a2 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 12 Feb 2025 13:49:10 -0500 Subject: [PATCH 045/199] submit CPE_NAME with client --- client/patchman-client | 3 +++ 1 file changed, 3 insertions(+) diff --git a/client/patchman-client b/client/patchman-client index 3f06969b..cba65917 100755 --- a/client/patchman-client +++ b/client/patchman-client @@ -398,6 +398,9 @@ get_host_data() { fi done fi + if [ ! -z "${CPE_NAME}" ] ; then + os="${os} [${CPE_NAME}]" + fi if ${verbose} ; then echo "Kernel: ${host_kernel}" echo "Arch: ${host_arch}" From 3f6915b5c04e5f1af7aaae7734619e6ff0ea37dd Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 12 Feb 2025 15:24:31 -0500 Subject: [PATCH 046/199] remove unused os templates --- .../operatingsystem_table.html | 21 ------------------- .../operatingsystemgroup_table.html | 17 --------------- 2 files changed, 38 deletions(-) delete mode 100644 operatingsystems/templates/operatingsystems/operatingsystem_table.html delete mode 100644 operatingsystems/templates/operatingsystems/operatingsystemgroup_table.html diff --git a/operatingsystems/templates/operatingsystems/operatingsystem_table.html b/operatingsystems/templates/operatingsystems/operatingsystem_table.html deleted file mode 100644 index f407e8a7..00000000 --- a/operatingsystems/templates/operatingsystems/operatingsystem_table.html +++ /dev/null @@ -1,21 +0,0 @@ -{% load common %} -
    NIST
    MITRE
    osv.dev
    NIST
    MITRE
    osv.dev
    {{ reference.er_type }}
    {{ cve.cve_id }} - [NIST] - [MITRE] - [osv.dev] + NIST    + MITRE    + osv.dev {{ cve.description|truncatechars:60 }} {% for score in cve.cvss_scores.all %} {{ score.score }} {% endfor %}
    - - - - - - - - - - {% for os in object_list %} - - - - - - - {% endfor %} - -
    OS NameHostsOS GroupRepos (OS Group)
    {{ os }}{{ os.host_set.count }}{% if os.osgroup != None %}{{ os.osgroup }}{% endif %}{% if os.osgroup.repos.count != None %}{{ os.osgroup.repos.count }}{% else %}0{% endif %}
    diff --git a/operatingsystems/templates/operatingsystems/operatingsystemgroup_table.html b/operatingsystems/templates/operatingsystems/operatingsystemgroup_table.html deleted file mode 100644 index b0153949..00000000 --- a/operatingsystems/templates/operatingsystems/operatingsystemgroup_table.html +++ /dev/null @@ -1,17 +0,0 @@ -{% load common %} - - - - - - - - - {% for osgroup in object_list %} - - - - - {% endfor %} - -
    OS GroupRepos
    {{ osgroup }}{% if osgroup.repos.count != None %}{{ osgroup.repos.count }}{% else %}0{% endif %}
    From 035d3a7b6f9195134e30e1c613a3c9d20b327466 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 12 Feb 2025 22:15:45 -0500 Subject: [PATCH 047/199] simplify apt repo naming --- client/patchman-client | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/client/patchman-client b/client/patchman-client index cba65917..4f30d325 100755 --- a/client/patchman-client +++ b/client/patchman-client @@ -499,7 +499,8 @@ get_repos() { if ${verbose} ; then echo 'Finding apt repos...' fi - IFS=${FULL_IFS} read -r osname shortversion <<<$(echo "${os}" | awk '{print $1,$2}' | cut -d . -f 1,2) + osname=$(echo ${os} | cut -d " " -f 1) + shortversion=${VERSION_ID} repo_string="'deb\' \'${osname} ${shortversion} ${host_arch} repo at" repos=$(apt-cache policy | grep -v Translation | grep -E "^ *[0-9]{1,5}" | grep -E " mirror\+file|http(s)?:" | sed -e "s/^ *//g" -e "s/ *$//g" | cut -d " " -f 1,2,3,4) non_mirror_repos=$(echo "${repos}" | grep -Ev "mirror\+file") From d565cdb3f6fc0db616c75529b7d3aebcd9c61873 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 13 Feb 2025 02:54:10 -0500 Subject: [PATCH 048/199] improvements to errata downloading and processing --- errata/sources/distros/debian.py | 15 ++++-- errata/sources/distros/rocky.py | 93 +++++++++++++++++++++++++++----- errata/utils.py | 9 ++++ 3 files changed, 100 insertions(+), 17 deletions(-) diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index 77f184b5..60d03ae0 100644 --- a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -21,8 +21,9 @@ from io import StringIO from django.conf import settings +from django.db.utils import IntegrityError -from operatingsystems.models import OSRelease, OSVariant +from operatingsystems.models import OSRelease from packages.models import Package from packages.utils import get_or_create_package, find_evr from util import get_url, download_url, has_setting_of_type @@ -206,10 +207,14 @@ def 
create_debian_os_releases(codename_to_version): for codename, version in codename_to_version.items(): if codename in accepted_codenames: osrelease_name = f'Debian {version}' - osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name, codename=codename) - for osvariant in OSVariant.objects.filter(name__startswith=osrelease_name): - osvariant.osrelease = osrelease - osvariant.save() + try: + osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name, codename=codename) + except IntegrityError: + osreleases = OSRelease.objects.filter(name=osrelease_name) + if osreleases.count() == 1: + osrelease = osreleases[0] + osrelease.codename = codename + osrelease.save() def process_debian_erratum_affected_packages(e, dsc): diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py index 4927c4e1..663a8689 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -15,10 +15,12 @@ # along with Patchman. If not, see import json +import concurrent.futures +from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential from django.db import transaction +from django.db.utils import OperationalError -from arch.models import MachineArchitecture from packages.models import Package from packages.utils import parse_package_string, get_or_create_package from util import get_url, download_url, info_message, error_message @@ -31,8 +33,8 @@ def update_rocky_errata(): rocky_errata_api_host = 'https://apollo.build.resf.org' rocky_errata_api_url = '/api/v3/' if check_rocky_errata_endpoint_health(rocky_errata_api_host): - advisories = download_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url) - process_rocky_errata(advisories) + advisories = download_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_api_url) + process_rocky_errata_concurrently(advisories) def check_rocky_errata_endpoint_health(rocky_errata_api_host): @@ -88,6 +90,81 @@ def 
download_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url): return advisories +def download_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_api_url): + """ Download Rocky Linux advisories concurrently and return the list + """ + rocky_errata_advisories_url = rocky_errata_api_host + rocky_errata_api_url + 'advisories/' + headers = {'Accept': 'application/json'} + advisories = [] + params = {'page': 1, 'size': 100} + res = get_url(rocky_errata_advisories_url, headers=headers, params=params) + data = download_url(res, 'Rocky Linux Advisories Page 1') + advisories_dict = json.loads(data) + links = advisories_dict.get('links') + last_link = links.get('last') + pages = int(last_link.split('=')[-1]) + ptext = 'Downloading Rocky Linux Advisories:' + progress_info_s.send(sender=None, ptext=ptext, plen=pages) + i = 0 + with concurrent.futures.ThreadPoolExecutor(max_workers=100) as executor: + futures = [executor.submit(get_rocky_advisory, rocky_errata_advisories_url, page) + for page in range(1, pages + 1)] + for future in concurrent.futures.as_completed(futures): + advisories += future.result() + i += 1 + progress_update_s.send(sender=None, index=i + 1) + return advisories + + +def get_rocky_advisory(rocky_errata_advisories_url, page): + """ Download a single Rocky Linux advisory + """ + headers = {'Accept': 'application/json'} + params = {'page': page, 'size': 100} + res = get_url(rocky_errata_advisories_url, headers=headers, params=params) + data = res.content + advisories_dict = json.loads(data) + return advisories_dict.get('advisories') + + +def process_rocky_errata_concurrently(advisories): + """ Process Rocky Linux errata concurrently + """ + elen = len(advisories) + ptext = f'Processing {elen} Errata:' + progress_info_s.send(sender=None, ptext=ptext, plen=elen) + i = 0 + with concurrent.futures.ProcessPoolExecutor(max_workers=10) as executor: + futures = [executor.submit(process_rocky_erratum, advisory) for advisory in advisories] + for 
future in concurrent.futures.as_completed(futures): + i += 1 + progress_update_s.send(sender=None, index=i + 1) + + +@retry( + retry=retry_if_exception_type(OperationalError), + stop=stop_after_attempt(5), + wait=wait_exponential(multiplier=1, min=2, max=15), +) +def process_rocky_erratum(advisory): + """ Process a single Rocky Linux erratum + """ + from errata.utils import get_or_create_erratum + erratum_name = advisory.get('name') + e_type = advisory.get('kind').lower().replace(' ', '') + issue_date = advisory.get('published_at') + synopsis = advisory.get('synopsis') + e, created = get_or_create_erratum( + name=erratum_name, + e_type=e_type, + issue_date=issue_date, + synopsis=synopsis, + ) + add_rocky_erratum_references(e, advisory) + add_rocky_erratum_oses(e, advisory) + add_rocky_erratum_packages(e, advisory) + + def process_rocky_errata(advisories): """ Process Rocky Linux errata """ @@ -129,21 +206,13 @@ def add_rocky_erratum_oses(e, advisory): """ Update OS Variant, OS Release and MachineArch for Rocky Linux errata """ affected_oses = advisory.get('affected_products') - from operatingsystems.models import OSVariant, OSRelease + from operatingsystems.models import OSRelease for affected_os in affected_oses: - arch = affected_os.get('arch') variant = affected_os.get('variant') major_version = affected_os.get('major_version') osrelease_name = f'{variant} {major_version}' with transaction.atomic(): osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) - osvariant_name = affected_os.get('name').replace(' (Legacy)', '') - with transaction.atomic(): - m_arch, created = MachineArchitecture.objects.get_or_create(name=arch) - with transaction.atomic(): - osvariant, created = OSVariant.objects.get_or_create(name=osvariant_name, arch=m_arch) - osvariant.osrelease = osrelease - osvariant.save() e.osreleases.add(osrelease) e.save() diff --git a/errata/utils.py b/errata/utils.py index f33e4546..20e5f742 100644 --- a/errata/utils.py +++ 
b/errata/utils.py @@ -47,6 +47,15 @@ def update_errata(): update_debian_errata() if 'ubuntu' in errata_os_updates: update_ubuntu_errata() + if 'rhel' in errata_os_updates: + # update_rhel_errata() + pass + if 'suse' in errata_os_updates: + # update_suse_errata() + pass + if 'amazon' in errata_os_updates: + # update_amazon_errata() + pass if 'centos' in errata_os_updates: update_centos_errata() From a01261f42cc906a8ae556e8c67ce1621d5e1aeb9 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 13 Feb 2025 02:57:47 -0500 Subject: [PATCH 049/199] template and view updates --- ...6_osrelease_cpe_name_osvariant_codename.py | 23 ++++ operatingsystems/models.py | 11 +- .../operatingsystemrelease_table.html | 12 +- .../operatingsystemvariant_table.html | 12 +- .../operatingsystems/osrelease_delete.html | 12 +- .../operatingsystems/osrelease_detail.html | 13 +- .../operatingsystems/osvariant_delete.html | 56 ++------- .../osvariant_delete_multiple.html | 56 +++++++++ .../operatingsystems/osvariant_detail.html | 4 +- .../operatingsystems/osvariant_list.html | 4 +- operatingsystems/urls.py | 1 + operatingsystems/views.py | 70 ++++++----- reports/models.py | 112 +++++++++++++++--- reports/utils.py | 23 ++-- reports/views.py | 25 ++-- repos/templates/repos/mirror_delete.html | 2 +- repos/templates/repos/mirror_detail.html | 2 +- repos/templates/repos/mirror_table.html | 4 +- repos/templates/repos/repo_delete.html | 4 +- repos/templates/repos/repo_detail.html | 5 +- repos/templates/repos/repository_table.html | 8 +- repos/views.py | 18 ++- util/templates/dashboard.html | 2 +- 23 files changed, 326 insertions(+), 153 deletions(-) create mode 100644 operatingsystems/migrations/0006_osrelease_cpe_name_osvariant_codename.py create mode 100644 operatingsystems/templates/operatingsystems/osvariant_delete_multiple.html diff --git a/operatingsystems/migrations/0006_osrelease_cpe_name_osvariant_codename.py b/operatingsystems/migrations/0006_osrelease_cpe_name_osvariant_codename.py new 
file mode 100644 index 00000000..bb68b2a5 --- /dev/null +++ b/operatingsystems/migrations/0006_osrelease_cpe_name_osvariant_codename.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.19 on 2025-02-12 20:04 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('operatingsystems', '0005_rename_osgroup_osrelease_rename_os_osvariant_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='osrelease', + name='cpe_name', + field=models.CharField(blank=True, max_length=255, null=True, unique=True), + ), + migrations.AddField( + model_name='osvariant', + name='codename', + field=models.CharField(blank=True, max_length=255), + ), + ] diff --git a/operatingsystems/models.py b/operatingsystems/models.py index 0cde601d..bed134af 100644 --- a/operatingsystems/models.py +++ b/operatingsystems/models.py @@ -24,9 +24,10 @@ class OSRelease(models.Model): - name = models.CharField(max_length=255, unique=True) + name = models.CharField(max_length=255, unique=True, blank=False, null=False) repos = models.ManyToManyField(Repository, blank=True) codename = models.CharField(max_length=255, blank=True) + cpe_name = models.CharField(max_length=255, null=True, blank=True, unique=True) from operatingsystems.managers import OSReleaseManager objects = OSReleaseManager() @@ -34,7 +35,7 @@ class OSRelease(models.Model): class Meta: verbose_name = 'Operating System Release' verbose_name_plural = 'Operating System Releases' - unique_together = ('name', 'codename') + unique_together = ('name', 'codename', 'cpe_name') ordering = ('name',) def __str__(self): @@ -47,7 +48,7 @@ def get_absolute_url(self): return reverse('operatingsystems:osrelease_detail', args=[str(self.id)]) def natural_key(self): - return (self.name, self.codename) + return (self.name, self.codename, self.cpe_name) class OSVariant(models.Model): @@ -55,6 +56,7 @@ class OSVariant(models.Model): name = models.CharField(max_length=255, unique=True) arch = 
models.ForeignKey(MachineArchitecture, blank=True, null=True, on_delete=models.CASCADE) osrelease = models.ForeignKey(OSRelease, blank=True, null=True, on_delete=models.SET_NULL) + codename = models.CharField(max_length=255, blank=True) class Meta: verbose_name = 'Operating System Variant' @@ -62,7 +64,8 @@ class Meta: ordering = ('name',) def __str__(self): - return self.name + osvariant_name = f'{self.name} {self.arch}' + return osvariant_name def get_absolute_url(self): return reverse('operatingsystems:osvariant_detail', args=[str(self.id)]) diff --git a/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html b/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html index 16544aa7..33ada936 100644 --- a/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html +++ b/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html @@ -2,15 +2,21 @@ - - + + + + + {% for osrelease in object_list %} - + + + + {% endfor %} diff --git a/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html b/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html index 50627579..fe60af1b 100644 --- a/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html +++ b/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html @@ -3,17 +3,21 @@ - + + + - + {% for osvariant in object_list %} - - + + + + {% endfor %} diff --git a/operatingsystems/templates/operatingsystems/osrelease_delete.html b/operatingsystems/templates/operatingsystems/osrelease_delete.html index 7022ee25..47b66c9b 100644 --- a/operatingsystems/templates/operatingsystems/osrelease_delete.html +++ b/operatingsystems/templates/operatingsystems/osrelease_delete.html @@ -12,12 +12,12 @@
    OS ReleaseReposOS ReleaseCPE NameCodenameReposOS Variants
    {{ osrelease }}{% if osrelease.repos.count != None %}{{ osrelease.repos.count }}{% else %}0{% endif %}{% if osrelease.codename %}{{ osrelease.cpe_name }}{% endif %}{% if osrelease.codename %}{{ osrelease.codename }}{% endif %}{{ osrelease.repos.count }}{{ osrelease.osvariant_set.count }}
    NameHostsArchitectureCodenameHosts OS ReleaseRepos (OS Release)Repos (OS Release)
    {{ osvariant }}{{ osvariant.host_set.count }}{% if osvariant.osrelease != None %}{{ osvariant.osrelease }}{% endif %}{{ osvariant.arch }}{% if osvariant.codename %}{{ osvariant.codename }}{% else %}{% if osvariant.osrelease %}{{ osvariant.osrelease.codename }}{% endif %}{% endif %}{{ osvariant.host_set.count }}{% if osvariant.osrelease %}{{ osvariant.osrelease }}{% endif %} {% if osvariant.osrelease.repos.count != None %}{{ osvariant.osrelease.repos.count }}{% else %}0{% endif %}
    - - - - - - + + + + + +
    Name{{ osrelease }}
    Variants{{ osrelease.osvariant_set.count }}
    Name{{ osrelease }}
    CPE Name{% if osrelease.cpe_name %}{{ osrelease.cpe_name }}{% endif %}
    Codename{% if osrelease.codename %}{{ osrelease.codename }}{% endif %}
    OS Variants{{ osrelease.osvariant_set.count }}
    Repositories{{ osrelease.repos.count }}
    Hosts{{ host_count }}
    diff --git a/operatingsystems/templates/operatingsystems/osrelease_detail.html b/operatingsystems/templates/operatingsystems/osrelease_detail.html index 48677cc1..a93446f2 100644 --- a/operatingsystems/templates/operatingsystems/osrelease_detail.html +++ b/operatingsystems/templates/operatingsystems/osrelease_detail.html @@ -23,13 +23,14 @@
    - - - - - - + + + + + +
    Name{{ osrelease }}
    Variants{{ osrelease.osvariant_set.count }}
    Name{{ osrelease }}
    CPE Name{% if osrelease.cpe_name %}{{ osrelease.cpe_name }}{% endif %}
    Codename{% if osrelease.codename %}{{ osrelease.codename }}{% endif %}
    OS Variants{{ osrelease.osvariant_set.count }}
    Repositories{{ osrelease.repos.count }}
    Hosts{{ host_count }}
    + {% if user.is_authenticated and perms.is_admin %} {% bootstrap_icon "trash" %} Delete this OS Release {% endif %} diff --git a/operatingsystems/templates/operatingsystems/osvariant_delete.html b/operatingsystems/templates/operatingsystems/osvariant_delete.html index d8f60d2e..bfb556d3 100644 --- a/operatingsystems/templates/operatingsystems/osvariant_delete.html +++ b/operatingsystems/templates/operatingsystems/osvariant_delete.html @@ -4,57 +4,25 @@ {% block page_title %}OS Variant - {{ osvariant }} {% endblock %} -{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Variants
  • +{% block content_title %} OS Variant - {{ osvariant }} {% endblock %} -{% if osvariant %} - {{ osvariant }} -{% endif %} - -{% if osvariants %} - Multiple -{% endif %} - -
  • {% endblock %} - -{% block content_title %}OS Variant - {{ osvariant }}{% endblock %} +{% block breadcrumbs %} {{ block.super }}
  • OS Variants
  • {{ osvariant }}
  • {% endblock %} {% block content %} -{% if osvariant %} -
    - - - - -
    Name {{ osvariant.name }}
    Hosts{% if osvariant.host_set.count != None %} {{ osvariant.host_set.count }} {% else %} 0 {% endif %}
    OS Release{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %}
    -{% endif %} - -{% if osvariants %} -
    - - - - - {% for osvariant in osvariants %} - - - - - - {% endfor %} -
    NameHostsOS Release
    {{ osvariant }} {% if osvariant.host_set.count != None %} {{ osvariant.host_set.count }} {% else %} 0 {% endif %}{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %}
    -{% endif %} +
    + + + + + + +
    Name {{ osvariant.name }}
    Architecture {{ osvariant.arch }}
    Codename {{ osvariant.codename }}
    Hosts{{ osvariant.host_set.count }}
    OS Release{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %}
    {% if user.is_authenticated and perms.is_admin %}
    - Are you sure you want to delete - {% if osvariant %} - this OS Variant? - {% endif %} - {% if osvariants %} - these OS Variants? - {% endif %} + Are you sure you want to delete this OS Variant?
    @@ -68,7 +36,7 @@ You do not have permission to delete OS Variants.
    {% endif %}
    diff --git a/operatingsystems/templates/operatingsystems/osvariant_delete_multiple.html b/operatingsystems/templates/operatingsystems/osvariant_delete_multiple.html new file mode 100644 index 00000000..f6d0373d --- /dev/null +++ b/operatingsystems/templates/operatingsystems/osvariant_delete_multiple.html @@ -0,0 +1,56 @@ +{% extends "base.html" %} + +{% load common bootstrap3 %} + +{% block page_title %}OS Variant - Multiple {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Variants
  • + +{% if osvariants %} + Multiple +{% endif %} + +
  • {% endblock %} + +{% block content_title %}OS Variant - Multiple{% endblock %} + +{% block content %} + +
    + + + + + {% for osvariant in osvariants %} + + + + + + {% endfor %} +
    NameHostsOS Release
    {{ osvariant }} {% if osvariant.host_set.count != None %} {{ osvariant.host_set.count }} {% else %} 0 {% endif %}{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %}
    + +
    + {% if user.is_authenticated and perms.is_admin %} +
    + Are you sure you want to delete these OS Variants? +
    +
    + + {% csrf_token %} + + + +
    + {% else %} +
    + You do not have permission to delete OS Variants. +
    + + {% endif %} +
    +
    + +{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osvariant_detail.html b/operatingsystems/templates/operatingsystems/osvariant_detail.html index 9ff7721a..71034664 100644 --- a/operatingsystems/templates/operatingsystems/osvariant_detail.html +++ b/operatingsystems/templates/operatingsystems/osvariant_detail.html @@ -22,7 +22,9 @@
    - + + +
    Name {{ osvariant.name }}
    Hosts{% if osvariant.host_set.count != None %} {{ osvariant.host_set.count }} {% else %} 0 {% endif %}
    Architecture {{ osvariant.arch }}
    Codename {{ osvariant.codename }}
    Hosts{{ osvariant.host_set.count }}
    OS Release{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %}
    {% if user.is_authenticated and perms.is_admin %} diff --git a/operatingsystems/templates/operatingsystems/osvariant_list.html b/operatingsystems/templates/operatingsystems/osvariant_list.html index 3b866208..b83ede5f 100644 --- a/operatingsystems/templates/operatingsystems/osvariant_list.html +++ b/operatingsystems/templates/operatingsystems/osvariant_list.html @@ -10,9 +10,9 @@ {% block objectlist_actions %} -{% if user.is_authenticated and perms.is_admin and empty_osvariants %} +{% if user.is_authenticated and perms.is_admin and nohost_osvariants %} {% endif %} diff --git a/operatingsystems/urls.py b/operatingsystems/urls.py index 923c8b62..df194c9d 100644 --- a/operatingsystems/urls.py +++ b/operatingsystems/urls.py @@ -26,6 +26,7 @@ path('variants/', views.osvariant_list, name='osvariant_list'), path('variants//', views.osvariant_detail, name='osvariant_detail'), path('variants//delete/', views.osvariant_delete, name='osvariant_delete'), + path('variants/no_host/delete/', views.delete_nohost_osvariants, name='delete_nohost_osvariants'), path('releases/', views.osrelease_list, name='osrelease_list'), path('releases//', views.osrelease_detail, name='osrelease_detail'), path('releases//delete/', views.osrelease_delete, name='osrelease_delete'), diff --git a/operatingsystems/views.py b/operatingsystems/views.py index f4f4c963..131c9258 100644 --- a/operatingsystems/views.py +++ b/operatingsystems/views.py @@ -24,6 +24,7 @@ from rest_framework import viewsets +from hosts.models import Host from operatingsystems.models import OSVariant, OSRelease from operatingsystems.forms import AddOSVariantToOSReleaseForm, AddReposToOSReleaseForm, CreateOSReleaseForm from operatingsystems.serializers import OSVariantSerializer, OSReleaseSerializer @@ -33,6 +34,9 @@ def osvariant_list(request): osvariants = OSVariant.objects.select_related() + if 'osrelease' in request.GET: + osvariants = osvariants.filter(osrelease=int(request.GET['osrelease'])) + if 'search' in request.GET: 
terms = request.GET['search'].lower() query = Q() @@ -53,13 +57,13 @@ def osvariant_list(request): except EmptyPage: page = paginator.page(paginator.num_pages) - empty_osvariants = list(OSVariant.objects.filter(host__isnull=True)) + nohost_osvariants = OSVariant.objects.filter(host__isnull=True).count() >= 1 return render(request, 'operatingsystems/osvariant_list.html', {'page': page, 'terms': terms, - 'empty_osvariants': empty_osvariants}) + 'nohost_osvariants': nohost_osvariants}) @login_required @@ -94,39 +98,38 @@ def osvariant_detail(request, osvariant_id): @login_required def osvariant_delete(request, osvariant_id): - if osvariant_id == 'empty_osvariants': - osvariant = False - osvariants = list(OSVariant.objects.filter(host__isnull=True)) - else: - osvariant = get_object_or_404(OSVariant, id=osvariant_id) - osvariants = False + osvariant = get_object_or_404(OSVariant, id=osvariant_id) if request.method == 'POST': if 'delete' in request.POST: - if osvariant: - osvariant.delete() - messages.info(request, f'OS Variant {osvariant} has been deleted') - return redirect(reverse('operatingsystems:osvariant_list')) - else: - if not osvariants: - text = 'There are no OS Variants with no Hosts' - messages.info(request, text) - return redirect(reverse('operatingsystems:osvariant_list')) - for osvariant in osvariants: - osvariant.delete() - text = f'{len(osvariants)} OS Variants have been deleted' + osvariant.delete() + messages.info(request, f'OS Variant {osvariant} has been deleted') + return redirect(reverse('operatingsystems:osvariant_list')) + elif 'cancel' in request.POST: + return redirect(osvariant.get_absolute_url()) + + return render(request, 'operatingsystems/osvariant_delete.html', {'osvariant': osvariant}) + + +@login_required +def delete_nohost_osvariants(request): + osvariants = list(OSVariant.objects.filter(host__isnull=True)) + + if request.method == 'POST': + if 'delete' in request.POST: + if not osvariants: + text = 'There are no OS Variants with no 
Hosts' messages.info(request, text) return redirect(reverse('operatingsystems:osvariant_list')) + for osvariant in osvariants: + osvariant.delete() + text = f'{len(osvariants)} OS Variants have been deleted' + messages.info(request, text) + return redirect(reverse('operatingsystems:osvariant_list')) elif 'cancel' in request.POST: - if osvariant_id == 'empty_oses': - return redirect(reverse('operatingsystems:osvariant_list')) - else: - return redirect(osvariant.get_absolute_url()) + return redirect(reverse('operatingsystems:osvariant_list')) - return render(request, - 'operatingsystems/osvariant_delete.html', - {'osvariant': osvariant, - 'osvariants': osvariants}) + return render(request, 'operatingsystems/osvariant_delete_multiple.html', {'osvariants': osvariants}) @login_required @@ -174,11 +177,13 @@ def osrelease_detail(request, osrelease_id): return redirect(osrelease.get_absolute_url()) repos_form = AddReposToOSReleaseForm(instance=osrelease) + host_count = Host.objects.filter(osvariant__osrelease=osrelease).count() return render(request, 'operatingsystems/osrelease_detail.html', {'osrelease': osrelease, - 'repos_form': repos_form}) + 'repos_form': repos_form, + 'host_count': host_count}) @login_required @@ -190,13 +195,16 @@ def osrelease_delete(request, osrelease_id): osrelease.delete() text = f'OS Release {osrelease} has been deleted' messages.info(request, text) - return redirect(reverse('operatingsystems:osvariant_list')) + return redirect(reverse('operatingsystems:osrelease_list')) elif 'cancel' in request.POST: return redirect(osrelease.get_absolute_url()) + host_count = Host.objects.filter(osvariant__osrelease=osrelease).count() + return render(request, 'operatingsystems/osrelease_delete.html', - {'osrelease': osrelease}) + {'osrelease': osrelease, + 'host_count': host_count}) @login_required diff --git a/reports/models.py b/reports/models.py index 4b966ae2..7cc072b5 100644 --- a/reports/models.py +++ b/reports/models.py @@ -62,7 +62,8 @@ def 
get_absolute_url(self): return reverse('reports:report_detail', args=[str(self.id)]) def parse(self, data, meta): - + """ Parse a report and save the object + """ x_real_ip = meta.get('HTTP_X_REAL_IP') x_forwarded_for = meta.get('HTTP_X_FORWARDED_FOR') if x_forwarded_for: @@ -106,24 +107,102 @@ def process(self, find_updates=True, verbose=False): """ Process a report and extract os, arch, domain, packages, repos etc """ if self.os and self.kernel and self.arch and not self.processed: + self_os = self.os + os = self.os + cpe_name = None + codename = None osrelease_codename = None - match = re.match(r'(.*) \((.*)\)', self.os) + osvariant_codename = None + osrelease_name = os + osvariant_name = os + + # find cpe_name if it exists + match = re.match(r'(.*) \[(.*)\]', os) if match: - os_name = match.group(1) - osrelease_codename = match.group(2) - else: - os_name = self.os + cpe_name = match.group(2) + os = match.group(1) + + # find codename if it exists + match = re.match(r'(.*) \((.*)\)', os) + if match: + osrelease_name = match.group(1) + codename = match.group(2) + if not os.startswith('AlmaLinux'): + osrelease_codename = codename + + if os.startswith('Gentoo'): + osrelease_name = 'Gentoo Linux' + # presumptive, can be changed once a real cpe is assigned/used + cpe_name = 'cpe:2.3:o:gentoo:gentoo_linux:::' + + if os.startswith('AlmaLinux'): + os = os.replace('AlmaLinux', 'Alma Linux') + osrelease_name = os.split('.')[0] + # alma changes the codename with each minor release, so it's useless to us now + osvariant_name = os.replace(f' ({codename})', '') + osvariant_codename = codename + + if os.startswith('Debian'): + major, minor = os.split(' ')[1].split('.') + debian_version = f'{major}.{minor}' + osrelease_name = f'Debian {major}' + # presumptive, can be changed once a real cpe is assigned/used + cpe_name = f'cpe:2.3:o:debian:debian_linux:{debian_version}::' + + if os.startswith('Ubuntu'): + lts = '' + if 'LTS' in os: + lts = ' LTS' + major, minor, patch = 
os.split(' ')[1].split('.') + ubuntu_version = f'{major}_{minor}' + osrelease_name = f'Ubuntu {major}.{minor}{lts}' + cpe_name = f'cpe:2.3:o:canonical:ubuntu_linux:{ubuntu_version}::' + + if os.startswith('Arch'): + # presumptive, can be changed once a real cpe is assigned/used + cpe_name = 'cpe:2.3:o:archlinux:arch_linux:::' + + if os.startswith('Rocky'): + osrelease_name = os.split('.')[0] with transaction.atomic(): m_arch, created = MachineArchitecture.objects.get_or_create(name=self.arch) with transaction.atomic(): - osvariant, created = OSVariant.objects.get_or_create(name=os_name, arch=m_arch) + try: + osvariant, created = OSVariant.objects.get_or_create(name=osvariant_name, arch=m_arch) + except IntegrityError: + osvariants = OSVariant.objects.filter(name=osvariant_name) + if osvariants.count() == 1: + osvariant = osvariants[0] + if osvariant.arch is None: + osvariant.arch = m_arch + + if osvariant and osvariant_codename: + osvariant.codename = osvariant_codename - if osrelease_codename: + if cpe_name: + try: + osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name, cpe_name=cpe_name) + except IntegrityError: + osreleases = OSRelease.objects.filter(name=osrelease_name) + if osreleases.count() == 1: + osrelease = osreleases[0] + osrelease.cpe_name = cpe_name + elif osrelease_codename: osreleases = OSRelease.objects.filter(codename=osrelease_codename) if osreleases.count() == 1: - osvariant.osrelease = osreleases[0] + osrelease = osreleases[0] + elif osrelease_name: + osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) + osrelease.save() + osvariant.osrelease = osrelease + osvariant.save() + + o = locals().items() + for name, value in o: + if name in ['self_os', 'os', 'osrelease_name', 'osvariant_name', 'osrelease', 'osvariant']: + print(name, value) if not self.domain: self.domain = 'unknown' @@ -169,12 +248,10 @@ def process(self, find_updates=True, verbose=False): host.check_rdns() if verbose: - text = 'Processing 
report ' - text += f'{self.id} - {self.host}' + text = 'Processing report {self.id} - {self.host}' info_message.send(sender=None, text=text) - from reports.utils import process_packages, \ - process_repos, process_updates, process_modules + from reports.utils import process_packages, process_repos, process_updates, process_modules with transaction.atomic(): process_repos(report=self, host=host) with transaction.atomic(): @@ -190,16 +267,13 @@ def process(self, find_updates=True, verbose=False): if find_updates: if verbose: - text = 'Finding updates for report ' - text += f'{self.id} - {self.host}' + text = 'Finding updates for report {self.id} - {self.host}' info_message.send(sender=None, text=text) host.find_updates() else: if self.processed: - text = f'Report {self.id} ' - text += 'has already been processed' + text = f'Report {self.id} has already been processed' info_message.send(sender=None, text=text) else: - text = 'Error: OS, kernel or arch not sent ' - text += f'with report {self.id}' + text = 'Error: OS, kernel or arch not sent with report {self.id}' error_message.send(sender=None, text=text) diff --git a/reports/utils.py b/reports/utils.py index a777cfa1..5c65d58a 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -24,11 +24,9 @@ from repos.models import Repository, Mirror, MirrorPackage from modules.models import Module from packages.models import Package, PackageCategory -from packages.utils import find_evr, get_or_create_package, \ - get_or_create_package_update, parse_package_string +from packages.utils import find_evr, get_or_create_package, get_or_create_package_update, parse_package_string from repos.utils import get_or_create_repo -from patchman.signals import progress_info_s, progress_update_s, \ - error_message, info_message +from patchman.signals import progress_info_s, progress_update_s, error_message, info_message def process_repos(report, host): @@ -255,9 +253,8 @@ def process_repo(repo, arch): if repo[1]: r_name = repo[1] - 
machine_arches = MachineArchitecture.objects.all() with transaction.atomic(): - r_arch, c = machine_arches.get_or_create(name=arch) + r_arch, c = MachineArchitecture.objects.get_or_create(name=arch) unknown = [] for r_url in repo[3:]: @@ -278,6 +275,11 @@ def process_repo(repo, arch): with transaction.atomic(): repository.save() + if r_name and repository.name != r_name: + repository.name = r_name + with transaction.atomic(): + repository.save() + for url in unknown: Mirror.objects.create(repo=repository, url=url) @@ -415,10 +417,15 @@ def process_package(pkg, protocol): repo_arch, created = machine_arches.get_or_create(name='any') repo_name = 'Gentoo Linux' - repo = get_or_create_repo(repo_name, repo_arch, Repository.GENTOO) + repo = get_or_create_repo(repo_name, repo_arch, Repository.GENTOO, p_repo) with transaction.atomic(): - url = f'gentoo virtual for {p_repo}' + if p_repo == 'gentoo': + url = 'https://api.gentoo.org/mirrors/distfiles.xml' + else: + # this may not be correct. the urls are hardcoded anyway in repos/utils.py + # need to figure out a better way to determine which repo/repo url to use + url = 'https://api.gentoo.org/overlays/repositories.xml' mirror, c = Mirror.objects.get_or_create(repo=repo, url=url, mirrorlist=True) MirrorPackage.objects.create(mirror=mirror, package=package) diff --git a/reports/views.py b/reports/views.py index e676970b..fe8d6e7b 100644 --- a/reports/views.py +++ b/reports/views.py @@ -15,6 +15,8 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see +from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential + from django.http import HttpResponse, Http404 from django.views.decorators.csrf import csrf_exempt from django.shortcuts import get_object_or_404, render, redirect @@ -23,14 +25,18 @@ from django.urls import reverse from django.db import transaction from django.db.models import Q -from django.conf import settings from django.contrib import messages +from django.db.utils import OperationalError from util.filterspecs import Filter, FilterBar - from reports.models import Report +@retry( + retry=retry_if_exception_type(OperationalError), + stop=stop_after_attempt(5), + wait=wait_exponential(multiplier=1, min=2, max=15), +) @csrf_exempt def upload(request): @@ -124,13 +130,16 @@ def report_detail(request, report_id): @login_required def report_process(request, report_id): - + """ Process a report using a celery task + """ report = get_object_or_404(Report, id=report_id) - report.process() - - return render(request, - 'reports/report_detail.html', - {'report': report}) + report.processed = False + report.save() + from reports.tasks import process_report + process_report.delay(report.id) + text = f'Report {report} is being processed' + messages.info(request, text) + return redirect(report.get_absolute_url()) @login_required diff --git a/repos/templates/repos/mirror_delete.html b/repos/templates/repos/mirror_delete.html index 808c8024..2b203a15 100644 --- a/repos/templates/repos/mirror_delete.html +++ b/repos/templates/repos/mirror_delete.html @@ -14,7 +14,7 @@ - + diff --git a/repos/templates/repos/mirror_detail.html b/repos/templates/repos/mirror_detail.html index 35c81877..9d772b95 100644 --- a/repos/templates/repos/mirror_detail.html +++ b/repos/templates/repos/mirror_detail.html @@ -14,7 +14,7 @@
    Repo {{ mirror.repo }}
    URL {{ mirror.url }}
    Packages {{ mirror.packages.count }}
    Packages{{ mirror.packages.count }}
    Enabled {% yes_no_img mirror.enabled 'Enabled' 'Not Enabled' %}
    Refresh {% yes_no_img mirror.refresh 'True' 'False' %}
    Mirrorlist/Metalink {% yes_no_img mirror.mirrorlist 'True' 'False' %}
    - + diff --git a/repos/templates/repos/mirror_table.html b/repos/templates/repos/mirror_table.html index 32e9b57c..2e09986c 100644 --- a/repos/templates/repos/mirror_table.html +++ b/repos/templates/repos/mirror_table.html @@ -3,13 +3,13 @@ - + - + diff --git a/repos/templates/repos/repo_delete.html b/repos/templates/repos/repo_delete.html index ebe74fe7..7c09cf45 100644 --- a/repos/templates/repos/repo_delete.html +++ b/repos/templates/repos/repo_delete.html @@ -12,8 +12,8 @@
    Repo {{ mirror.repo }}
    URL {{ mirror.url }}
    Packages {{ mirror.packages.count }}
    Packages{{ mirror.packages.count }}
    Enabled {% yes_no_img mirror.enabled 'Enabled' 'Not Enabled' %}
    Refresh {% yes_no_img mirror.refresh 'True' 'False' %}
    Mirrorlist/Metalink {% yes_no_img mirror.mirrorlist 'True' 'False' %}
    IDURLURL Packages Enabled Refresh Mirrorlist/Metalink Last Access OKTimestampTimestamp Checksum Delete Edit
    - - + + diff --git a/repos/templates/repos/repo_detail.html b/repos/templates/repos/repo_detail.html index 7cd2a1b3..d182a085 100644 --- a/repos/templates/repos/repo_detail.html +++ b/repos/templates/repos/repo_detail.html @@ -22,8 +22,8 @@
    Name {{ repo.name }}
    ID {% if repo.id != None %} {{ repo.id }} {% endif %}
    Name {{ repo.name }}
    Repo ID {% if repo.repo_id != None %} {{ repo.repo_id }} {% endif %}
    Type {{ repo.get_repotype_display }}
    Architecture {{ repo.arch }}
    Security {% yes_no_img repo.security 'Security' 'Not Security' %}
    - - + + @@ -44,6 +44,7 @@ {% else %} {% bootstrap_icon "star" %} Mark as Security repo {% endif %} + {% bootstrap_icon "tasks" %} Refresh this Repository {% endif %} diff --git a/repos/templates/repos/repository_table.html b/repos/templates/repos/repository_table.html index cf716690..bcd7e721 100644 --- a/repos/templates/repos/repository_table.html +++ b/repos/templates/repos/repository_table.html @@ -2,8 +2,8 @@
    Name {{ repo.name }}
    ID {% if repo.id != None %} {{ repo.id }} {% endif %}
    Name {{ repo.name }}
    Repo ID {% if repo.repo_id %} {{ repo.repo_id }} {% endif %}
    Type {{ repo.get_repotype_display }}
    Architecture {{ repo.arch }}
    Security {% yes_no_img repo.security 'Security' 'Not Security' %}
    - - + + @@ -14,8 +14,8 @@ {% for repo in object_list %} - - + + diff --git a/repos/views.py index 04966c36..5b0fa955 100644 --- a/repos/views.py +++ b/repos/views.py @@ -31,10 +31,8 @@ from repos.models import Repository, Mirror, MirrorPackage from operatingsystems.models import OSRelease from arch.models import MachineArchitecture -from repos.forms import EditRepoForm, LinkRepoForm, CreateRepoForm, \ - EditMirrorForm -from repos.serializers import RepositorySerializer, \ - MirrorSerializer, MirrorPackageSerializer +from repos.forms import EditRepoForm, LinkRepoForm, CreateRepoForm, EditMirrorForm +from repos.serializers import RepositorySerializer, MirrorSerializer, MirrorPackageSerializer @login_required @@ -376,6 +374,18 @@ def repo_toggle_security(request, repo_id): return redirect(repo.get_absolute_url()) +@login_required +def repo_refresh(request, repo_id): + """ Refresh a repo using a celery task + """ + repo = get_object_or_404(Repository, id=repo_id) + from repos.tasks import refresh_repo + refresh_repo.delay(repo.id) + text = f'Repository {repo} is being refreshed' + messages.info(request, text) + return redirect(repo.get_absolute_url()) + + class RepositoryViewSet(viewsets.ModelViewSet): """ API endpoint that allows repositories to be viewed or edited. diff --git a/util/templates/dashboard.html index b8dd8e96..14964e98 100644 --- a/util/templates/dashboard.html +++ b/util/templates/dashboard.html @@ -191,7 +191,7 @@
    {% for checksum in possible_mirrors %} - {{ checksum }} + {{ checksum }} {% endfor %}
    From fc068d165152d006080cff6f6feb734c1603dd02 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 13 Feb 2025 03:01:38 -0500 Subject: [PATCH 050/199] celery and task updates --- errata/tasks.py | 11 ----------- etc/patchman/local_settings.py | 36 ++++++++++++++++++++++++++++------ patchman/__init__.py | 2 +- patchman/settings.py | 4 ---- reports/tasks.py | 20 ++++++------------- reports/urls.py | 2 +- repos/models.py | 7 +++---- repos/tasks.py | 17 ++++------------ repos/urls.py | 11 ++++++----- repos/utils.py | 5 ++++- security/tasks.py | 8 ++++---- 11 files changed, 59 insertions(+), 64 deletions(-) diff --git a/errata/tasks.py b/errata/tasks.py index 5b253ffa..76b31c9a 100644 --- a/errata/tasks.py +++ b/errata/tasks.py @@ -14,22 +14,11 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see -from datetime import timedelta - -from django.conf import settings - from celery import shared_task -from patchman.celery import app from errata.models import Erratum from security.tasks import update_cves, update_cwes -app.conf.beat_schedule = { - 'update-errata-cves-cwes-every-6-hours': { - 'task': 'tasks.update_errata', - 'schedule': timedelta(hours=6), - }, -} @shared_task def update_erratum(erratum): diff --git a/etc/patchman/local_settings.py b/etc/patchman/local_settings.py index c14de3cd..d00edddb 100644 --- a/etc/patchman/local_settings.py +++ b/etc/patchman/local_settings.py @@ -29,19 +29,43 @@ ALLOWED_HOSTS = ['127.0.0.1', '*'] # Maximum number of mirrors to add or refresh per repo -MAX_MIRRORS = 5 +MAX_MIRRORS = 3 -# Number of days to wait before notifying users that a host has not reported +# Number of days to wait before raising that a host has not reported DAYS_WITHOUT_REPORT = 14 # Whether to run patchman under the gunicorn web server RUN_GUNICORN = False -# Enable redis caching for 30 seconds CACHES = { 'default': { - 'BACKEND': 'django.core.cache.backends.redis.RedisCache', - 'LOCATION': 
'redis://127.0.0.1:6379', - 'TIMEOUT': 30, + 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } } + +# Uncomment to enable redis caching for e.g. 30 seconds +# Note that the UI results may be out of date for this amount of time +# CACHES = { +# 'default': { +# 'BACKEND': 'django.core.cache.backends.redis.RedisCache', +# 'LOCATION': 'redis://127.0.0.1:6379', +# 'TIMEOUT': 30, +# } +# } + +from datetime import timedelta # noqa +from celery.schedules import crontab # noqa +CELERY_BEAT_SCHEDULE = { + 'process_all_unprocessed_reports': { + 'task': 'reports.tasks.process_reports', + 'schedule': crontab(minute='*/5'), + }, + 'refresh_repos_daily': { + 'task': 'tasks.refresh_repos', + 'schedule': crontab(hour=6, minute=00), + }, + 'update_errata_cves_cwes_every_12_hours': { + 'task': 'tasks.update_errata', + 'schedule': timedelta(hours=12), + }, +} diff --git a/patchman/__init__.py b/patchman/__init__.py index e78b0cd3..af122cc6 100644 --- a/patchman/__init__.py +++ b/patchman/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2013-2021 Marcus Furlong +# Copyright 2013-2025 Marcus Furlong # # This file is part of Patchman. # diff --git a/patchman/settings.py b/patchman/settings.py index 90e68151..85ff6719 100644 --- a/patchman/settings.py +++ b/patchman/settings.py @@ -4,10 +4,6 @@ import site import sys -#import django -#from django.utils.encoding import smart_str -#django.utils.encoding.smart_text = smart_str - # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) diff --git a/reports/tasks.py b/reports/tasks.py index 3fd74645..53de5e64 100755 --- a/reports/tasks.py +++ b/reports/tasks.py @@ -15,24 +15,16 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see -from django.conf import settings +from celery import shared_task -from reports.models import Report +from django.db.utils import OperationalError -from celery import shared_task -from celery.schedules import crontab -from patchman.celery import app +from reports.models import Report -app.conf.beat_schedule = { - 'process-reports': { - 'task': 'reports.tasks.process_reports', - 'schedule': crontab(minute='*/5'), - }, -} -@shared_task -def process_report(report_id): - report = Report.objects.get(report_id) +@shared_task(bind=True, autoretry_for=(OperationalError,), retry_backoff=True, retry_kwargs={'max_retries': 5}) +def process_report(self, report_id): + report = Report.objects.get(id=report_id) report.process() diff --git a/reports/urls.py b/reports/urls.py index 56965f52..8826cc82 100644 --- a/reports/urls.py +++ b/reports/urls.py @@ -26,5 +26,5 @@ path('upload/', views.upload), path('/', views.report_detail, name='report_detail'), path('/delete/', views.report_delete, name='report_delete'), - path('/process/', views.report_process, name='report_process'), # noqa + path('/process/', views.report_process, name='report_process'), ] diff --git a/repos/models.py b/repos/models.py index 72f8f8d8..43b0e7d2 100644 --- a/repos/models.py +++ b/repos/models.py @@ -23,8 +23,8 @@ from packages.models import Package from util import has_setting_of_type -from repos.utils import refresh_deb_repo, refresh_rpm_repo, \ - refresh_arch_repo, refresh_gentoo_repo, update_mirror_packages +from repos.utils import refresh_deb_repo, refresh_rpm_repo, refresh_arch_repo, refresh_gentoo_repo, \ + update_mirror_packages from patchman.signals import info_message, warning_message, error_message @@ -96,8 +96,7 @@ def refresh(self, force=False): elif self.repotype == Repository.GENTOO: refresh_gentoo_repo(self) else: - text = 'Error: unknown repo type for repo ' - text += f'{self.id}: {self.repotype}' + text = 'Error: unknown repo type for repo {self.id}: {self.repotype}' 
error_message.send(sender=None, text=text) else: text = 'Repo requires certificate authentication, not updating' diff --git a/repos/tasks.py b/repos/tasks.py index 077668e9..1319b9c8 100644 --- a/repos/tasks.py +++ b/repos/tasks.py @@ -14,25 +14,16 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see -from django.conf import settings - -from repos.models import Repository, Mirror - from celery import shared_task -from celery.schedules import crontab -from patchman.celery import app -app.conf.beat_schedule = { - 'refresh-repos-every-day': { - 'task': 'tasks.refresh_repos', - 'schedule': crontab(hour=6, minute=00), - }, -} +from repos.models import Repository + @shared_task -def refresh_repo(force=False): +def refresh_repo(repo_id, force=False): """ Refresh metadata for a single repo """ + repo = Repository.objects.get(id=repo_id) repo.refresh(force) diff --git a/repos/urls.py b/repos/urls.py index 246ce678..176f9a13 100644 --- a/repos/urls.py +++ b/repos/urls.py @@ -24,12 +24,13 @@ urlpatterns = [ path('', views.repo_list, name='repo_list'), path('/', views.repo_detail, name='repo_detail'), - path('/toggle_enabled/', views.repo_toggle_enabled, name='repo_toggle_enabled'), # noqa - path('/toggle_security/', views.repo_toggle_security, name='repo_toggle_security'), # noqa + path('/toggle_enabled/', views.repo_toggle_enabled, name='repo_toggle_enabled'), + path('/toggle_security/', views.repo_toggle_security, name='repo_toggle_security'), path('/edit/', views.repo_edit, name='repo_edit'), path('/delete/', views.repo_delete, name='repo_delete'), + path('/refresh/', views.repo_refresh, name='repo_refresh'), path('mirrors/', views.mirror_list, name='mirror_list'), - path('mirrors/mirror//', views.mirror_detail, name='mirror_detail'), # noqa - path('mirrors/mirror//edit/', views.mirror_edit, name='mirror_edit'), # noqa - path('mirrors/mirror//delete/', views.mirror_delete, name='mirror_delete'), # noqa + 
path('mirrors/mirror//', views.mirror_detail, name='mirror_detail'), + path('mirrors/mirror//edit/', views.mirror_edit, name='mirror_edit'), + path('mirrors/mirror//delete/', views.mirror_delete, name='mirror_delete'), ] diff --git a/repos/utils.py b/repos/utils.py index da5f069f..6148b82d 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -45,7 +45,7 @@ info_message, warning_message, error_message, debug_message -def get_or_create_repo(r_name, r_arch, r_type): +def get_or_create_repo(r_name, r_arch, r_type, r_id=None): """ Get or create a Repository object. Returns the object. Returns None if it cannot get or create the object. """ @@ -64,6 +64,9 @@ def get_or_create_repo(r_name, r_arch, r_type): except DatabaseError as e: error_message.send(sender=None, text=e) if repository: + if r_id: + repository.repo_id = r_id + repository.save() return repository diff --git a/security/tasks.py b/security/tasks.py index f3935235..8219fba5 100644 --- a/security/tasks.py +++ b/security/tasks.py @@ -14,13 +14,10 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see -from django.conf import settings +from celery import shared_task from security.models import CVE, CWE -from celery import shared_task -from patchman.celery import app - @shared_task def update_cve(cve): @@ -28,6 +25,7 @@ def update_cve(cve): """ cve.update() + @shared_task def update_cves(): """ Task to update all CVEs @@ -35,12 +33,14 @@ def update_cves(): for cve in CVE.objects.all(): update_cve.delay(cve) + @shared_task def update_cwe(cwe): """ Task to update a CWE """ cwe.update() + @shared_task def update_cwes(): """ Task to update all CWEa From f3a5cf4da7d235c1c94a0904983661a0e0041fc1 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 24 Feb 2025 11:17:53 -0500 Subject: [PATCH 051/199] correctly filter periodic reports --- reports/models.py | 5 ----- reports/tasks.py | 2 +- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/reports/models.py b/reports/models.py index 7cc072b5..96dcf2a0 100644 --- a/reports/models.py +++ b/reports/models.py @@ -199,11 +199,6 @@ def process(self, find_updates=True, verbose=False): osvariant.osrelease = osrelease osvariant.save() - o = locals().items() - for name, value in o: - if name in ['self_os', 'os', 'osrelease_name', 'osvariant_name', 'osrelease', 'osvariant']: - print(name, value) - if not self.domain: self.domain = 'unknown' domains = Domain.objects.all() diff --git a/reports/tasks.py b/reports/tasks.py index 53de5e64..bfa751cc 100755 --- a/reports/tasks.py +++ b/reports/tasks.py @@ -30,6 +30,6 @@ def process_report(self, report_id): @shared_task def process_reports(): - reports = Report.objects.all(processed=False) + reports = Report.objects.filter(processed=False) for report in reports: process_report.delay(report.id) From 9d2169f8b3cb8dd5140788e79b999d5dbac7fda4 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 25 Feb 2025 21:59:12 -0500 Subject: [PATCH 052/199] celery tasks use id instead of non-serializable json --- errata/tasks.py | 5 +++-- etc/patchman/local_settings.py | 4 
++-- reports/models.py | 1 - reports/tasks.py | 4 ++++ repos/tasks.py | 2 +- security/tasks.py | 10 ++++++---- 6 files changed, 16 insertions(+), 10 deletions(-) diff --git a/errata/tasks.py b/errata/tasks.py index 76b31c9a..438c33e0 100644 --- a/errata/tasks.py +++ b/errata/tasks.py @@ -21,9 +21,10 @@ @shared_task -def update_erratum(erratum): +def update_erratum(erratum_id): """ Task to update an erratum """ + erratum = Erratum.objects.get(id=erratum_id) erratum.update() @@ -32,6 +33,6 @@ def update_errata(): """ Task to update all errata """ for e in Erratum.objects.all(): - update_erratum.delay(e) + update_erratum.delay(e.id) update_cves.delay() update_cwes.delay() diff --git a/etc/patchman/local_settings.py b/etc/patchman/local_settings.py index d00edddb..7fb660a2 100644 --- a/etc/patchman/local_settings.py +++ b/etc/patchman/local_settings.py @@ -61,11 +61,11 @@ 'schedule': crontab(minute='*/5'), }, 'refresh_repos_daily': { - 'task': 'tasks.refresh_repos', + 'task': 'repos.tasks.refresh_repos', 'schedule': crontab(hour=6, minute=00), }, 'update_errata_cves_cwes_every_12_hours': { - 'task': 'tasks.update_errata', + 'task': 'errata.tasks.update_errata', 'schedule': timedelta(hours=12), }, } diff --git a/reports/models.py b/reports/models.py index 96dcf2a0..abdcb556 100644 --- a/reports/models.py +++ b/reports/models.py @@ -107,7 +107,6 @@ def process(self, find_updates=True, verbose=False): """ Process a report and extract os, arch, domain, packages, repos etc """ if self.os and self.kernel and self.arch and not self.processed: - self_os = self.os os = self.os cpe_name = None codename = None diff --git a/reports/tasks.py b/reports/tasks.py index bfa751cc..46131155 100755 --- a/reports/tasks.py +++ b/reports/tasks.py @@ -24,12 +24,16 @@ @shared_task(bind=True, autoretry_for=(OperationalError,), retry_backoff=True, retry_kwargs={'max_retries': 5}) def process_report(self, report_id): + """ Task to process a single report + """ report = 
Report.objects.get(id=report_id) report.process() @shared_task def process_reports(): + """ Task to process all unprocessed reports + """ reports = Report.objects.filter(processed=False) for report in reports: process_report.delay(report.id) diff --git a/repos/tasks.py b/repos/tasks.py index 1319b9c8..39098fa8 100644 --- a/repos/tasks.py +++ b/repos/tasks.py @@ -33,4 +33,4 @@ def refresh_repos(force=False): """ repos = Repository.objects.filter(enabled=True) for repo in repos: - refresh_repo.delay(repo, force) + refresh_repo.delay(repo.id, force) diff --git a/security/tasks.py b/security/tasks.py index 8219fba5..d99b9579 100644 --- a/security/tasks.py +++ b/security/tasks.py @@ -20,9 +20,10 @@ @shared_task -def update_cve(cve): +def update_cve(cve_id): """ Task to update a CVE """ + cve = CVE.objects.get(id=cve_id) cve.update() @@ -31,13 +32,14 @@ def update_cves(): """ Task to update all CVEs """ for cve in CVE.objects.all(): - update_cve.delay(cve) + update_cve.delay(cve.id) @shared_task -def update_cwe(cwe): +def update_cwe(cwe_id): """ Task to update a CWE """ + cwe = CWE.objects.get(id=cwe_id) cwe.update() @@ -46,4 +48,4 @@ def update_cwes(): """ Task to update all CWEa """ for cwe in CWE.objects.all(): - update_cwe.delay(cwe) + update_cwe.delay(cwe.id) From 45387cfe19d76c17b215eb282c5f0489f13e9172 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 25 Feb 2025 23:34:45 -0500 Subject: [PATCH 053/199] add patchman-celery service --- debian/python3-patchman.install | 1 + etc/systemd/patchman-celery.service | 14 ++++++++++++++ 2 files changed, 15 insertions(+) create mode 100644 etc/systemd/patchman-celery.service diff --git a/debian/python3-patchman.install b/debian/python3-patchman.install index ededd6ec..e13b11ca 100755 --- a/debian/python3-patchman.install +++ b/debian/python3-patchman.install @@ -1,3 +1,4 @@ #!/usr/bin/dh-exec etc/patchman/apache.conf.example => etc/apache2/conf-available/patchman.conf etc/patchman/local_settings.py etc/patchman 
+etc/systemd/system/patchman-celery.service => lib/systemd/system/patchman-celery.service diff --git a/etc/systemd/patchman-celery.service b/etc/systemd/patchman-celery.service new file mode 100644 index 00000000..2da16299 --- /dev/null +++ b/etc/systemd/patchman-celery.service @@ -0,0 +1,14 @@ +[Unit] +Description=Patchman Celery Service +Requires=network-online.target +After=network-online.target + +[Service] +Type=simple +User=patchman-celery +Group=patchman-celery +EnvironmentFile=/etc/patchman/celery +ExecStart=/usr/bin/celery --broker redis://${REDIS_HOST}:${REDIS_PORT}/0 --app patchman worker --loglevel info --beat --scheduler django_celery_beat.schedulers:DatabaseScheduler --task-events + +[Install] +WantedBy=multi-user.target From 1a6d6a3f2d5b68a5c0eb1ceb5044a6f7adafe2f8 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 00:31:59 -0500 Subject: [PATCH 054/199] enable WAL mode for sqlite3 db --- debian/python3-patchman.postinst | 1 + scripts/rpm-post-install.sh | 1 + 2 files changed, 2 insertions(+) diff --git a/debian/python3-patchman.postinst b/debian/python3-patchman.postinst index 015a9e27..ade265b8 100644 --- a/debian/python3-patchman.postinst +++ b/debian/python3-patchman.postinst @@ -20,6 +20,7 @@ if [ "$1" = "configure" ] ; then patchman-manage makemigrations patchman-manage migrate --run-syncdb --fake-initial + sqlite3 /var/lib/patchman/db/patchman.db 'PRAGMA journal_mode=WAL;' chown -R www-data:www-data /var/lib/patchman diff --git a/scripts/rpm-post-install.sh b/scripts/rpm-post-install.sh index 18f28615..6c88a917 100644 --- a/scripts/rpm-post-install.sh +++ b/scripts/rpm-post-install.sh @@ -20,6 +20,7 @@ patchman-manage collectstatic --noinput patchman-manage makemigrations patchman-manage migrate --run-syncdb --fake-initial +sqlite3 /var/lib/patchman/db/patchman.db 'PRAGMA journal_mode=WAL;' chown -R apache:apache /var/lib/patchman chcon --type httpd_sys_rw_content_t /var/lib/patchman/db/patchman.db From 
687e0f4628f581a1f5e20373e09b78758e51434c Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 00:51:26 -0500 Subject: [PATCH 055/199] simplify osvariant deletion --- operatingsystems/views.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/operatingsystems/views.py b/operatingsystems/views.py index 131c9258..9b9945c5 100644 --- a/operatingsystems/views.py +++ b/operatingsystems/views.py @@ -57,7 +57,7 @@ def osvariant_list(request): except EmptyPage: page = paginator.page(paginator.num_pages) - nohost_osvariants = OSVariant.objects.filter(host__isnull=True).count() >= 1 + nohost_osvariants = OSVariant.objects.filter(host__isnull=True).exists() return render(request, 'operatingsystems/osvariant_list.html', @@ -113,7 +113,7 @@ def osvariant_delete(request, osvariant_id): @login_required def delete_nohost_osvariants(request): - osvariants = list(OSVariant.objects.filter(host__isnull=True)) + osvariants = OSVariant.objects.filter(host__isnull=True) if request.method == 'POST': if 'delete' in request.POST: @@ -121,9 +121,8 @@ def delete_nohost_osvariants(request): text = 'There are no OS Variants with no Hosts' messages.info(request, text) return redirect(reverse('operatingsystems:osvariant_list')) - for osvariant in osvariants: - osvariant.delete() - text = f'{len(osvariants)} OS Variants have been deleted' + text = f'{osvariants.count()} OS Variants have been deleted' + osvariants.delete() messages.info(request, text) return redirect(reverse('operatingsystems:osvariant_list')) elif 'cancel' in request.POST: From 929c4fb30aa3ca963e5a97d419a124e807a1312d Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 00:51:55 -0500 Subject: [PATCH 056/199] add missing f-strings --- reports/models.py | 12 ++++-------- repos/models.py | 2 +- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/reports/models.py b/reports/models.py index abdcb556..816891bd 100644 --- a/reports/models.py +++ b/reports/models.py @@ 
-242,8 +242,7 @@ def process(self, find_updates=True, verbose=False): host.check_rdns() if verbose: - text = 'Processing report {self.id} - {self.host}' - info_message.send(sender=None, text=text) + info_message.send(sender=None, text=f'Processing report {self.id} - {self.host}') from reports.utils import process_packages, process_repos, process_updates, process_modules with transaction.atomic(): @@ -261,13 +260,10 @@ def process(self, find_updates=True, verbose=False): if find_updates: if verbose: - text = 'Finding updates for report {self.id} - {self.host}' - info_message.send(sender=None, text=text) + info_message.send(sender=None, text=f'Finding updates for report {self.id} - {self.host}') host.find_updates() else: if self.processed: - text = f'Report {self.id} has already been processed' - info_message.send(sender=None, text=text) + info_message.send(sender=None, text=f'Report {self.id} has already been processed') else: - text = 'Error: OS, kernel or arch not sent with report {self.id}' - error_message.send(sender=None, text=text) + error_message.send(sender=None, text=f'Error: OS, kernel or arch not sent with report {self.id}') diff --git a/repos/models.py b/repos/models.py index 43b0e7d2..dd3156e0 100644 --- a/repos/models.py +++ b/repos/models.py @@ -96,7 +96,7 @@ def refresh(self, force=False): elif self.repotype == Repository.GENTOO: refresh_gentoo_repo(self) else: - text = 'Error: unknown repo type for repo {self.id}: {self.repotype}' + text = f'Error: unknown repo type for repo {self.id}: {self.repotype}' error_message.send(sender=None, text=text) else: text = 'Repo requires certificate authentication, not updating' From 8b4b68635b1480725b080723afa660f5334b4b7f Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 11:03:03 -0500 Subject: [PATCH 057/199] add migrations for os and cves/cwes --- .../0007_alter_osrelease_unique_together.py | 17 ++++++++++++++ ...r_cve_description_alter_cwe_description.py | 23 +++++++++++++++++++ 
security/models.py | 4 ++-- 3 files changed, 42 insertions(+), 2 deletions(-) create mode 100644 operatingsystems/migrations/0007_alter_osrelease_unique_together.py create mode 100644 security/migrations/0003_alter_cve_description_alter_cwe_description.py diff --git a/operatingsystems/migrations/0007_alter_osrelease_unique_together.py b/operatingsystems/migrations/0007_alter_osrelease_unique_together.py new file mode 100644 index 00000000..862b6f06 --- /dev/null +++ b/operatingsystems/migrations/0007_alter_osrelease_unique_together.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-02-26 16:01 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('operatingsystems', '0006_osrelease_cpe_name_osvariant_codename'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='osrelease', + unique_together={('name', 'codename', 'cpe_name')}, + ), + ] diff --git a/security/migrations/0003_alter_cve_description_alter_cwe_description.py b/security/migrations/0003_alter_cve_description_alter_cwe_description.py new file mode 100644 index 00000000..0c44a981 --- /dev/null +++ b/security/migrations/0003_alter_cve_description_alter_cwe_description.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.19 on 2025-02-26 16:01 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0002_alter_cve_options'), + ] + + operations = [ + migrations.AlterField( + model_name='cve', + name='description', + field=models.TextField(blank=True, default=''), + ), + migrations.AlterField( + model_name='cwe', + name='description', + field=models.TextField(blank=True, default=''), + ), + ] diff --git a/security/models.py b/security/models.py index a957adb9..a4a414e4 100644 --- a/security/models.py +++ b/security/models.py @@ -28,7 +28,7 @@ class CWE(models.Model): cwe_id = models.CharField(max_length=255, unique=True) name = models.CharField(max_length=255, blank=True, 
null=True) - description = models.CharField(max_length=65535, blank=True, null=True) + description = models.TextField(blank=True, default='') def __str__(self): return f'{self.cwe_id} - {self.name}' @@ -76,7 +76,7 @@ class CVE(models.Model): cve_id = models.CharField(max_length=255, unique=True) title = models.CharField(max_length=255, blank=True, null=True) - description = models.CharField(max_length=65535) + description = models.TextField(blank=True, default='') reserved_date = models.DateTimeField(blank=True, null=True) published_date = models.DateTimeField(blank=True, null=True) rejected_date = models.DateTimeField(blank=True, null=True) From 695705e503df4fb22756a00855f93f3e2ea8c205 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 25 Feb 2025 23:22:43 -0500 Subject: [PATCH 058/199] create db cleaning celery task --- arch/utils.py | 52 ++++++++++ etc/patchman/local_settings.py | 6 +- modules/utils.py | 17 +++- packages/utils.py | 52 +++++++++- repos/utils.py | 18 +++- sbin/patchman | 178 ++++----------------------------- util/tasks.py | 36 +++++++ 7 files changed, 195 insertions(+), 164 deletions(-) create mode 100644 arch/utils.py create mode 100644 util/tasks.py diff --git a/arch/utils.py b/arch/utils.py new file mode 100644 index 00000000..1498fdec --- /dev/null +++ b/arch/utils.py @@ -0,0 +1,52 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from arch.models import PackageArchitecture, MachineArchitecture +from patchman.signals import info_message + + +def clean_package_architectures(): + """ Remove package architectures that are no longer in use + """ + parches = PackageArchitecture.objects.filter(package__isnull=True) + plen = parches.count() + if plen == 0: + info_message.send(sender=None, text='No orphaned PackageArchitectures found.') + else: + info_message.send(sender=None, text=f'Removing {plen} orphaned PackageArchitectures') + parches.delete() + + +def clean_machine_architectures(): + """ Remove machine architectures that are no longer in use + """ + marches = MachineArchitecture.objects.filter( + host__isnull=True, + repository__isnull=True, + ) + mlen = marches.count() + if mlen == 0: + info_message.send(sender=None, text='No orphaned MachineArchitectures found.') + else: + info_message.send(sender=None, text=f'Removing {mlen} orphaned MachineArchitectures') + marches.delete() + + +def clean_architectures(): + """ Remove architectures that are no longer in use + """ + clean_package_architectures() + clean_machine_architectures() diff --git a/etc/patchman/local_settings.py b/etc/patchman/local_settings.py index 7fb660a2..5a0c4985 100644 --- a/etc/patchman/local_settings.py +++ b/etc/patchman/local_settings.py @@ -62,10 +62,14 @@ }, 'refresh_repos_daily': { 'task': 'repos.tasks.refresh_repos', - 'schedule': crontab(hour=6, minute=00), + 'schedule': crontab(hour=4, minute=00), }, 'update_errata_cves_cwes_every_12_hours': { 'task': 'errata.tasks.update_errata', 'schedule': timedelta(hours=12), }, + 'run_database_maintenance_daily': { + 'task': 'util.tasks.clean_database', + 'schedule': crontab(hour=6, minute=00), + }, } diff --git a/modules/utils.py b/modules/utils.py index 020052b3..53eb236d 100644 --- a/modules/utils.py +++ b/modules/utils.py @@ -15,7 +15,7 @@ # along with Patchman. 
If not, see from django.db import IntegrityError, DatabaseError, transaction -from patchman.signals import error_message +from patchman.signals import error_message, info_message from modules.models import Module from arch.models import PackageArchitecture @@ -67,3 +67,18 @@ def get_matching_modules(name, stream, version, context, arch): arch=m_arch, ) return modules + + +def clean_modules(): + """ Delete modules that have no host or no repo + """ + modules = Module.objects.filter( + host__isnull=True, + repo__isnull=True, + ) + mlen = modules.count() + if mlen == 0: + info_message.send(sender=None, text='No orphaned Modules found.') + else: + info_message.send(sender=None, text=f'{mlen} orphaned Modules found.') + modules.delete() diff --git a/packages/utils.py b/packages/utils.py index 6dc01241..2b906118 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -22,7 +22,7 @@ from arch.models import PackageArchitecture from packages.models import PackageName, Package, PackageUpdate, PackageCategory, PackageString -from patchman.signals import error_message +from patchman.signals import error_message, info_message def convert_package_to_packagestring(package): @@ -285,3 +285,53 @@ def get_matching_packages(name, epoch, version, release, p_type): packagetype=p_type, ) return packages + + +def clean_packageupdates(): + """ Removes PackageUpdate objects that are no longer linked to any hosts + """ + package_updates = list(PackageUpdate.objects.all()) + for update in package_updates: + if update.host_set.count() == 0: + text = f'Removing unused PackageUpdate {update}' + info_message.send(sender=None, text=text) + update.delete() + for duplicate in package_updates: + if update.oldpackage == duplicate.oldpackage and update.newpackage == duplicate.newpackage and \ + update.security == duplicate.security and update.id != duplicate.id: + text = f'Removing duplicate PackageUpdate: {update}' + info_message.send(sender=None, text=text) + for host in duplicate.host_set.all(): 
+ host.updates.remove(duplicate) + host.updates.add(update) + host.save() + duplicate.delete() + + +def clean_packages(): + """ Remove packages that are no longer in use + """ + packages = Package.objects.filter( + mirror__isnull=True, + host__isnull=True, + erratum__isnull=True, + module__isnull=True, + ) + plen = packages.count() + if plen == 0: + info_message.send(sender=None, text='No orphaned Packages found.') + else: + info_message.send(sender=None, text=f'Removing {plen} orphaned Packages') + packages.delete() + + +def clean_packagenames(): + """ Remove package names that are no longer in use + """ + names = PackageName.objects.filter(package__isnull=True) + nlen = names.count() + if nlen == 0: + info_message.send(sender=None, text='No orphaned PackageNames found.') + else: + info_message.send(sender=None, text=f'Removing {nlen} orphaned PackageNames') + names.delete() diff --git a/repos/utils.py b/repos/utils.py index 6148b82d..a832312e 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -35,12 +35,11 @@ from django.db import IntegrityError, DatabaseError, transaction from django.db.models import Q +from arch.models import PackageArchitecture from packages.models import Package, PackageString from packages.utils import parse_package_string, get_or_create_package, find_evr, \ convert_package_to_packagestring, convert_packagestring_to_package -from arch.models import PackageArchitecture -from util import get_url, download_url, response_is_valid, extract, \ - get_checksum, Checksum, has_setting_of_type +from util import get_url, download_url, response_is_valid, extract, get_checksum, Checksum, has_setting_of_type from patchman.signals import progress_info_s, progress_update_s, \ info_message, warning_message, error_message, debug_message @@ -992,3 +991,16 @@ def get_max_mirrors(): else: max_mirrors = 5 return max_mirrors + + +def clean_repos(): + """ Remove repositories that contain no mirrors + """ + from repos.models import Repository + repos = 
Repository.objects.filter(mirror__isnull=True) + rlen = repos.count() + if rlen == 0: + info_message.send(sender=None, text='No Repositories with zero Mirrors found.') + else: + info_message.send(sender=None, text=f'Removing {rlen} empty Repos') + repos.delete() diff --git a/sbin/patchman b/sbin/patchman index 28bce4ad..91020c88 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -20,29 +20,27 @@ import os import sys import argparse +from datetime import date, datetime -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'patchman.settings') -from django.conf import settings from django.core.exceptions import MultipleObjectsReturned from django.db.models import Count from django import setup as django_setup +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'patchman.settings') +from django.conf import settings # noqa django_setup() -from datetime import date, datetime -from taggit.models import TaggedItem - -from hosts.models import Host -from packages.models import Package, PackageName, PackageUpdate +from arch.utils import clean_architectures from errata.utils import update_errata, mark_errata_security_updates -from repos.models import Repository +from hosts.models import Host from modules.models import Module -from arch.models import PackageArchitecture, MachineArchitecture +from packages.utils import clean_packages, clean_packageupdates, clean_packagenames +from repos.models import Repository +from repos.utils import clean_repos from reports.models import Report from security.utils import update_cves, update_cwes from util import print_nocr, create_pbar, update_pbar, set_verbosity, get_verbosity, tz_aware_datetime -from patchman.signals import info_message, warning_message, error_message, debug_message, progress_info_s, \ - progress_update_s +from patchman.signals import info_message def get_host(host=None, action='Performing action'): @@ -140,93 +138,6 @@ def list_hosts(hosts=None): host.show() -def clean_packages(): - """ Remove packages that are no longer in use - 
""" - packages = Package.objects.filter(mirror__isnull=True, - host__isnull=True, - erratum__isnull=True, - module__isnull=True) - plen = packages.count() - if plen == 0: - info_message.send(sender=None, text='No orphaned Packages found.') - else: - create_pbar(f'Removing {plen} orphaned Packages:', plen) - for i, o in enumerate(packages): - p = Package.objects.get(name=o.name, - epoch=o.epoch, - version=o.version, - release=o.release, - arch=o.arch, - packagetype=o.packagetype) - p.delete() - update_pbar(i + 1) - - -def clean_arches(): - """ Remove architectures that are no longer in use - """ - parches = PackageArchitecture.objects.filter(package__isnull=True) - plen = parches.count() - - if plen == 0: - text = 'No orphaned Package Architectures found.' - info_message.send(sender=None, text=text) - else: - create_pbar(f'Removing {plen} orphaned P Arches:', plen) - for i, p in enumerate(parches): - a = PackageArchitecture.objects.get(name=p.name) - a.delete() - update_pbar(i + 1) - - marches = MachineArchitecture.objects.filter( - host__isnull=True, - repository__isnull=True, - ) - mlen = marches.count() - - if mlen == 0: - text = 'No orphaned Machine Architectures found.' 
- info_message.send(sender=None, text=text) - else: - create_pbar(f'Removing {mlen} orphaned M Arches:', mlen) - for i, m in enumerate(marches): - a = MachineArchitecture.objects.get(name=m.name) - a.delete() - update_pbar(i + 1) - - -def clean_package_names(): - """ Remove package names that are no longer in use - """ - names = PackageName.objects.filter(package__isnull=True) - nlen = names.count() - - if nlen == 0: - info_message.send(sender=None, text='No orphaned Package names found.') - else: - create_pbar(f'Removing {nlen} unused Package names:', nlen) - for i, packagename in enumerate(names): - packagename.delete() - update_pbar(i + 1) - - -def clean_repos(): - """ Remove repositories that contain no mirrors - """ - repos = Repository.objects.filter(mirror__isnull=True) - rlen = repos.count() - - if rlen == 0: - text = 'No Repositories with zero Mirrors found.' - info_message.send(sender=None, text=text) - else: - create_pbar(f'Removing {rlen} empty Repos:', rlen) - for i, repo in enumerate(repos): - repo.delete() - update_pbar(i + 1) - - def clean_reports(s_host=None): """ Delete old reports for all hosts, specify host for a single host. 
Reports with non existent hosts are only removed when no host is @@ -243,8 +154,7 @@ def clean_reports(s_host=None): reports = Report.objects.filter(accessed__lt=timestamp) rlen = reports.count() if rlen != 0: - create_pbar(f'Removing {rlen} extraneous Reports:', - rlen) + create_pbar(f'Removing {rlen} extraneous Reports:', rlen) for i, report in enumerate(reports): report.delete() update_pbar(i + 1) @@ -266,28 +176,6 @@ def clean_modules(): update_pbar(i + 1) -def clean_tags(): - """ Delete unused tags - """ - tagged_items = list(TaggedItem.objects.all()) - to_delete = [] - - for t in tagged_items: - hostid = t.object_id - try: - # tags are only used for hosts for now - Host.objects.get(pk=hostid) - except Host.DoesNotExist: - to_delete.append(t) - - tlen = len(to_delete) - if tlen != 0: - create_pbar(f'Removing {tlen} unused tagged items', tlen) - for i, t in enumerate(to_delete): - t.delete() - update_pbar(i + 1) - - def host_updates_alt(host=None): """ Find updates for all hosts, specify host for a single host """ @@ -428,11 +316,11 @@ def toggle_host_hro(hosts=None, host_repos_only=True): """ Toggle host_repos_only for a host or matching pattern of hosts """ if host_repos_only: - toggle = "Setting" + toggle = 'Setting' else: - toggle = "Unsetting" + toggle = 'Unsetting' if hosts: - matching_hosts = get_hosts(hosts, f"{toggle} host_repos_only") + matching_hosts = get_hosts(hosts, f'{toggle} host_repos_only') for host in matching_hosts: info_message.send(sender=None, text=str(host)) host.host_repos_only = host_repos_only @@ -443,11 +331,11 @@ def toggle_host_check_dns(hosts=None, check_dns=True): """ Toggle check_dns for a host or matching pattern of hosts """ if check_dns: - toggle = "Setting" + toggle = 'Setting' else: - toggle = "Unsetting" + toggle = 'Unsetting' if hosts: - matching_hosts = get_hosts(hosts, f"{toggle} check_dns") + matching_hosts = get_hosts(hosts, f'{toggle} check_dns') for host in matching_hosts: info_message.send(sender=None, 
text=str(host)) host.check_dns = check_dns @@ -490,42 +378,16 @@ def process_reports(host=None, force=False): report.process(find_updates=False) -def clean_updates(): - """ Removes PackageUpdate objects that are no longer - linked to any hosts - """ - package_updates = list(PackageUpdate.objects.all()) - - for update in package_updates: - if update.host_set.count() == 0: - text = f'Removing unused update {update}' - info_message.send(sender=None, text=text) - update.delete() - for duplicate in package_updates: - if update.oldpackage == duplicate.oldpackage and \ - update.newpackage == duplicate.newpackage and \ - update.security == duplicate.security and \ - update.id != duplicate.id: - text = f'Removing duplicate update: {update}' - info_message.send(sender=None, text=text) - for host in duplicate.host_set.all(): - host.updates.remove(duplicate) - host.updates.add(update) - host.save() - duplicate.delete() - - def dbcheck(): """ Runs all clean_* functions to check database consistency """ - clean_updates() + clean_packageupdates() clean_packages() - clean_package_names() - clean_arches() + clean_packagenames() + clean_architectures() clean_repos() clean_modules() - clean_updates() - clean_tags() + clean_packageupdates() def collect_args(): diff --git a/util/tasks.py b/util/tasks.py new file mode 100644 index 00000000..50c849d0 --- /dev/null +++ b/util/tasks.py @@ -0,0 +1,36 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from celery import shared_task + +from arch.utils import clean_architectures +from modules.utils import clean_modules +from packages.utils import clean_packages, clean_packageupdates, clean_packagenames +from repos.utils import clean_repos + + +@shared_task +def clean_database(): + """ Task to check the database and remove orphaned objects + Runs all clean_* functions to check database consistency + """ + clean_packageupdates() + clean_packages() + clean_packagenames() + clean_architectures() + clean_repos() + clean_modules() + clean_packageupdates() From 4c0f7431ab57ff9b06466de61eb5065169e2cfc4 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 11:10:35 -0500 Subject: [PATCH 059/199] ubuntu errata concurrent processing --- errata/sources/distros/ubuntu.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py index 3dbc97d2..ea2684ee 100644 --- a/errata/sources/distros/ubuntu.py +++ b/errata/sources/distros/ubuntu.py @@ -14,6 +14,7 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see +import concurrent.futures import csv import os import json @@ -70,13 +71,24 @@ def parse_usn_data(data): advisories = json.loads(data) accepted_releases = get_accepted_ubuntu_codenames() elen = len(advisories) - ptext = f'Processing {elen} Errata:' + ptext = f'Processing {elen} Ubuntu Errata:' progress_info_s.send(sender=None, ptext=ptext, plen=elen) - for i, (usn_id, advisory) in enumerate(advisories.items()): - progress_update_s.send(sender=None, index=i + 1) + i = 0 + with concurrent.futures.ProcessPoolExecutor(max_workers=20) as executor: + futures = [executor.submit(process_usn, usn_id, advisory, accepted_releases) for usn_id, advisory in advisories.items()] + for future in concurrent.futures.as_completed(futures): + i += 1 + progress_update_s.send(sender=None, index=i + 1) + + +def process_usn(usn_id, advisory, accepted_releases): + """ Process a single USN advisory + """ + from errata.utils import get_or_create_erratum + try: affected_releases = advisory.get('releases', {}).keys() if not release_is_affected(affected_releases, accepted_releases): - continue + return name = f'USN-{usn_id}' issue_date = int(advisory.get('timestamp')) synopsis = advisory.get('title') @@ -89,6 +101,8 @@ def parse_usn_data(data): add_ubuntu_erratum_osreleases(e, affected_releases, accepted_releases) add_ubuntu_erratum_references(e, usn_id, advisory) add_ubuntu_erratum_packages(e, advisory) + except Exception as ex: + print(ex) def add_ubuntu_erratum_osreleases(e, affected_releases, accepted_releases): From 4f628791e783dd5c017011d7946fea19b60f8368 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 13 Feb 2025 10:00:57 -0500 Subject: [PATCH 060/199] concurrrent arch linux errata processing --- errata/sources/distros/arch.py | 34 ++++++++++++++++++++++++++++++---- 1 file changed, 30 insertions(+), 4 deletions(-) diff --git a/errata/sources/distros/arch.py b/errata/sources/distros/arch.py index 4c8f6a4b..dca77880 100644 --- a/errata/sources/distros/arch.py +++ 
b/errata/sources/distros/arch.py @@ -14,12 +14,14 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see +import concurrent.futures import json from operatingsystems.models import OSRelease, OSVariant from packages.models import Package from packages.utils import find_evr, get_matching_packages from util import get_url, download_url +from patchman.signals import error_message, progress_info_s, progress_update_s def update_arch_errata(): @@ -28,7 +30,7 @@ def update_arch_errata(): """ add_arch_linux_osrelease() advisories = download_arch_errata() - parse_arch_errata(advisories) + parse_arch_errata_concurrently(advisories) def download_arch_errata(): @@ -43,9 +45,31 @@ def download_arch_errata(): def parse_arch_errata(advisories): """ Parse Arch Linux Errata Advisories """ - from errata.utils import get_or_create_erratum osrelease = OSRelease.objects.get(name='Arch Linux') for advisory in advisories: + process_arch_erratum(advisory, osrelease) + + +def parse_arch_errata_concurrently(advisories): + """ Parse Arch Linux Errata Advisories + """ + osrelease = OSRelease.objects.get(name='Arch Linux') + elen = len(advisories) + ptext = 'Processing Arch Linux Advisories:' + progress_info_s.send(sender=None, ptext=ptext, plen=elen) + i = 0 + with concurrent.futures.ProcessPoolExecutor(max_workers=3) as executor: + futures = [executor.submit(process_arch_erratum, advisory, osrelease) for advisory in advisories] + for future in concurrent.futures.as_completed(futures): + i += 1 + progress_update_s.send(sender=None, index=i + 1) + + +def process_arch_erratum(advisory, osrelease): + """ Process a single Arch Linux Erratum + """ + from errata.utils import get_or_create_erratum + try: name = advisory.get('name') issue_date = advisory.get('date') package = advisory.get('package') @@ -60,6 +84,8 @@ def parse_arch_errata(advisories): e.osreleases.add(osrelease) add_arch_erratum_references(e, advisory) add_arch_erratum_packages(e, 
advisory) + except Exception as exc: + error_message.send(sender=None, text=exc) def add_arch_linux_osrelease(): @@ -82,8 +108,8 @@ def add_arch_erratum_references(e, advisory): e.add_reference('ASA', url) raw_url = f'{url}/raw' res = get_url(raw_url) - raw_data = download_url(res, f'Downloading Arch Linux Erratum Reference: {raw_url}') - parse_arch_erratum_raw(e, raw_data.decode()) + data = res.content + parse_arch_erratum_raw(e, data.decode()) def parse_arch_erratum_raw(e, data): From b88fc61b2371179c044edf40b0fbdc841703552d Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 17:56:39 -0500 Subject: [PATCH 061/199] add helper function to get settings --- errata/sources/distros/alma.py | 12 ++++++------ errata/sources/distros/centos.py | 12 ++++++------ errata/sources/distros/debian.py | 14 +++++++------- errata/sources/distros/ubuntu.py | 17 ++++++++--------- hosts/templatetags/report_alert.py | 12 ++++++------ repos/models.py | 12 ++++++------ repos/utils.py | 12 ++++++------ util/__init__.py | 12 ++++++++++++ util/templatetags/common.py | 12 ++++++------ util/views.py | 12 ++++++------ 10 files changed, 69 insertions(+), 58 deletions(-) diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index 6d7d6ed0..a3c85b6f 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -17,11 +17,10 @@ import json from django.db import transaction -from django.conf import settings from packages.models import Package from packages.utils import get_or_create_package, parse_package_string -from util import get_url, download_url, has_setting_of_type +from util import get_url, download_url, get_setting_of_type from patchman.signals import progress_info_s, progress_update_s @@ -32,10 +31,11 @@ def update_alma_errata(): and process advisories """ default_alma_releases = [8, 9] - if has_setting_of_type('ALMA_RELEASES', list): - alma_releases = settings.ALMA_RELEASES - else: - alma_releases = default_alma_releases + 
alma_releases = get_setting_of_type( + setting_name='ALMA_RELEASES', + setting_type=list, + default=default_alma_releases, + ) for release in alma_releases: advisories = download_alma_advisories(release) process_alma_errata(release, advisories) diff --git a/errata/sources/distros/centos.py b/errata/sources/distros/centos.py index 43f73556..aca5a48c 100644 --- a/errata/sources/distros/centos.py +++ b/errata/sources/distros/centos.py @@ -17,13 +17,12 @@ import re from lxml import etree -from django.conf import settings from django.db import transaction from packages.models import Package from packages.utils import parse_package_string, get_or_create_package from patchman.signals import error_message, progress_info_s, progress_update_s -from util import bunzip2, get_url, download_url, get_sha1, has_setting_of_type +from util import bunzip2, get_url, download_url, get_sha1, get_setting_of_type def update_centos_errata(): @@ -152,10 +151,11 @@ def accepted_centos_release(releases): """ Check if we accept the releases that the erratum pertains to If any release is accepted we return True, else False """ - if has_setting_of_type('MIN_CENTOS_RELEASE', int): - min_release = settings.MIN_CENTOS_RELEASE - else: - min_release = 7 + min_release = get_setting_of_type( + setting_name='MIN_CENTOS_RELEASE', + setting_type=int, + default=7, + ) acceptable_release = False for release in releases: if int(release) >= min_release: diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index 60d03ae0..322fda63 100644 --- a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -20,14 +20,13 @@ from debian.deb822 import Dsc from io import StringIO -from django.conf import settings from django.db.utils import IntegrityError from operatingsystems.models import OSRelease from packages.models import Package from packages.utils import get_or_create_package, find_evr -from util import get_url, download_url, has_setting_of_type -from patchman.signals 
import progress_info_s, progress_update_s +from util import get_url, download_url, get_setting_of_type +from patchman.signals import error_message, progress_info_s, progress_update_s def update_debian_errata(): @@ -178,10 +177,11 @@ def get_accepted_debian_codenames(): Can be overridden by specifying DEBIAN_CODENAMES in settings """ default_codenames = ['bookworm', 'bullseye'] - if has_setting_of_type('DEBIAN_CODENAMES', list): - accepted_codenames = settings.DEBIAN_CODENAMES - else: - accepted_codenames = default_codenames + accepted_codenames = get_setting_of_type( + setting_name='DEBIAN_CODENAMES', + setting_type=list, + default_value=default_codenames, + ) return accepted_codenames diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py index ea2684ee..55597951 100644 --- a/errata/sources/distros/ubuntu.py +++ b/errata/sources/distros/ubuntu.py @@ -21,13 +21,11 @@ from io import StringIO from urllib.parse import urlparse -from django.conf import settings - from operatingsystems.models import OSRelease, OSVariant from packages.models import Package, PackageName from packages.utils import get_or_create_package, parse_package_string, find_evr -from patchman.signals import progress_info_s, progress_update_s, error_message -from util import get_url, download_url, get_sha256, bunzip2, has_setting_of_type +from util import get_url, download_url, get_sha256, bunzip2, get_setting_of_type +from patchman.signals import error_message, progress_info_s, progress_update_s def update_ubuntu_errata(): @@ -181,11 +179,12 @@ def get_accepted_ubuntu_codenames(): """ Get acceptable Ubuntu OS codenames Can be overridden by specifying UBUNTU_CODENAMES in settings """ - default_codenames = ['bionic', 'focal', 'jammy', 'noble'] - if has_setting_of_type('UBUNTU_CODENAMES', list): - accepted_codenames = settings.UBUNTU_CODENAMES - else: - accepted_codenames = default_codenames + default_codenames = ['focal', 'jammy', 'noble'] + accepted_codenames = 
get_setting_of_type( + setting_name='UBUNTU_CODENAMES', + setting_type=list, + default_value=default_codenames, + ) return accepted_codenames diff --git a/hosts/templatetags/report_alert.py b/hosts/templatetags/report_alert.py index b005a056..3a3e3a9a 100644 --- a/hosts/templatetags/report_alert.py +++ b/hosts/templatetags/report_alert.py @@ -17,13 +17,12 @@ from datetime import timedelta -from django.conf import settings from django.template import Library from django.utils.html import format_html from django.templatetags.static import static from django.utils import timezone -from util import has_setting_of_type +from util import get_setting_of_type register = Library() @@ -32,10 +31,11 @@ def report_alert(lastreport): html = '' alert_icon = static('img/icon-alert.gif') - if has_setting_of_type('DAYS_WITHOUT_REPORT', int): - days = settings.DAYS_WITHOUT_REPORT - else: - days = 14 + days = get_setting_of_type( + setting_name='DAYS_WITHOUT_REPORT', + setting_type=int, + default=14, + ) if lastreport < (timezone.now() - timedelta(days=days)): html = f'Outdated Report' return format_html(html) diff --git a/repos/models.py b/repos/models.py index dd3156e0..f00db4bd 100644 --- a/repos/models.py +++ b/repos/models.py @@ -15,13 +15,12 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see -from django.conf import settings from django.db import models from django.urls import reverse from arch.models import MachineArchitecture from packages.models import Package -from util import has_setting_of_type +from util import get_setting_of_type from repos.utils import refresh_deb_repo, refresh_rpm_repo, refresh_arch_repo, refresh_gentoo_repo, \ update_mirror_packages @@ -169,10 +168,11 @@ def fail(self): text = f'No usable mirror found at {self.url}' error_message.send(sender=None, text=text) default_max_mirror_failures = 28 - if has_setting_of_type('MAX_MIRROR_FAILURES', int): - max_mirror_failures = settings.MAX_MIRROR_FAILURES - else: - max_mirror_failures = default_max_mirror_failures + max_mirror_failures = get_setting_of_type( + setting_name='MAX_MIRROR_FAILURES', + setting_type=int, + default=default_max_mirror_failures + ) self.fail_count = self.fail_count + 1 if max_mirror_failures == -1: text = f'Mirror has failed {self.fail_count} times, but MAX_MIRROR_FAILURES=-1, not disabling refresh' diff --git a/repos/utils.py b/repos/utils.py index a832312e..c95e1127 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -31,7 +31,6 @@ from fnmatch import fnmatch from tenacity import RetryError -from django.conf import settings from django.db import IntegrityError, DatabaseError, transaction from django.db.models import Q @@ -39,7 +38,7 @@ from packages.models import Package, PackageString from packages.utils import parse_package_string, get_or_create_package, find_evr, \ convert_package_to_packagestring, convert_packagestring_to_package -from util import get_url, download_url, response_is_valid, extract, get_checksum, Checksum, has_setting_of_type +from util import get_url, download_url, response_is_valid, extract, get_checksum, Checksum, get_setting_of_type from patchman.signals import progress_info_s, progress_update_s, \ info_message, warning_message, error_message, debug_message @@ -986,10 +985,11 @@ def find_best_repo(package, hostrepos): def 
get_max_mirrors(): """ Find the max number of mirrors for refresh """ - if has_setting_of_type('MAX_MIRRORS', int): - max_mirrors = settings.MAX_MIRRORS - else: - max_mirrors = 5 + max_mirrors = get_setting_of_type( + setting_name='MAX_MIRRORS', + setting_type=int, + default=5, + ) return max_mirrors diff --git a/util/__init__.py b/util/__init__.py index 3855ad7a..f3bf549a 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -170,6 +170,18 @@ def has_setting_of_type(setting_name, expected_type): return isinstance(setting_value, expected_type) +def get_setting_of_type(setting_name, setting_type, default): + """ Checks if the Django settings module has the specified attribute + and if it is of the expected type + Returns the value if the setting exists and is of the expected type, default otherwise. + """ + if has_setting_of_type(setting_name, setting_type): + setting_value = getattr(settings, setting_name) + return setting_value + else: + return default + + def gunzip(contents): """ gunzip contents in memory and return the data """ diff --git a/util/templatetags/common.py b/util/templatetags/common.py index 497c4714..ad441df9 100644 --- a/util/templatetags/common.py +++ b/util/templatetags/common.py @@ -22,14 +22,13 @@ from datetime import datetime, timedelta from urllib.parse import urlencode -from django.conf import settings from django.template import Library from django.template.loader import get_template from django.utils.html import format_html from django.templatetags.static import static from django.core.paginator import Paginator -from util import has_setting_of_type +from util import get_setting_of_type register = Library() @@ -103,8 +102,9 @@ def searchform(terms): @register.simple_tag def reports_timedelta(): - if has_setting_of_type('DAYS_WITHOUT_REPORT', int): - days = settings.DAYS_WITHOUT_REPORT - else: - days = 14 + days = get_setting_of_type( + setting_name='DAYS_WITHOUT_REPORT', + setting_type=int, + default=14, + ) return 
naturaltime(datetime.now() - timedelta(days=days)) diff --git a/util/views.py b/util/views.py index bdc5687c..25567272 100644 --- a/util/views.py +++ b/util/views.py @@ -17,7 +17,6 @@ from datetime import datetime, timedelta -from django.conf import settings from django.shortcuts import render from django.contrib.auth.decorators import login_required from django.contrib.sites.models import Site @@ -28,7 +27,7 @@ from repos.models import Repository, Mirror from packages.models import Package from reports.models import Report -from util import has_setting_of_type +from util import get_setting_of_type @login_required @@ -46,10 +45,11 @@ def dashboard(request): packages = Package.objects.all() # host issues - if has_setting_of_type('DAYS_WITHOUT_REPORT', int): - days = settings.DAYS_WITHOUT_REPORT - else: - days = 14 + days = get_setting_of_type( + setting_name='DAYS_WITHOUT_REPORT', + setting_type=int, + default=14, + ) last_report_delta = datetime.now() - timedelta(days=days) stale_hosts = hosts.filter(lastreport__lt=last_report_delta) norepo_hosts = hosts.filter(repos__isnull=True, osvariant__osrelease__repos__isnull=True) # noqa From fdd50b918258f79d9d1272c8d647cb7a20b206fd Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 17:59:08 -0500 Subject: [PATCH 062/199] celery task updates --- errata/tasks.py | 46 +++++++++++++++++++++++++++------- errata/utils.py | 39 +--------------------------- etc/patchman/local_settings.py | 2 +- sbin/patchman | 3 ++- security/tasks.py | 4 +-- 5 files changed, 43 insertions(+), 51 deletions(-) diff --git a/errata/tasks.py b/errata/tasks.py index 438c33e0..7c962f6e 100644 --- a/errata/tasks.py +++ b/errata/tasks.py @@ -16,23 +16,51 @@ from celery import shared_task -from errata.models import Erratum from security.tasks import update_cves, update_cwes +from util import get_setting_of_type +from errata.sources.distros.arch import update_arch_errata +from errata.sources.distros.alma import update_alma_errata +from 
errata.sources.distros.debian import update_debian_errata +from errata.sources.distros.centos import update_centos_errata +from errata.sources.distros.rocky import update_rocky_errata +from errata.sources.distros.ubuntu import update_ubuntu_errata -@shared_task -def update_erratum(erratum_id): - """ Task to update an erratum +def update_errata(): + """ Update all distros errata """ - erratum = Erratum.objects.get(id=erratum_id) - erratum.update() + errata_os_updates = get_setting_of_type( + setting_name='ERRATA_OS_UPDATES', + setting_type=list, + default=['rocky', 'alma', 'arch', 'ubuntu', 'debian', 'rhel', 'suse', 'amazon'], + ) +# if 'arch' in errata_os_updates: +# update_arch_errata() +# if 'alma' in errata_os_updates: +# update_alma_errata() + if 'rocky' in errata_os_updates: + update_rocky_errata() + if 'debian' in errata_os_updates: + update_debian_errata() + if 'ubuntu' in errata_os_updates: + update_ubuntu_errata() + if 'rhel' in errata_os_updates: + # update_rhel_errata() + pass + if 'suse' in errata_os_updates: + # update_suse_errata() + pass + if 'amazon' in errata_os_updates: + # update_amazon_errata() + pass + if 'centos' in errata_os_updates: + update_centos_errata() @shared_task -def update_errata(): +def update_errata_and_cves(): """ Task to update all errata """ - for e in Erratum.objects.all(): - update_erratum.delay(e.id) + update_errata.delay() update_cves.delay() update_cwes.delay() diff --git a/errata/utils.py b/errata/utils.py index 20e5f742..94d9fcef 100644 --- a/errata/utils.py +++ b/errata/utils.py @@ -16,50 +16,13 @@ from urllib.parse import urlparse -from django.conf import settings from django.db import transaction -from util import tz_aware_datetime, has_setting_of_type +from util import tz_aware_datetime from errata.models import Erratum -from errata.sources.distros.arch import update_arch_errata -from errata.sources.distros.alma import update_alma_errata -from errata.sources.distros.debian import update_debian_errata -from 
errata.sources.distros.centos import update_centos_errata -from errata.sources.distros.rocky import update_rocky_errata -from errata.sources.distros.ubuntu import update_ubuntu_errata from patchman.signals import progress_info_s, progress_update_s -def update_errata(): - """ Update all distros errata - """ - if has_setting_of_type('ERRATA_OS_UPDATES', list): - errata_os_updates = settings.ERRATA_OS_UPDATES - else: - errata_os_updates = ['rocky', 'alma', 'arch', 'ubuntu', 'debian', 'rhel', 'suse', 'amazon'] - if 'arch' in errata_os_updates: - update_arch_errata() - if 'alma' in errata_os_updates: - update_alma_errata() - if 'rocky' in errata_os_updates: - update_rocky_errata() - if 'debian' in errata_os_updates: - update_debian_errata() - if 'ubuntu' in errata_os_updates: - update_ubuntu_errata() - if 'rhel' in errata_os_updates: - # update_rhel_errata() - pass - if 'suse' in errata_os_updates: - # update_suse_errata() - pass - if 'amazon' in errata_os_updates: - # update_amazon_errata() - pass - if 'centos' in errata_os_updates: - update_centos_errata() - - def get_or_create_erratum(name, e_type, issue_date, synopsis): """ Get or create an Erratum object. 
Returns the object and created """ diff --git a/etc/patchman/local_settings.py b/etc/patchman/local_settings.py index 5a0c4985..74a696a1 100644 --- a/etc/patchman/local_settings.py +++ b/etc/patchman/local_settings.py @@ -65,7 +65,7 @@ 'schedule': crontab(hour=4, minute=00), }, 'update_errata_cves_cwes_every_12_hours': { - 'task': 'errata.tasks.update_errata', + 'task': 'errata.tasks.update_errata_and_cves', 'schedule': timedelta(hours=12), }, 'run_database_maintenance_daily': { diff --git a/sbin/patchman b/sbin/patchman index 91020c88..edb169d0 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -31,7 +31,8 @@ from django.conf import settings # noqa django_setup() from arch.utils import clean_architectures -from errata.utils import update_errata, mark_errata_security_updates +from errata.utils import mark_errata_security_updates +from errata.tasks import update_errata from hosts.models import Host from modules.models import Module from packages.utils import clean_packages, clean_packageupdates, clean_packagenames diff --git a/security/tasks.py b/security/tasks.py index d99b9579..9250a4ae 100644 --- a/security/tasks.py +++ b/security/tasks.py @@ -24,7 +24,7 @@ def update_cve(cve_id): """ Task to update a CVE """ cve = CVE.objects.get(id=cve_id) - cve.update() + cve.download_cve_data() @shared_task @@ -40,7 +40,7 @@ def update_cwe(cwe_id): """ Task to update a CWE """ cwe = CWE.objects.get(id=cwe_id) - cwe.update() + cwe.download_cwe_data() @shared_task From 52cd127ebb7593577dfdc19374a501b715b21989 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 17:59:53 -0500 Subject: [PATCH 063/199] update manage.py to match upstream --- manage.py | 30 ++++++++++++++---------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/manage.py b/manage.py index 47e0d31c..19bd04b4 100755 --- a/manage.py +++ b/manage.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2019-2021 Marcus Furlong +# Copyright 2019-2025 Marcus Furlong # # This file is 
part of Patchman. # @@ -19,21 +19,19 @@ import os import sys -if __name__ == '__main__': - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "patchman.settings") + +def main(): + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'patchman.settings') try: from django.core.management import execute_from_command_line - except ImportError: - # The above import may fail for some other reason. Ensure that the - # issue is really that Django is missing to avoid masking other - # exceptions on Python 2. - try: - import django # noqa - except ImportError: - raise ImportError( - "Couldn't import Django. Are you sure it's installed and " - "available on your PYTHONPATH environment variable? Did you " - "forget to activate a virtual environment?" - ) - raise + except ImportError as exc: + raise ImportError( + 'Could not import Django. Are you sure it is installed and ' + 'available on your PYTHONPATH environment variable? Did you ' + 'forget to activate a virtual environment?' + ) from exc execute_from_command_line(sys.argv) + + +if __name__ == '__main__': + main() From c910a2c927cdbd839240d594263eed078c4feabf Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 18:01:36 -0500 Subject: [PATCH 064/199] use BEGIN IMMEDIATE on sqlite3 --- etc/patchman/local_settings.py | 6 +++++- patchman/sqlite3/base.py | 11 +++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 patchman/sqlite3/base.py diff --git a/etc/patchman/local_settings.py b/etc/patchman/local_settings.py index 74a696a1..ab9ceb1b 100644 --- a/etc/patchman/local_settings.py +++ b/etc/patchman/local_settings.py @@ -8,8 +8,12 @@ DATABASES = { 'default': { - 'ENGINE': 'django.db.backends.sqlite3', +# 'ENGINE': 'django.db.backends.sqlite3', # noqa disabled until django 5.1 is in use, see https://blog.pecar.me/django-sqlite-dblock + 'ENGINE': 'patchman.sqlite3', 'NAME': '/var/lib/patchman/db/patchman.db', + 'OPTIONS': { + 'timeout': 30 + } } } diff --git a/patchman/sqlite3/base.py 
b/patchman/sqlite3/base.py new file mode 100644 index 00000000..308e0563 --- /dev/null +++ b/patchman/sqlite3/base.py @@ -0,0 +1,11 @@ +# temporary fix for 'database is locked' error on sqlite3 +# can be removed when using django 5.1 and BEGIN IMMEDIATE in OPTIONS +# see https://blog.pecar.me/django-sqlite-dblock for more details + +from django.db.backends.sqlite3 import base + + +class DatabaseWrapper(base.DatabaseWrapper): + def _start_transaction_under_autocommit(self): + # Acquire a write lock immediately for transactions + self.cursor().execute('BEGIN IMMEDIATE') From 91d2d93bb833c3ed35c9a78fa49878688d68ab92 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 18:05:05 -0500 Subject: [PATCH 065/199] remove extra space --- repos/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/views.py b/repos/views.py index 5b0fa955..0f575d30 100644 --- a/repos/views.py +++ b/repos/views.py @@ -84,7 +84,7 @@ def repo_list(request): page = paginator.page(paginator.num_pages) filter_list = [] - filter_list.append(Filter(request, 'OS Release', 'osrelease', OSRelease.objects.filter(repos__in=repos))) + filter_list.append(Filter(request, 'OS Release', 'osrelease', OSRelease.objects.filter(repos__in=repos))) filter_list.append(Filter(request, 'Enabled', 'enabled', {'true': 'Yes', 'false': 'No'})) filter_list.append(Filter(request, 'Security', 'security', {'true': 'Yes', 'false': 'No'})) filter_list.append(Filter(request, 'Repo Type', 'repotype', Repository.REPO_TYPES)) From 1402ec2a9af0135f5b5941c5e819071d3dfabd04 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 17:58:22 -0500 Subject: [PATCH 066/199] concurrent errata processing updates --- errata/sources/distros/alma.py | 68 +++++++++++++++++++++++--------- errata/sources/distros/arch.py | 25 +++++++++--- errata/sources/distros/debian.py | 68 ++++++++++++++++++++++++++------ errata/sources/distros/rocky.py | 68 ++++++++++++++++++-------------- 
errata/sources/distros/ubuntu.py | 46 +++++++++++++++------ errata/tasks.py | 8 ++-- 6 files changed, 201 insertions(+), 82 deletions(-) diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index a3c85b6f..629fa6a0 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -14,6 +14,7 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see +import concurrent.futures import json from django.db import transaction @@ -24,7 +25,7 @@ from patchman.signals import progress_info_s, progress_update_s -def update_alma_errata(): +def update_alma_errata(concurrent_processing=True): """ Update Alma Linux advisories from errata.almalinux.org: https://errata.almalinux.org/8/errata.full.json https://errata.almalinux.org/9/errata.full.json @@ -38,7 +39,7 @@ def update_alma_errata(): ) for release in alma_releases: advisories = download_alma_advisories(release) - process_alma_errata(release, advisories) + process_alma_errata(release, advisories, concurrent_processing) def download_alma_advisories(release): @@ -52,29 +53,58 @@ def download_alma_advisories(release): return advisories -def process_alma_errata(release, advisories): +def process_alma_errata(release, advisories, concurrent_processing): """ Process Alma Linux Errata """ - from errata.utils import get_or_create_erratum + if concurrent_processing: + process_alma_errata_concurrently(release, advisories) + else: + process_alma_errata_serially(release, advisories) + + +def process_alma_errata_serially(release, advisories): + """ Process Alma Linux Errata serially + """ elen = len(advisories) - ptext = f'Processing {elen} Errata:' + ptext = f'Processing {elen} Alma Errata:' progress_info_s.send(sender=None, ptext=ptext, plen=elen) for i, advisory in enumerate(advisories): + process_alma_erratum(release, advisory) progress_update_s.send(sender=None, index=i + 1) - erratum_name = advisory.get('id') - issue_date = 
advisory.get('issued_date') - synopsis = advisory.get('title') - e_type = advisory.get('type') - e, created = get_or_create_erratum( - name=erratum_name, - e_type=e_type, - issue_date=issue_date, - synopsis=synopsis, - ) - add_alma_erratum_osreleases(e, release) - add_alma_erratum_references(e, advisory) - add_alma_erratum_packages(e, advisory) - add_alma_erratum_modules(e, advisory) + + +def process_alma_errata_concurrently(release, advisories): + """ Process Alma Linux Errata concurrently + """ + elen = len(advisories) + ptext = f'Processing {elen} Alma Errata:' + progress_info_s.send(sender=None, ptext=ptext, plen=elen) + i = 0 + with concurrent.futures.ProcessPoolExecutor(max_workers=10) as executor: + futures = [executor.submit(process_alma_erratum, release, advisory) for advisory in advisories] + for future in concurrent.futures.as_completed(futures): + i += 1 + progress_update_s.send(sender=None, index=i + 1) + + +def process_alma_erratum(release, advisory): + """ Process a single Alma Linux Erratum + """ + from errata.utils import get_or_create_erratum + erratum_name = advisory.get('id') + issue_date = advisory.get('issued_date') + synopsis = advisory.get('title') + e_type = advisory.get('type') + e, created = get_or_create_erratum( + name=erratum_name, + e_type=e_type, + issue_date=issue_date, + synopsis=synopsis, + ) + add_alma_erratum_osreleases(e, release) + add_alma_erratum_references(e, advisory) + add_alma_erratum_packages(e, advisory) + add_alma_erratum_modules(e, advisory) def add_alma_erratum_osreleases(e, release): diff --git a/errata/sources/distros/arch.py b/errata/sources/distros/arch.py index dca77880..a3e296d4 100644 --- a/errata/sources/distros/arch.py +++ b/errata/sources/distros/arch.py @@ -24,13 +24,13 @@ from patchman.signals import error_message, progress_info_s, progress_update_s -def update_arch_errata(): +def update_arch_errata(concurrent_processing=False): """ Update Arch Linux Errata from the following sources: 
https://security.archlinux.org/advisories.json """ add_arch_linux_osrelease() advisories = download_arch_errata() - parse_arch_errata_concurrently(advisories) + parse_arch_errata(advisories, concurrent_processing) def download_arch_errata(): @@ -42,20 +42,33 @@ def download_arch_errata(): return json.loads(advisories) -def parse_arch_errata(advisories): +def parse_arch_errata(advisories, concurrent_processing): """ Parse Arch Linux Errata Advisories """ + if concurrent_processing: + parse_arch_errata_concurrently(advisories) + else: + parse_arch_errata_serially(advisories) + + +def parse_arch_errata_serially(advisories): + """ Parse Arch Linux Errata Advisories serially + """ osrelease = OSRelease.objects.get(name='Arch Linux') - for advisory in advisories: + elen = len(advisories) + ptext = f'Processing {elen} Arch Advisories:' + progress_info_s.send(sender=None, ptext=ptext, plen=elen) + for i, advisory in enumerate(advisories): process_arch_erratum(advisory, osrelease) + progress_update_s.send(sender=None, index=i + 1) def parse_arch_errata_concurrently(advisories): - """ Parse Arch Linux Errata Advisories + """ Parse Arch Linux Errata Advisories concurrently """ osrelease = OSRelease.objects.get(name='Arch Linux') elen = len(advisories) - ptext = 'Processing Arch Linux Advisories:' + ptext = f'Processing {elen} Arch Advisories:' progress_info_s.send(sender=None, ptext=ptext, plen=elen) i = 0 with concurrent.futures.ProcessPoolExecutor(max_workers=3) as executor: diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index 322fda63..31454f11 100644 --- a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -14,11 +14,13 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see +import concurrent.futures import csv import re from datetime import datetime from debian.deb822 import Dsc from io import StringIO +from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential from django.db.utils import IntegrityError @@ -29,7 +31,7 @@ from patchman.signals import error_message, progress_info_s, progress_update_s -def update_debian_errata(): +def update_debian_errata(concurrent_processing=True): """ Update Debian errata using: https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DSA/list https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DSA/list @@ -39,7 +41,9 @@ def update_debian_errata(): dsas = download_debian_dsa_advisories() dlas = download_debian_dla_advisories() advisories = dsas + dlas - process_debian_errata(advisories) + accepted_codenames = get_accepted_debian_codenames() + errata = parse_debian_errata(advisories, accepted_codenames) + create_debian_errata(errata, accepted_codenames, concurrent_processing) def download_debian_dsa_advisories(): @@ -60,12 +64,11 @@ def download_debian_dla_advisories(): return data.decode() -def process_debian_errata(advisories): - """ Parse a Debian DSA/DLA file for security advisories +def parse_debian_errata(advisories, accepted_codenames): + """ Parse Debian DSA/DLA files for security advisories """ distro_pattern = re.compile(r'^\t\[(.+?)\] - .*') title_pattern = re.compile(r'^\[(.+?)\] (.+?) 
(.+?)[ ]+[-]+ (.*)') - accepted_codenames = get_accepted_debian_codenames() errata = [] e = {'packages': {}, 'cve_ids': [], 'releases': []} for line in advisories.splitlines(): @@ -88,7 +91,7 @@ def process_debian_errata(advisories): e['packages'][release].append(parse_debian_erratum_packages(line, accepted_codenames)) # add the last one errata = add_errata_by_codename(errata, e, accepted_codenames) - create_debian_errata(errata, accepted_codenames) + return errata def add_errata_by_codename(errata, e, accepted_codenames): @@ -115,13 +118,45 @@ def parse_debian_erratum_advisory(e, match): return e -def create_debian_errata(errata, accepted_codenames): - from errata.utils import get_or_create_erratum +def create_debian_errata(errata, accepted_codenames, concurrent_processing): + """ Create Debian Errata + """ + if concurrent_processing: + create_debian_errata_concurrently(errata, accepted_codenames) + else: + create_debian_errata_serially(errata, accepted_codenames) + + +def create_debian_errata_serially(errata, accepted_codenames): + """ Create Debian Errata Serially + """ elen = len(errata) text = f'Processing {elen} Debian Errata:' progress_info_s.send(sender=None, ptext=text, plen=elen) for i, erratum in enumerate(errata): + process_debian_erratum(erratum, accepted_codenames) progress_update_s.send(sender=None, index=i + 1) + + +def create_debian_errata_concurrently(errata, accepted_codenames): + """ Create Debian Errata concurrently + """ + elen = len(errata) + text = f'Processing {elen} Debian Errata:' + progress_info_s.send(sender=None, ptext=text, plen=elen) + i = 0 + with concurrent.futures.ProcessPoolExecutor(max_workers=200) as executor: + futures = [executor.submit(process_debian_erratum, erratum, accepted_codenames) for erratum in errata] + for future in concurrent.futures.as_completed(futures): + i += 1 + progress_update_s.send(sender=None, index=i + 1) + + +def process_debian_erratum(erratum, accepted_codenames): + """ Process a single Debian Erratum 
+ """ + try: + from errata.utils import get_or_create_erratum erratum_name = erratum.get('name') e, created = get_or_create_erratum( name=erratum_name, @@ -139,6 +174,8 @@ def create_debian_errata(errata, accepted_codenames): e.osreleases.add(osrelease) for package in packages: process_debian_erratum_affected_packages(e, package) + except Exception as exc: + error_message.send(sender=None, text=exc) def parse_debian_erratum_packages(line, accepted_codenames): @@ -152,9 +189,14 @@ def parse_debian_erratum_packages(line, accepted_codenames): if codename in accepted_codenames: source_package = match.group(2) source_version = match.group(3) - return download_debian_package_dsc(codename, source_package, source_version) + return [codename, source_package, source_version] +@retry( + retry=retry_if_exception_type(ConnectionError), + stop=stop_after_attempt(10), + wait=wait_exponential(multiplier=1, min=2, max=15), +) def download_debian_package_dsc(codename, package, version): """ Download a DSC file for the given source package From this we can determine which packages are built from @@ -163,12 +205,12 @@ def download_debian_package_dsc(codename, package, version): dsc_pattern = re.compile(r'.*"(http.*dsc)"') source_url = f'https://packages.debian.org/source/{codename}/{package}' res = get_url(source_url) - data = download_url(res, f'debian src {package}-{version}', 60) + data = res.content dscs = re.findall(dsc_pattern, data.decode()) if dscs: dsc_url = dscs[0] res = get_url(dsc_url) - data = download_url(res, f'debian dsc {package}-{version}', 60) + data = res.content return Dsc(data.decode()) @@ -217,9 +259,11 @@ def create_debian_os_releases(codename_to_version): osrelease.save() -def process_debian_erratum_affected_packages(e, dsc): +def process_debian_erratum_affected_packages(e, package_data): """ Process packages affected by Debian errata """ + codename, source_package, source_version = package_data + dsc = download_debian_package_dsc(codename, source_package, 
source_version) if not dsc: return epoch, ver, rel = find_evr(str(dsc.get_version())) diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py index 663a8689..1baa37fb 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -27,14 +27,14 @@ from patchman.signals import progress_info_s, progress_update_s -def update_rocky_errata(): +def update_rocky_errata(concurrent_processing=True): """ Update Rocky Linux errata """ rocky_errata_api_host = 'https://apollo.build.resf.org' rocky_errata_api_url = '/api/v3/' if check_rocky_errata_endpoint_health(rocky_errata_api_host): - advisories = download_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_api_url) - process_rocky_errata_concurrently(advisories) + advisories = download_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url, concurrent_processing) + process_rocky_errata(advisories, concurrent_processing) def check_rocky_errata_endpoint_health(rocky_errata_api_host): @@ -62,9 +62,18 @@ def check_rocky_errata_endpoint_health(rocky_errata_api_host): return False -def download_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url): +def download_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url, concurrent_processing): """ Download Rocky Linux advisories and return the list """ + if concurrent_processing: + return download_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_api_url) + else: + return download_rocky_advisories_serially(rocky_errata_api_host, rocky_errata_api_url) + + +def download_rocky_advisories_serially(rocky_errata_api_host, rocky_errata_api_url): + """ Download Rocky Linux advisories serially and return the list + """ rocky_errata_advisories_url = rocky_errata_api_host + rocky_errata_api_url + 'advisories/' headers = {'Accept': 'application/json'} page = 1 @@ -127,14 +136,34 @@ def get_rocky_advisory(rocky_errata_advisories_url, page): return advisories_dict.get('advisories') +def 
process_rocky_errata(advisories, concurrent_processing): + """ Process Rocky Linux Errata + """ + if concurrent_processing: + process_rocky_errata_concurrently(advisories) + else: + process_rocky_errata_serially(advisories) + + +def process_rocky_errata_serially(advisories): + """ Process Rocky Linux errata serially + """ + elen = len(advisories) + ptext = f'Processing {elen} Rocky Errata:' + progress_info_s.send(sender=None, ptext=ptext, plen=elen) + for i, advisory in enumerate(advisories): + process_rocky_erratum(advisory) + progress_update_s.send(sender=None, index=i + 1) + + def process_rocky_errata_concurrently(advisories): """ Process Rocky Linux errata concurrently """ elen = len(advisories) - ptext = f'Processing {elen} Errata:' + ptext = f'Processing {elen} Rocky Errata:' progress_info_s.send(sender=None, ptext=ptext, plen=elen) i = 0 - with concurrent.futures.ProcessPoolExecutor(max_workers=10) as executor: + with concurrent.futures.ProcessPoolExecutor(max_workers=100) as executor: futures = [executor.submit(process_rocky_erratum, advisory) for advisory in advisories] for future in concurrent.futures.as_completed(futures): i += 1 @@ -150,30 +179,7 @@ def process_rocky_erratum(advisory): """ Process a single Rocky Linux erratum """ from errata.utils import get_or_create_erratum - erratum_name = advisory.get('name') - e_type = advisory.get('kind').lower().replace(' ', '') - issue_date = advisory.get('published_at') - synopsis = advisory.get('synopsis') - e, created = get_or_create_erratum( - name=erratum_name, - e_type=e_type, - issue_date=issue_date, - synopsis=synopsis, - ) - add_rocky_erratum_references(e, advisory) - add_rocky_erratum_oses(e, advisory) - add_rocky_erratum_packages(e, advisory) - - -def process_rocky_errata(advisories): - """ Process Rocky Linux errata - """ - from errata.utils import get_or_create_erratum - elen = len(advisories) - ptext = f'Processing {elen} Errata:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) - for i, 
advisory in enumerate(advisories): - progress_update_s.send(sender=None, index=i + 1) + try: erratum_name = advisory.get('name') e_type = advisory.get('kind').lower().replace(' ', '') issue_date = advisory.get('published_at') @@ -187,6 +193,8 @@ def process_rocky_errata(advisories): add_rocky_erratum_references(e, advisory) add_rocky_erratum_oses(e, advisory) add_rocky_erratum_packages(e, advisory) + except Exception as exc: + error_message.send(sender=None, text=exc) def add_rocky_erratum_references(e, advisory): diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py index 55597951..c0b30467 100644 --- a/errata/sources/distros/ubuntu.py +++ b/errata/sources/distros/ubuntu.py @@ -28,7 +28,7 @@ from patchman.signals import error_message, progress_info_s, progress_update_s -def update_ubuntu_errata(): +def update_ubuntu_errata(concurrent_processing=False): """ Update Ubuntu errata """ codenames = retrieve_ubuntu_codenames() @@ -38,8 +38,7 @@ def update_ubuntu_errata(): expected_checksum = download_ubuntu_usn_db_checksum() actual_checksum = get_sha256(data) if actual_checksum == expected_checksum: - extracted = bunzip2(data).decode() - parse_usn_data(extracted) + parse_usn_data(data, concurrent_processing) else: e = 'Ubuntu USN DB checksum mismatch, skipping Ubuntu errata parsing\n' e += f'{actual_checksum} (actual) != {expected_checksum} (expected)' @@ -62,18 +61,39 @@ def download_ubuntu_usn_db_checksum(): return download_url(res, 'Downloading Ubuntu Errata Checksum:').decode().split()[0] -def parse_usn_data(data): +def parse_usn_data(data, concurrent_processing): """ Parse the Ubuntu USN data """ - from errata.utils import get_or_create_erratum - advisories = json.loads(data) accepted_releases = get_accepted_ubuntu_codenames() + extracted = bunzip2(data).decode() + advisories = json.loads(extracted) + if concurrent_processing: + parse_usn_data_concurrently(advisories, accepted_releases) + else: + parse_usn_data_serially(advisories, 
accepted_releases) + + +def parse_usn_data_serially(advisories, accepted_releases): + """ Parse the Ubuntu USN data serially + """ + elen = len(advisories) + ptext = f'Processing {elen} Ubuntu Errata:' + progress_info_s.send(sender=None, ptext=ptext, plen=elen) + for i, (usn_id, advisory) in enumerate(advisories.items()): + process_usn(usn_id, advisory, accepted_releases) + progress_update_s.send(sender=None, index=i + 1) + + +def parse_usn_data_concurrently(advisories, accepted_releases): + """ Parse the Ubuntu USN data concurrently + """ elen = len(advisories) ptext = f'Processing {elen} Ubuntu Errata:' progress_info_s.send(sender=None, ptext=ptext, plen=elen) i = 0 - with concurrent.futures.ProcessPoolExecutor(max_workers=20) as executor: - futures = [executor.submit(process_usn, usn_id, advisory, accepted_releases) for usn_id, advisory in advisories.items()] + with concurrent.futures.ProcessPoolExecutor(max_workers=10) as executor: + futures = [executor.submit(process_usn, usn_id, advisory, accepted_releases) + for usn_id, advisory in advisories.items()] for future in concurrent.futures.as_completed(futures): i += 1 progress_update_s.send(sender=None, index=i + 1) @@ -96,11 +116,15 @@ def process_usn(usn_id, advisory, accepted_releases): issue_date=issue_date, synopsis=synopsis, ) - add_ubuntu_erratum_osreleases(e, affected_releases, accepted_releases) + add_ubuntu_erratum_osreleases( + e, + affected_releases, + accepted_releases, + ) add_ubuntu_erratum_references(e, usn_id, advisory) add_ubuntu_erratum_packages(e, advisory) - except Exception as ex: - print(ex) + except Exception as exc: + error_message.send(sender=None, text=exc) def add_ubuntu_erratum_osreleases(e, affected_releases, accepted_releases): diff --git a/errata/tasks.py b/errata/tasks.py index 7c962f6e..9983ce70 100644 --- a/errata/tasks.py +++ b/errata/tasks.py @@ -34,10 +34,10 @@ def update_errata(): setting_type=list, default=['rocky', 'alma', 'arch', 'ubuntu', 'debian', 'rhel', 'suse', 
'amazon'], ) -# if 'arch' in errata_os_updates: -# update_arch_errata() -# if 'alma' in errata_os_updates: -# update_alma_errata() + if 'arch' in errata_os_updates: + update_arch_errata() + if 'alma' in errata_os_updates: + update_alma_errata() if 'rocky' in errata_os_updates: update_rocky_errata() if 'debian' in errata_os_updates: From 9b1c69b4b6218880729b63e1c0813803d9613d4a Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 23:34:35 -0500 Subject: [PATCH 067/199] simplify report cleaning --- etc/patchman/local_settings.py | 4 ++ hosts/models.py | 24 ++++++++---- hosts/utils.py | 27 +------------- .../migrations/0003_remove_report_accessed.py | 17 +++++++++ reports/models.py | 1 - reports/tasks.py | 13 +++++++ repos/utils.py | 11 +++--- sbin/patchman | 37 ++++--------------- util/__init__.py | 6 +++ 9 files changed, 70 insertions(+), 70 deletions(-) create mode 100644 reports/migrations/0003_remove_report_accessed.py diff --git a/etc/patchman/local_settings.py b/etc/patchman/local_settings.py index ab9ceb1b..3a68feb3 100644 --- a/etc/patchman/local_settings.py +++ b/etc/patchman/local_settings.py @@ -76,4 +76,8 @@ 'task': 'util.tasks.clean_database', 'schedule': crontab(hour=6, minute=00), }, + 'remove_old_reports': { + 'task': 'reports.tasks.remove_reports_with_no_hosts', + 'schedule': timedelta(days=7), + }, } diff --git a/hosts/models.py b/hosts/models.py index e1d6d4a8..f45c830d 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -26,16 +26,16 @@ from rpm import labelCompare from taggit.managers import TaggableManager -from packages.models import Package, PackageUpdate -from domains.models import Domain -from repos.models import Repository -from operatingsystems.models import OSVariant from arch.models import MachineArchitecture +from domains.models import Domain +from hosts.utils import update_rdns from modules.models import Module -from patchman.signals import info_message, error_message +from operatingsystems.models import OSVariant 
+from packages.models import Package, PackageUpdate from packages.utils import get_or_create_package_update +from patchman.signals import info_message, error_message +from repos.models import Repository from repos.utils import find_best_repo -from hosts.utils import update_rdns, remove_reports class Host(models.Model): @@ -121,8 +121,16 @@ def check_rdns(self): info_message.send(sender=None, text='Reverse DNS check disabled') - def clean_reports(self, timestamp): - remove_reports(self, timestamp) + def clean_reports(self): + """ Remove all but the last 3 reports for a host + """ + from reports.models import Report + reports = Report.objects.filter(host=self).order_by('-created')[3:] + rlen = reports.count() + for report in Report.objects.filter(host=self).order_by('-created')[3:]: + report.delete() + if rlen > 0: + info_message.send(sender=None, text=f'{self.hostname}: removed {rlen} old reports') def get_host_repo_packages(self): if self.host_repos_only: diff --git a/hosts/utils.py b/hosts/utils.py index b0a8e675..4198408e 100644 --- a/hosts/utils.py +++ b/hosts/utils.py @@ -19,13 +19,12 @@ from django.db import DatabaseError -from patchman.signals import progress_info_s, progress_update_s, error_message +from patchman.signals import error_message def update_rdns(host): """ Update the reverse DNS for a host """ - try: reversedns = str(gethostbyaddr(host.ipaddress)[0]) except (gaierror, herror): @@ -36,27 +35,3 @@ def update_rdns(host): host.save() except DatabaseError as e: error_message.send(sender=None, text=e) - - -def remove_reports(host, timestamp): - """ Remove all but the last 3 reports for a host - """ - - from reports.models import Report - - reports = Report.objects.filter(host=host).order_by('-created')[:3] - report_ids = [] - - for report in reports: - report_ids.append(report.id) - report.accessed = timestamp - report.save() - - del_reports = Report.objects.filter(host=host).exclude(id__in=report_ids) - - rlen = del_reports.count() - ptext = 
f'Cleaning {rlen} old reports' - progress_info_s.send(sender=None, ptext=ptext, plen=rlen) - for i, report in enumerate(del_reports): - report.delete() - progress_update_s.send(sender=None, index=i + 1) diff --git a/reports/migrations/0003_remove_report_accessed.py b/reports/migrations/0003_remove_report_accessed.py new file mode 100644 index 00000000..406fc6a1 --- /dev/null +++ b/reports/migrations/0003_remove_report_accessed.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-02-27 04:16 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('reports', '0002_report_modules'), + ] + + operations = [ + migrations.RemoveField( + model_name='report', + name='accessed', + ), + ] diff --git a/reports/models.py b/reports/models.py index 816891bd..e7e1503b 100644 --- a/reports/models.py +++ b/reports/models.py @@ -32,7 +32,6 @@ class Report(models.Model): created = models.DateTimeField(auto_now_add=True) - accessed = models.DateTimeField(auto_now_add=True) host = models.CharField(max_length=255, null=True) domain = models.CharField(max_length=255, null=True) tags = models.CharField(max_length=255, null=True, default='') diff --git a/reports/tasks.py b/reports/tasks.py index 46131155..db9e4103 100755 --- a/reports/tasks.py +++ b/reports/tasks.py @@ -19,7 +19,9 @@ from django.db.utils import OperationalError +from hosts.models import Host from reports.models import Report +from util import info_message @shared_task(bind=True, autoretry_for=(OperationalError,), retry_backoff=True, retry_kwargs={'max_retries': 5}) @@ -37,3 +39,14 @@ def process_reports(): reports = Report.objects.filter(processed=False) for report in reports: process_report.delay(report.id) + + +@shared_task +def clean_reports_with_no_hosts(): + """ Task to clean processed reports where the host no longer exists + """ + for report in Report.objects.filter(processed=True): + if not Host.objects.filter(hostname=report.host).exists(): + text = f'Deleting 
report {report.id} for Host `{report.host}` as the host no longer exists' + info_message.send(sender=None, text=text) + report.delete() diff --git a/repos/utils.py b/repos/utils.py index c95e1127..837fd783 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -38,7 +38,8 @@ from packages.models import Package, PackageString from packages.utils import parse_package_string, get_or_create_package, find_evr, \ convert_package_to_packagestring, convert_packagestring_to_package -from util import get_url, download_url, response_is_valid, extract, get_checksum, Checksum, get_setting_of_type +from util import get_url, download_url, response_is_valid, extract, get_checksum, Checksum, get_setting_of_type, \ + get_datetime_now from patchman.signals import progress_info_s, progress_update_s, \ info_message, warning_message, error_message, debug_message @@ -641,7 +642,7 @@ def refresh_arch_repo(repo): """ max_mirrors = get_max_mirrors() fname = f'{repo.arch}/{repo.repo_id}.db' - ts = datetime.now().astimezone().replace(microsecond=0) + ts = get_datetime_now() enabled_mirrors = repo.mirror_set.filter(refresh=True, enabled=True) for i, mirror in enumerate(enabled_mirrors): @@ -801,7 +802,7 @@ def refresh_gentoo_repo(repo): else: refresh_gentoo_overlay_repo(repo) repo_type = 'overlay' - ts = datetime.now().replace(microsecond=0) + ts = get_datetime_now() for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True): res = get_url(mirror.url + '.md5sum') data = download_url(res, 'Downloading repo info (1/2):') @@ -887,7 +888,7 @@ def refresh_rpm_repo(repo): check_for_metalinks(repo) max_mirrors = get_max_mirrors() - ts = datetime.now().astimezone().replace(microsecond=0) + ts = get_datetime_now() enabled_mirrors = repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True) for i, mirror in enumerate(enabled_mirrors): if i >= max_mirrors: @@ -927,7 +928,7 @@ def refresh_deb_repo(repo): formats = ['Packages.xz', 'Packages.bz2', 'Packages.gz', 'Packages'] - ts = 
datetime.now().astimezone().replace(microsecond=0) + ts = get_datetime_now() enabled_mirrors = repo.mirror_set.filter(refresh=True, enabled=True) for mirror in enabled_mirrors: res = find_mirror_url(mirror.url, formats) diff --git a/sbin/patchman b/sbin/patchman index edb169d0..6d523221 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -35,10 +35,12 @@ from errata.utils import mark_errata_security_updates from errata.tasks import update_errata from hosts.models import Host from modules.models import Module +from modules.utils import clean_modules from packages.utils import clean_packages, clean_packageupdates, clean_packagenames from repos.models import Repository from repos.utils import clean_repos from reports.models import Report +from reports.tasks import clean_reports_with_no_hosts from security.utils import update_cves, update_cwes from util import print_nocr, create_pbar, update_pbar, set_verbosity, get_verbosity, tz_aware_datetime from patchman.signals import info_message @@ -139,42 +141,17 @@ def list_hosts(hosts=None): host.show() -def clean_reports(s_host=None): +def clean_reports(hoststr=None): """ Delete old reports for all hosts, specify host for a single host. Reports with non existent hosts are only removed when no host is specified. 
""" - hosts = get_hosts(s_host, 'Cleaning Reports') - timestamp = date.today() - + hosts = get_hosts(hoststr, 'Cleaning Reports') for host in hosts: - info_message.send(sender=None, text=str(host)) - host.clean_reports(timestamp) - - if s_host is None: - reports = Report.objects.filter(accessed__lt=timestamp) - rlen = reports.count() - if rlen != 0: - create_pbar(f'Removing {rlen} extraneous Reports:', rlen) - for i, report in enumerate(reports): - report.delete() - update_pbar(i + 1) - + host.clean_reports() -def clean_modules(): - """ Delete modules that have no host or no repo - """ - modules = Module.objects.filter(host__isnull=True, repo__isnull=True) - mlen = modules.count() - - if mlen == 0: - text = 'No orphaned Modules found.' - info_message.send(sender=None, text=text) - else: - create_pbar(f'Removing {mlen} empty Modules:', mlen) - for i, module in enumerate(modules): - module.delete() - update_pbar(i + 1) + if not hoststr: + clean_reports_with_no_hosts() def host_updates_alt(host=None): diff --git a/util/__init__.py b/util/__init__.py index f3bf549a..abf27a1d 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -291,3 +291,9 @@ def tz_aware_datetime(date): if not parsed_date.tzinfo: parsed_date = make_aware(parsed_date) return parsed_date + + +def get_datetime_now(): + """ Return the current timezone-aware datetime removing microseconds + """ + return datetime.now().astimezone().replace(microsecond=0) From ad943174136ea7c6bcebb457300edb8fdaaeaa2d Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 26 Feb 2025 23:51:16 -0500 Subject: [PATCH 068/199] switch to tqdm --- debian/control | 2 +- patchman/receivers.py | 9 +++++---- repos/utils.py | 1 - requirements.txt | 2 +- sbin/patchman | 7 ++----- setup.cfg | 2 +- util/__init__.py | 38 ++++++-------------------------------- 7 files changed, 16 insertions(+), 45 deletions(-) diff --git a/debian/control b/debian/control index 992929c5..2f0b5d10 100644 --- a/debian/control +++ b/debian/control @@ -16,7 
+16,7 @@ Homepage: https://github.com/furlongm/patchman Depends: ${misc:Depends}, python3 (>= 3.10), python3-django (>= 3.2), python3-django-tagging, python3-django-extensions, python3-django-bootstrap3, python3-djangorestframework, python3-django-filters, python3-debian, - python3-rpm, python3-progressbar, python3-lxml, python3-defusedxml, + python3-rpm, python3-tqdm, python3-lxml, python3-defusedxml, python3-requests, python3-colorama, python3-magic, python3-humanize, python3-pip, python3-pymemcache, python3-yaml, memcached, libapache2-mod-wsgi-py3, apache2, python3-django-taggit diff --git a/patchman/receivers.py b/patchman/receivers.py index 9b636017..c0ee4a47 100644 --- a/patchman/receivers.py +++ b/patchman/receivers.py @@ -16,6 +16,7 @@ # along with Patchman. If not, see from colorama import init, Fore, Style +from tqdm import tqdm from django.dispatch import receiver @@ -53,7 +54,7 @@ def print_info_message(sender=None, **kwargs): """ text = str(kwargs.get('text')) if get_verbosity(): - print(Style.RESET_ALL + Fore.RESET + text) + tqdm.write(Style.RESET_ALL + Fore.RESET + text) @receiver(warning_message) @@ -62,7 +63,7 @@ def print_warning_message(**kwargs): """ text = str(kwargs.get('text')) if get_verbosity(): - print(Style.BRIGHT + Fore.YELLOW + text) + tqdm.write(Style.BRIGHT + Fore.YELLOW + text) @receiver(error_message) @@ -71,7 +72,7 @@ def print_error_message(**kwargs): """ text = str(kwargs.get('text')) if text: - print(Style.BRIGHT + Fore.RED + text) + tqdm.write(Style.BRIGHT + Fore.RED + text) @receiver(debug_message) @@ -80,4 +81,4 @@ def print_debug_message(**kwargs): """ text = str(kwargs.get('text')) if get_verbosity() and settings.DEBUG and text: - print(Style.BRIGHT + Fore.BLUE + text) + tqdm.write(Style.BRIGHT + Fore.BLUE + text) diff --git a/repos/utils.py b/repos/utils.py index 837fd783..339290c7 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -23,7 +23,6 @@ import tarfile import tempfile import yaml -from datetime import datetime 
from io import BytesIO from defusedxml.lxml import _etree as etree from debian.debian_support import Version diff --git a/requirements.txt b/requirements.txt index 5b16ddc2..32ce1225 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,6 @@ Django==4.2.19 django-taggit==4.0.0 django-extensions==3.2.1 django-bootstrap3==23.1 -progressbar==2.5 python-debian==0.1.49 lxml==5.2.2 defusedxml==0.7.1 @@ -20,3 +19,4 @@ tenacity==8.2.3 celery==5.4.0 redis==5.2.1 django-celery-beat==2.7.0 +tqdm==4.67.1 diff --git a/sbin/patchman b/sbin/patchman index 6d523221..64349760 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -20,7 +20,6 @@ import os import sys import argparse -from datetime import date, datetime from django.core.exceptions import MultipleObjectsReturned from django.db.models import Count @@ -34,7 +33,6 @@ from arch.utils import clean_architectures from errata.utils import mark_errata_security_updates from errata.tasks import update_errata from hosts.models import Host -from modules.models import Module from modules.utils import clean_modules from packages.utils import clean_packages, clean_packageupdates, clean_packagenames from repos.models import Repository @@ -42,7 +40,7 @@ from repos.utils import clean_repos from reports.models import Report from reports.tasks import clean_reports_with_no_hosts from security.utils import update_cves, update_cwes -from util import print_nocr, create_pbar, update_pbar, set_verbosity, get_verbosity, tz_aware_datetime +from util import set_verbosity, get_verbosity, get_datetime_now from patchman.signals import info_message @@ -159,7 +157,7 @@ def host_updates_alt(host=None): """ updated_hosts = [] hosts = get_hosts(host, 'Finding updates') - ts = tz_aware_datetime(datetime.now().replace(microsecond=0)) + ts = get_datetime_now() for host in hosts: info_message.send(sender=None, text=str(host)) if host not in updated_hosts: @@ -328,7 +326,6 @@ def dns_checks(host=None): for host in hosts: if get_verbosity(): text = 
f'{str(host)[0:25].ljust(25)}: ' - print_nocr(text) host.check_rdns() diff --git a/setup.cfg b/setup.cfg index 28f45479..6d213cbf 100644 --- a/setup.cfg +++ b/setup.cfg @@ -12,7 +12,7 @@ requires = /usr/bin/python3 python3-django-filter python3-debian python3-rpm - python3-progressbar2 + python3-tqdm python3-lxml python3-defusedxml python3-requests diff --git a/util/__init__.py b/util/__init__.py index abf27a1d..d396088b 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -15,19 +15,17 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see -import sys import requests import bz2 import magic import zlib import lzma -from colorama import Fore, Style from datetime import datetime from enum import Enum from hashlib import md5, sha1, sha256, sha512 -from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential -from progressbar import Bar, ETA, Percentage, ProgressBar from requests.exceptions import HTTPError, Timeout, ConnectionError +from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential +from tqdm import tqdm from patchman.signals import error_message, info_message, debug_message @@ -36,23 +34,11 @@ from django.conf import settings -if ProgressBar.__dict__.get('maxval'): - pbar2 = False -else: - pbar2 = True - pbar = None verbose = None Checksum = Enum('Checksum', 'md5 sha sha1 sha256 sha512') -def print_nocr(text): - """ Print text without a carriage return - """ - print(text, end='') - sys.stdout.softspace = False - - def get_verbosity(): """ Get the global verbosity level """ @@ -73,14 +59,7 @@ def create_pbar(ptext, plength, ljust=35, **kwargs): global pbar, verbose if verbose and plength > 0: jtext = str(ptext).ljust(ljust) - if pbar2: - pbar = ProgressBar(widgets=[Style.RESET_ALL + Fore.YELLOW + jtext, - Percentage(), Bar(), ETA()], - max_value=plength).start() - else: - pbar = ProgressBar(widgets=[Style.RESET_ALL + Fore.YELLOW + jtext, - 
Percentage(), Bar(), ETA()], - maxval=plength).start() + pbar = tqdm(total=plength, desc=jtext, position=0, leave=True) return pbar @@ -89,14 +68,9 @@ def update_pbar(index, **kwargs): """ global pbar, verbose if verbose and pbar: - pbar.update(index) - if pbar2: - pmax = pbar.max_value - else: - pmax = pbar.maxval - if index >= pmax: - pbar.finish() - print_nocr(Fore.RESET + Style.RESET_ALL) + pbar.update(n=index-pbar.n) + if index >= pbar.total: + pbar.close() pbar = None From 1e476f12e88e98783d2439ce54b29fe85d45f323 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 27 Feb 2025 00:29:04 -0500 Subject: [PATCH 069/199] add task to find host updates --- etc/patchman/local_settings.py | 4 ++ hosts/tasks.py | 81 ++++++++++++++++++++++++++++++++++ 2 files changed, 85 insertions(+) create mode 100755 hosts/tasks.py diff --git a/etc/patchman/local_settings.py b/etc/patchman/local_settings.py index 3a68feb3..f71d42fd 100644 --- a/etc/patchman/local_settings.py +++ b/etc/patchman/local_settings.py @@ -80,4 +80,8 @@ 'task': 'reports.tasks.remove_reports_with_no_hosts', 'schedule': timedelta(days=7), }, + 'find_host_updates': { + 'task': 'hosts.tasks.find_all_host_updates_homogenous', + 'schedule': timedelta(hours=24), + }, } diff --git a/hosts/tasks.py b/hosts/tasks.py new file mode 100755 index 00000000..4d53d5ab --- /dev/null +++ b/hosts/tasks.py @@ -0,0 +1,81 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from celery import shared_task + +from django.db.models import Count + +from hosts.models import Host +from util import get_datetime_now +from patchman.signals import info_message + + +@shared_task +def find_host_updates(host_id): + """ Task to find updates for a host + """ + host = Host.objects.get(id=host_id) + host.find_updates() + + +@shared_task +def find_all_host_updates(): + """ Task to find updates for all hosts + """ + for host in Host.objects.all(): + find_host_updates.delay(host.id) + + +@shared_task +def find_all_host_updates_homogenous(): + """ Task to find updates for all hosts where hosts are expected to be homogenous + """ + updated_hosts = [] + ts = get_datetime_now() + for host in Host.objects.all(): + if host not in updated_hosts: + host.updated_at = ts + host.find_updates() + host.save() + + # only include hosts with the exact same number of packages + filtered_hosts = Host.objects.annotate( + packages_count=Count('packages')).filter( + packages_count=host.packages.count() + ) + # and exclude hosts with the current timestamp + filtered_hosts = filtered_hosts.exclude(updated_at=ts) + + packages = set(host.packages.all()) + repos = set(host.repos.all()) + updates = host.updates.all() + + phosts = [] + for fhost in filtered_hosts: + frepos = set(fhost.repos.all()) + if repos != frepos: + continue + fpackages = set(fhost.packages.all()) + if packages != fpackages: + continue + phosts.append(fhost) + + for phost in phosts: + phost.updates.set(updates) + phost.updated_at = ts + phost.save() + updated_hosts.append(phost) + info_message.send(sender=None, text=f'Added the same updates to {phost}') From 1c691bc9ad1f836320fdbd1f160d89108dbb6fb6 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 27 Feb 2025 15:19:52 -0500 Subject: [PATCH 070/199] bugfix for errata tasks --- errata/sources/distros/debian.py | 2 +- errata/sources/distros/ubuntu.py | 2 +- errata/tasks.py | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git 
a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index 31454f11..fa1f7610 100644 --- a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -222,7 +222,7 @@ def get_accepted_debian_codenames(): accepted_codenames = get_setting_of_type( setting_name='DEBIAN_CODENAMES', setting_type=list, - default_value=default_codenames, + default=default_codenames, ) return accepted_codenames diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py index c0b30467..a6991693 100644 --- a/errata/sources/distros/ubuntu.py +++ b/errata/sources/distros/ubuntu.py @@ -207,7 +207,7 @@ def get_accepted_ubuntu_codenames(): accepted_codenames = get_setting_of_type( setting_name='UBUNTU_CODENAMES', setting_type=list, - default_value=default_codenames, + default=default_codenames, ) return accepted_codenames diff --git a/errata/tasks.py b/errata/tasks.py index 9983ce70..b90d2297 100644 --- a/errata/tasks.py +++ b/errata/tasks.py @@ -26,6 +26,7 @@ from errata.sources.distros.ubuntu import update_ubuntu_errata +@shared_task def update_errata(): """ Update all distros errata """ From 1824e477405d9e3376c7fae2a1c49fc42d57b365 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 27 Feb 2025 15:21:39 -0500 Subject: [PATCH 071/199] show date without time --- errata/templates/errata/erratum_detail.html | 2 +- errata/templates/errata/erratum_table.html | 2 +- security/templates/security/cve_detail.html | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/errata/templates/errata/erratum_detail.html b/errata/templates/errata/erratum_detail.html index 5394caea..dd640a47 100644 --- a/errata/templates/errata/erratum_detail.html +++ b/errata/templates/errata/erratum_detail.html @@ -19,7 +19,7 @@
    Repo NameIDRepo NameRepo ID Mirrors Enabled Security
    {{ repo }}{% if repo.id != None %} {{ repo.id }} {% endif %}{{ repo.mirror_set.count }}{% if repo.repo_id %} {{ repo.repo_id }} {% endif %} {{ repo.mirror_set.count }}
    {% yes_no_button_repo_en repo %}
    {% yes_no_button_repo_sec repo %}
    {% yes_no_img repo.auth_required %}
    - + diff --git a/errata/templates/errata/erratum_table.html b/errata/templates/errata/erratum_table.html index ad933947..e52fe8a3 100644 --- a/errata/templates/errata/erratum_table.html +++ b/errata/templates/errata/erratum_table.html @@ -17,7 +17,7 @@ - + diff --git a/security/templates/security/cve_detail.html b/security/templates/security/cve_detail.html index 81f866f6..16f5a3f8 100644 --- a/security/templates/security/cve_detail.html +++ b/security/templates/security/cve_detail.html @@ -20,10 +20,10 @@ - - - - + + + + - - - + + +
    Name {{ erratum.name }}
    Type {{ erratum.e_type }}
    Published Date{{ erratum.issue_date }}
    Published Date{{ erratum.issue_date|date|default_if_none:'' }}
    Synopsis {{ erratum.synopsis }}
    Packages Affected {{ erratum.packages.count }}
    {{ erratum.name }} {{ erratum.e_type }}{{ erratum.issue_date }}{{ erratum.issue_date|date|default_if_none:'' }} {{ erratum.synopsis }} {{ erratum.packages.count }} {{ erratum.osreleases.count }}
    CVE ID{{ cve.cve_id }}
    Title{{ cve.title }}
    Description{{ cve.description }}
    Reserved{{ cve.reserved_date|default_if_none:'' }}
    Rejected{{ cve.rejected_date|default_if_none:'' }}
    Published{{ cve.published_date|default_if_none:'' }}
    Updated{{ cve.updated_date|default_if_none:'' }}
    Reserved{{ cve.reserved_date|date|default_if_none:'' }}
    Rejected{{ cve.rejected_date|date|default_if_none:'' }}
    Published{{ cve.published_date|date|default_if_none:'' }}
    Updated{{ cve.updated_date|date|default_if_none:'' }}
    CVSS Scores From f96f0ac6f295a13263635a4c507ebc6ea982dba3 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 27 Feb 2025 15:25:48 -0500 Subject: [PATCH 072/199] make get requests args consistent --- errata/views.py | 10 +++---- hosts/templates/hosts/host_delete.html | 2 +- hosts/templates/hosts/host_detail.html | 2 +- hosts/views.py | 30 +++++++++---------- .../operatingsystemrelease_table.html | 4 +-- .../operatingsystemvariant_table.html | 2 +- .../operatingsystems/osrelease_delete.html | 6 ++-- .../operatingsystems/osrelease_detail.html | 6 ++-- .../operatingsystems/osvariant_delete.html | 2 +- .../operatingsystems/osvariant_detail.html | 2 +- operatingsystems/views.py | 6 ++-- packages/views.py | 12 ++++---- reports/views.py | 2 +- repos/templates/repos/repo_detail.html | 1 + repos/views.py | 15 +++++----- security/views.py | 6 ++-- 16 files changed, 53 insertions(+), 55 deletions(-) diff --git a/errata/views.py b/errata/views.py index 8b5fcb31..9b8ce52c 100644 --- a/errata/views.py +++ b/errata/views.py @@ -28,19 +28,19 @@ @login_required def erratum_list(request): - errata = Erratum.objects.select_related() + errata = Erratum.objects.select_related().order_by('name') if 'e_type' in request.GET: errata = errata.filter(e_type=request.GET['e_type']).distinct() if 'reference_id' in request.GET: - errata = errata.filter(references=int(request.GET['reference_id'])) + errata = errata.filter(references=request.GET['reference_id']) if 'cve_id' in request.GET: errata = errata.filter(cves__cve_id=request.GET['cve_id']) if 'package_id' in request.GET: - errata = errata.filter(packages=int(request.GET['package_id'])) + errata = errata.filter(packages=request.GET['package_id']) if 'search' in request.GET: terms = request.GET['search'].lower() @@ -76,13 +76,13 @@ def erratum_list(request): @login_required def erratumreference_list(request): - erefs = ErratumReference.objects.select_related() + erefs = ErratumReference.objects.select_related().order_by('er_type') 
if 'er_type' in request.GET: erefs = erefs.filter(er_type=request.GET['er_type']).distinct() if 'erratum_id' in request.GET: - erefs = erefs.filter(erratum__id=int(request.GET['erratum_id'])) + erefs = erefs.filter(erratum__id=request.GET['erratum_id']) if 'search' in request.GET: terms = request.GET['search'].lower() diff --git a/hosts/templates/hosts/host_delete.html b/hosts/templates/hosts/host_delete.html index 28c5ef3b..5f37d8ab 100644 --- a/hosts/templates/hosts/host_delete.html +++ b/hosts/templates/hosts/host_delete.html @@ -13,7 +13,7 @@
    - + diff --git a/hosts/templates/hosts/host_detail.html b/hosts/templates/hosts/host_detail.html index 73871d1f..7a32e913 100644 --- a/hosts/templates/hosts/host_detail.html +++ b/hosts/templates/hosts/host_detail.html @@ -23,7 +23,7 @@
    Hostname {{ host.hostname }}
    Domain {{ host.domain }}
    Domain {{ host.domain }}
    Reporting IP Address {{ host.ipaddress }}
    Reverse DNS {{ host.reversedns }}
    OS Release {{ host.osvariant.osrelease }}
    - + diff --git a/hosts/views.py b/hosts/views.py index 7ff4b66d..d9614eb5 100644 --- a/hosts/views.py +++ b/hosts/views.py @@ -39,26 +39,26 @@ def host_list(request): hosts = Host.objects.select_related() - if 'domain' in request.GET: - hosts = hosts.filter(domain=int(request.GET['domain'])) + if 'domain_id' in request.GET: + hosts = hosts.filter(domain=request.GET['domain_id']) if 'package_id' in request.GET: - hosts = hosts.filter(packages=int(request.GET['package_id'])) + hosts = hosts.filter(packages=request.GET['package_id']) if 'package' in request.GET: hosts = hosts.filter(packages__name__name=request.GET['package']) - if 'repo' in request.GET: - hosts = hosts.filter(repos=int(request.GET['repo'])) + if 'repo_id' in request.GET: + hosts = hosts.filter(repos=request.GET['repo_id']) - if 'arch' in request.GET: - hosts = hosts.filter(arch=int(request.GET['arch'])) + if 'arch_id' in request.GET: + hosts = hosts.filter(arch=request.GET['arch_id']) - if 'osvariant' in request.GET: - hosts = hosts.filter(osvariant=int(request.GET['osvariant'])) + if 'osvariant_id' in request.GET: + hosts = hosts.filter(osvariant=request.GET['osvariant_id']) - if 'osrelease' in request.GET: - hosts = hosts.filter(osvariant__osrelease=int(request.GET['osrelease'])) + if 'osrelease_id' in request.GET: + hosts = hosts.filter(osvariant__osrelease=request.GET['osrelease_id']) if 'tag' in request.GET: hosts = hosts.filter(tags__name__in=[request.GET['tag']]) @@ -92,10 +92,10 @@ def host_list(request): for tag in Tag.objects.all(): tags[tag.name] = tag.name filter_list.append(Filter(request, 'Tag', 'tag', tags)) - filter_list.append(Filter(request, 'Domain', 'domain', Domain.objects.all())) - filter_list.append(Filter(request, 'OS Variant', 'osvariant', OSVariant.objects.filter(host__in=hosts))) - filter_list.append(Filter(request, 'OS Release', 'osrelease', OSRelease.objects.filter(osvariant__host__in=hosts))) - filter_list.append(Filter(request, 'Architecture', 'arch', 
MachineArchitecture.objects.filter(host__in=hosts))) + filter_list.append(Filter(request, 'Domain', 'domain_id', Domain.objects.all())) + filter_list.append(Filter(request, 'OS Release', 'osrelease_id', OSRelease.objects.filter(osvariant__host__in=hosts))) + filter_list.append(Filter(request, 'OS Variant', 'osvariant_id', OSVariant.objects.filter(host__in=hosts))) + filter_list.append(Filter(request, 'Architecture', 'arch_id', MachineArchitecture.objects.filter(host__in=hosts))) filter_list.append(Filter(request, 'Reboot Required', 'reboot_required', {'true': 'Yes', 'false': 'No'})) filter_bar = FilterBar(request, filter_list) diff --git a/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html b/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html index 33ada936..82bb47ef 100644 --- a/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html +++ b/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html @@ -15,8 +15,8 @@ - - + + {% endfor %} diff --git a/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html b/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html index fe60af1b..3ef8403f 100644 --- a/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html +++ b/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html @@ -16,7 +16,7 @@ - + diff --git a/operatingsystems/templates/operatingsystems/osrelease_delete.html b/operatingsystems/templates/operatingsystems/osrelease_delete.html index 47b66c9b..358d9269 100644 --- a/operatingsystems/templates/operatingsystems/osrelease_delete.html +++ b/operatingsystems/templates/operatingsystems/osrelease_delete.html @@ -15,9 +15,9 @@ - - - + + +
    Hostname {{ host.hostname }}
    Domain {{ host.domain }}
    Domain {{ host.domain }}
    Reporting IP Address {{ host.ipaddress }}
    Reverse DNS {{ host.reversedns }}
    OS Release {{ host.osvariant.osrelease }}
    {{ osrelease }} {% if osrelease.codename %}{{ osrelease.cpe_name }}{% endif %} {% if osrelease.codename %}{{ osrelease.codename }}{% endif %}{{ osrelease.repos.count }}{{ osrelease.osvariant_set.count }}{{ osrelease.repos.count }}{{ osrelease.osvariant_set.count }}
    {{ osvariant }} {{ osvariant.arch }} {% if osvariant.codename %}{{ osvariant.codename }}{% else %}{% if osvariant.osrelease %}{{ osvariant.osrelease.codename }}{% endif %}{% endif %}{{ osvariant.host_set.count }}{{ osvariant.host_set.count }} {% if osvariant.osrelease %}{{ osvariant.osrelease }}{% endif %} {% if osvariant.osrelease.repos.count != None %}{{ osvariant.osrelease.repos.count }}{% else %}0{% endif %}
    Name{{ osrelease }}
    CPE Name{% if osrelease.cpe_name %}{{ osrelease.cpe_name }}{% endif %}
    Codename{% if osrelease.codename %}{{ osrelease.codename }}{% endif %}
    OS Variants{{ osrelease.osvariant_set.count }}
    Repositories{{ osrelease.repos.count }}
    Hosts{{ host_count }}
    OS Variants{{ osrelease.osvariant_set.count }}
    Repositories{{ osrelease.repos.count }}
    Hosts{{ host_count }}
    diff --git a/operatingsystems/templates/operatingsystems/osrelease_detail.html b/operatingsystems/templates/operatingsystems/osrelease_detail.html index a93446f2..9cfd1123 100644 --- a/operatingsystems/templates/operatingsystems/osrelease_detail.html +++ b/operatingsystems/templates/operatingsystems/osrelease_detail.html @@ -26,9 +26,9 @@
    Name{{ osrelease }}
    CPE Name{% if osrelease.cpe_name %}{{ osrelease.cpe_name }}{% endif %}
    Codename{% if osrelease.codename %}{{ osrelease.codename }}{% endif %}
    OS Variants{{ osrelease.osvariant_set.count }}
    Repositories{{ osrelease.repos.count }}
    Hosts{{ host_count }}
    OS Variants{{ osrelease.osvariant_set.count }}
    Repositories{{ osrelease.repos.count }}
    Hosts{{ host_count }}
    {% if user.is_authenticated and perms.is_admin %} diff --git a/operatingsystems/templates/operatingsystems/osvariant_delete.html b/operatingsystems/templates/operatingsystems/osvariant_delete.html index bfb556d3..7a891989 100644 --- a/operatingsystems/templates/operatingsystems/osvariant_delete.html +++ b/operatingsystems/templates/operatingsystems/osvariant_delete.html @@ -15,7 +15,7 @@ Name {{ osvariant.name }} Architecture {{ osvariant.arch }} Codename {{ osvariant.codename }} - Hosts{{ osvariant.host_set.count }} + Hosts{{ osvariant.host_set.count }} OS Release{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %} diff --git a/operatingsystems/templates/operatingsystems/osvariant_detail.html b/operatingsystems/templates/operatingsystems/osvariant_detail.html index 71034664..008a00b4 100644 --- a/operatingsystems/templates/operatingsystems/osvariant_detail.html +++ b/operatingsystems/templates/operatingsystems/osvariant_detail.html @@ -24,7 +24,7 @@ Name {{ osvariant.name }} Architecture {{ osvariant.arch }} Codename {{ osvariant.codename }} - Hosts{{ osvariant.host_set.count }} + Hosts{{ osvariant.host_set.count }} OS Release{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %} {% if user.is_authenticated and perms.is_admin %} diff --git a/operatingsystems/views.py b/operatingsystems/views.py index 9b9945c5..2b696f92 100644 --- a/operatingsystems/views.py +++ b/operatingsystems/views.py @@ -34,8 +34,8 @@ def osvariant_list(request): osvariants = OSVariant.objects.select_related() - if 'osrelease' in request.GET: - osvariants = osvariants.filter(osrelease=int(request.GET['osrelease'])) + if 'osrelease_id' in request.GET: + osvariants = osvariants.filter(osrelease=request.GET['osrelease_id']) if 'search' in request.GET: terms = request.GET['search'].lower() @@ -136,7 +136,7 @@ def osrelease_list(request): osreleases = OSRelease.objects.select_related() if 'erratum_id' in 
request.GET: - osreleases = osreleases.filter(erratum=int(request.GET['erratum_id'])) + osreleases = osreleases.filter(erratum=request.GET['erratum_id']) if 'search' in request.GET: terms = request.GET['search'].lower() diff --git a/packages/views.py b/packages/views.py index 66f3720f..fd66e7a6 100644 --- a/packages/views.py +++ b/packages/views.py @@ -32,8 +32,8 @@ def package_list(request): packages = Package.objects.select_related() - if 'arch' in request.GET: - packages = packages.filter(arch=request.GET['arch']).distinct() + if 'arch_id' in request.GET: + packages = packages.filter(arch=request.GET['arch_id']).distinct() if 'packagetype' in request.GET: packages = packages.filter(packagetype=request.GET['packagetype']).distinct() @@ -99,7 +99,7 @@ def package_list(request): filter_list.append(Filter(request, 'Installed on Hosts', 'installed_on_hosts', {'true': 'Yes', 'false': 'No'})) filter_list.append(Filter(request, 'Available in Repos', 'available_in_repos', {'true': 'Yes', 'false': 'No'})) filter_list.append(Filter(request, 'Package Type', 'packagetype', Package.PACKAGE_TYPES)) - filter_list.append(Filter(request, 'Architecture', 'arch', PackageArchitecture.objects.all())) + filter_list.append(Filter(request, 'Architecture', 'arch_id', PackageArchitecture.objects.all())) filter_bar = FilterBar(request, filter_list) return render(request, @@ -114,12 +114,10 @@ def package_name_list(request): packages = PackageName.objects.select_related() if 'arch' in request.GET: - packages = packages.filter( - package__arch=int(request.GET['arch'])).distinct() + packages = packages.filter(package__arch=request.GET['arch']).distinct() if 'packagetype' in request.GET: - packages = packages.filter( - package__packagetype=request.GET['packagetype']).distinct() + packages = packages.filter(package__packagetype=request.GET['packagetype']).distinct() if 'search' in request.GET: terms = request.GET['search'].lower() diff --git a/reports/views.py b/reports/views.py index 
fe8d6e7b..80e1a654 100644 --- a/reports/views.py +++ b/reports/views.py @@ -81,7 +81,7 @@ def report_list(request): reports = Report.objects.select_related() if 'host_id' in request.GET: - reports = reports.filter(hostname=int(request.GET['host_id'])) + reports = reports.filter(hostname=request.GET['host_id']) if 'processed' in request.GET: processed = request.GET['processed'] == 'true' diff --git a/repos/templates/repos/repo_detail.html b/repos/templates/repos/repo_detail.html index d182a085..59401ab2 100644 --- a/repos/templates/repos/repo_detail.html +++ b/repos/templates/repos/repo_detail.html @@ -29,6 +29,7 @@ Security {% yes_no_img repo.security 'Security' 'Not Security' %} Enabled {% yes_no_img repo.enabled 'Enabled' 'Not Enabled' %} Mirrors {{ repo.mirror_set.count }} + Hosts with this Repository {{ repo.host_set.count }} Requires Authentication {{ repo.auth_required }} {% if user.is_authenticated and perms.is_admin %} diff --git a/repos/views.py b/repos/views.py index 0f575d30..47e80d75 100644 --- a/repos/views.py +++ b/repos/views.py @@ -43,11 +43,11 @@ def repo_list(request): if 'repotype' in request.GET: repos = repos.filter(repotype=request.GET['repotype']) - if 'arch' in request.GET: - repos = repos.filter(arch=request.GET['arch']) + if 'arch_id' in request.GET: + repos = repos.filter(arch=request.GET['arch_id']) - if 'osrelease' in request.GET: - repos = repos.filter(osrelease=int(request.GET['osrelease'])) + if 'osrelease_id' in request.GET: + repos = repos.filter(osrelease=request.GET['osrelease_id']) if 'security' in request.GET: security = request.GET['security'] == 'true' @@ -58,8 +58,7 @@ def repo_list(request): repos = repos.filter(enabled=enabled) if 'package_id' in request.GET: - repos = repos.filter( - mirror__packages=int(request.GET['package_id'])) + repos = repos.filter(mirror__packages=request.GET['package_id']) if 'search' in request.GET: terms = request.GET['search'].lower() @@ -84,11 +83,11 @@ def repo_list(request): page = 
paginator.page(paginator.num_pages) filter_list = [] - filter_list.append(Filter(request, 'OS Release', 'osrelease', OSRelease.objects.filter(repos__in=repos))) + filter_list.append(Filter(request, 'OS Release', 'osrelease_id', OSRelease.objects.filter(repos__in=repos))) filter_list.append(Filter(request, 'Enabled', 'enabled', {'true': 'Yes', 'false': 'No'})) filter_list.append(Filter(request, 'Security', 'security', {'true': 'Yes', 'false': 'No'})) filter_list.append(Filter(request, 'Repo Type', 'repotype', Repository.REPO_TYPES)) - filter_list.append(Filter(request, 'Architecture', 'arch', + filter_list.append(Filter(request, 'Architecture', 'arch_id', MachineArchitecture.objects.filter(repository__in=repos))) filter_bar = FilterBar(request, filter_list) diff --git a/security/views.py b/security/views.py index 3ab25cc9..eec2bfa9 100644 --- a/security/views.py +++ b/security/views.py @@ -72,13 +72,13 @@ def cve_list(request): cves = CVE.objects.select_related() if 'erratum_id' in request.GET: - cves = cves.filter(erratum=int(request.GET['erratum_id'])) + cves = cves.filter(erratum=request.GET['erratum_id']) if 'reference_id' in request.GET: - cves = cves.filter(references=int(request.GET['reference_id'])) + cves = cves.filter(references=request.GET['reference_id']) if 'package_id' in request.GET: - cves = cves.filter(packages=int(request.GET['package_id'])) + cves = cves.filter(packages=request.GET['package_id']) if 'cwe_id' in request.GET: cves = cves.filter(cwes__cwe_id=request.GET['cwe_id']) From 7deabb287aafe93138b3a1d6a27dbb9725ded728 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 27 Feb 2025 15:26:44 -0500 Subject: [PATCH 073/199] add space before glyphicon --- security/templates/security/cve_detail.html | 8 ++++---- security/templates/security/cwe_detail.html | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/security/templates/security/cve_detail.html b/security/templates/security/cve_detail.html index 16f5a3f8..f3b1ce0b 100644 
--- a/security/templates/security/cve_detail.html +++ b/security/templates/security/cve_detail.html @@ -36,7 +36,7 @@ CWEs {% for cwe in cve.cwes.all %} - {{ cwe.cwe_id }} - {{ cwe.name }}
    + {{ cwe.cwe_id }} - {{ cwe.name }} 
    {% endfor %} @@ -54,9 +54,9 @@ URLs - - - + + + {% for reference in cve.erratum_set.references.all %} diff --git a/security/templates/security/cwe_detail.html b/security/templates/security/cwe_detail.html index 5bab2a0d..c55ed683 100644 --- a/security/templates/security/cwe_detail.html +++ b/security/templates/security/cwe_detail.html @@ -11,7 +11,7 @@
    NIST
    MITRE
    osv.dev
    NIST
    MITRE
    osv.dev
    {{ reference.er_type }}
    - +
    CWE ID{{ cwe.cwe_id }}
    Name{{ cwe.name }}
    Name{{ cwe.name }}
    Description{{ cwe.description }}
    Affected CVEs{{ cwe.cve_set.count }}
    From c168a2b142d424023302e2d81c839893ef27daa4 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 27 Feb 2025 15:43:03 -0500 Subject: [PATCH 074/199] flake8 --- hosts/views.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/hosts/views.py b/hosts/views.py index d9614eb5..8de79dd7 100644 --- a/hosts/views.py +++ b/hosts/views.py @@ -93,7 +93,8 @@ def host_list(request): tags[tag.name] = tag.name filter_list.append(Filter(request, 'Tag', 'tag', tags)) filter_list.append(Filter(request, 'Domain', 'domain_id', Domain.objects.all())) - filter_list.append(Filter(request, 'OS Release', 'osrelease_id', OSRelease.objects.filter(osvariant__host__in=hosts))) + filter_list.append(Filter(request, 'OS Release', 'osrelease_id', + OSRelease.objects.filter(osvariant__host__in=hosts))) filter_list.append(Filter(request, 'OS Variant', 'osvariant_id', OSVariant.objects.filter(host__in=hosts))) filter_list.append(Filter(request, 'Architecture', 'arch_id', MachineArchitecture.objects.filter(host__in=hosts))) filter_list.append(Filter(request, 'Reboot Required', 'reboot_required', {'true': 'Yes', 'false': 'No'})) From 973105482308bf44c12c6c4a4ba6febea1cf849c Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 27 Feb 2025 20:51:54 -0500 Subject: [PATCH 075/199] improve package handling and duplicate removal --- packages/utils.py | 72 +++++++++++++++++++++-------------------------- repos/utils.py | 9 ++++-- util/tasks.py | 4 +-- 3 files changed, 40 insertions(+), 45 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index 2b906118..f57345db 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -166,7 +166,6 @@ def get_or_create_package(name, epoch, version, release, arch, p_type): """ Get or create a Package object. Returns the object. 
Returns None if the package is the pseudo package gpg-pubkey, or if it cannot create it """ - package = None name = name.lower() if name == 'gpg-pubkey': return @@ -174,45 +173,21 @@ def get_or_create_package(name, epoch, version, release, arch, p_type): if epoch in [None, 0, '0']: epoch = '' - try: - with transaction.atomic(): - package_names = PackageName.objects.all() - p_name, c = package_names.get_or_create(name=name) - except IntegrityError as e: - error_message.send(sender=None, text=e) - p_name = package_names.get(name=name) - except DatabaseError as e: - error_message.send(sender=None, text=e) + with transaction.atomic(): + package_name, c = PackageName.objects.get_or_create(name=name) - package_arches = PackageArchitecture.objects.all() with transaction.atomic(): - p_arch, c = package_arches.get_or_create(name=arch) - - packages = Package.objects.all() - potential_packages = packages.filter( - name=p_name, - arch=p_arch, - version=version, - release=release, - packagetype=p_type, - ).order_by('-epoch') - if potential_packages.exists(): - package = potential_packages[0] - if epoch and package.epoch != epoch: - package.epoch = epoch - with transaction.atomic(): - package.save() - else: - try: - with transaction.atomic(): - package = packages.create(name=p_name, - arch=p_arch, - epoch=epoch, - version=version, - release=release, - packagetype=p_type) - except DatabaseError as e: - error_message.send(sender=None, text=e) + package_arch, c = PackageArchitecture.objects.get_or_create(name=arch) + + with transaction.atomic(): + package, c = Package.objects.get_or_create( + name=package_name, + arch=package_arch, + epoch=epoch, + version=version, + release=release, + packagetype=p_type, + ) return package @@ -270,7 +245,7 @@ def get_or_create_package_update(oldpackage, newpackage, security): def get_matching_packages(name, epoch, version, release, p_type): - """ Get packges matching certain criteria + """ Get packages matching certain criteria Returns the 
matching packages or None """ try: @@ -308,8 +283,9 @@ def clean_packageupdates(): duplicate.delete() -def clean_packages(): +def clean_packages(remove_duplicates=False): """ Remove packages that are no longer in use + Optionally check for duplicate packages and remove those too """ packages = Package.objects.filter( mirror__isnull=True, @@ -323,6 +299,22 @@ def clean_packages(): else: info_message.send(sender=None, text=f'Removing {plen} orphaned Packages') packages.delete() + if remove_duplicates: + info_message.send(sender=None, text='Checking for duplicate Packages...') + for package in Package.objects.all(): + potential_duplicates = Package.objects.filter( + name=package.name, + arch=package.arch, + epoch=package.epoch, + version=package.version, + release=package.release, + packagetype=package.packagetype, + ) + if potential_duplicates.count() > 1: + for dupe in potential_duplicates: + if dupe.id != package.id: + info_message.send(sender=None, text=f'Removing duplicate Package {dupe}') + dupe.delete() def clean_packagenames(): diff --git a/repos/utils.py b/repos/utils.py index 339290c7..5ed97f61 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -99,9 +99,12 @@ def update_mirror_packages(mirror, packages): progress_info_s.send(sender=None, ptext=ptext, plen=nlen) for i, strpackage in enumerate(new): progress_update_s.send(sender=None, index=i + 1) - package = convert_packagestring_to_package(strpackage) - with transaction.atomic(): - mirror_package, c = MirrorPackage.objects.get_or_create(mirror=mirror, package=package) + try: + package = convert_packagestring_to_package(strpackage) + with transaction.atomic(): + mirror_package, c = MirrorPackage.objects.get_or_create(mirror=mirror, package=package) + except Package.MultipleObjectsReturned: + error_message.send(sender=None, text=f'Duplicate package found in {mirror}: {strpackage}') mirror.save() diff --git a/util/tasks.py b/util/tasks.py index 50c849d0..20e55db1 100644 --- a/util/tasks.py +++ b/util/tasks.py 
@@ -23,12 +23,12 @@ @shared_task -def clean_database(): +def clean_database(remove_duplicate_packages=False): """ Task to check the database and remove orphaned objects Runs all clean_* functions to check database consistency """ clean_packageupdates() - clean_packages() + clean_packages(remove_duplicates=remove_duplicate_packages) clean_packagenames() clean_architectures() clean_repos() From 9ed44d4f1b595bb62cc48337a7395cec4ad16f6a Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Fri, 28 Feb 2025 00:10:15 -0500 Subject: [PATCH 076/199] use thread pool for celery to allow child concurrency --- etc/systemd/patchman-celery.service | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/etc/systemd/patchman-celery.service b/etc/systemd/patchman-celery.service index 2da16299..805a3d19 100644 --- a/etc/systemd/patchman-celery.service +++ b/etc/systemd/patchman-celery.service @@ -8,7 +8,7 @@ Type=simple User=patchman-celery Group=patchman-celery EnvironmentFile=/etc/patchman/celery -ExecStart=/usr/bin/celery --broker redis://${REDIS_HOST}:${REDIS_PORT}/0 --app patchman worker --loglevel info --beat --scheduler django_celery_beat.schedulers:DatabaseScheduler --task-events +ExecStart=/usr/bin/celery --broker redis://${REDIS_HOST}:${REDIS_PORT}/0 --app patchman worker --loglevel info --beat --scheduler django_celery_beat.schedulers:DatabaseScheduler --task-events --pool threads [Install] WantedBy=multi-user.target From e4ea15dcc96ee170fe3f711ff359073207810ea8 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Fri, 28 Feb 2025 00:11:49 -0500 Subject: [PATCH 077/199] add button to find host updates from ui --- hosts/templates/hosts/host_detail.html | 1 + hosts/urls.py | 1 + hosts/views.py | 12 ++++++++++++ 3 files changed, 14 insertions(+) diff --git a/hosts/templates/hosts/host_detail.html b/hosts/templates/hosts/host_detail.html index 7a32e913..ea1267a5 100644 --- a/hosts/templates/hosts/host_detail.html +++ b/hosts/templates/hosts/host_detail.html @@ -58,6 
+58,7 @@ {% if user.is_authenticated and perms.is_admin %} {% bootstrap_icon "trash" %} Delete this Host {% bootstrap_icon "edit" %} Edit this Host + {% bootstrap_icon "refresh" %} Find Updates for this Host {% endif %}
    diff --git a/hosts/urls.py b/hosts/urls.py index 94954c48..b1521135 100644 --- a/hosts/urls.py +++ b/hosts/urls.py @@ -26,4 +26,5 @@ path('/', views.host_detail, name='host_detail'), path('/delete/', views.host_delete, name='host_delete'), path('/edit/', views.host_edit, name='host_edit'), + path('/updates/', views.host_find_updates, name='host_updates'), ] diff --git a/hosts/views.py b/hosts/views.py index 8de79dd7..0fc83ffa 100644 --- a/hosts/views.py +++ b/hosts/views.py @@ -167,6 +167,18 @@ def host_delete(request, hostname): 'reports': reports}) +@login_required +def host_find_updates(request, hostname): + """ Find updates using a celery task + """ + from hosts.tasks import find_host_updates + host = get_object_or_404(Host, hostname=hostname) + find_host_updates.delay(host.id) + text = f'Finding updates for Host {host}' + messages.info(request, text) + return redirect(host.get_absolute_url()) + + class HostViewSet(viewsets.ModelViewSet): """ API endpoint that allows hosts to be viewed or edited. 
From 5d94250ef256a67e61f57575061c1a0cda97f115 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Fri, 28 Feb 2025 00:12:22 -0500 Subject: [PATCH 078/199] imports at top of functions --- reports/views.py | 2 +- repos/views.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/reports/views.py b/reports/views.py index 80e1a654..792123ea 100644 --- a/reports/views.py +++ b/reports/views.py @@ -132,10 +132,10 @@ def report_detail(request, report_id): def report_process(request, report_id): """ Process a report using a celery task """ + from reports.tasks import process_report report = get_object_or_404(Report, id=report_id) report.processed = False report.save() - from reports.tasks import process_report process_report.delay(report.id) text = f'Report {report} is being processed' messages.info(request, text) diff --git a/repos/views.py b/repos/views.py index 47e80d75..e20771de 100644 --- a/repos/views.py +++ b/repos/views.py @@ -377,8 +377,8 @@ def repo_toggle_security(request, repo_id): def repo_refresh(request, repo_id): """ Refresh a repo using a celery task """ - repo = get_object_or_404(Repository, id=repo_id) from repos.tasks import refresh_repo + repo = get_object_or_404(Repository, id=repo_id) refresh_repo.delay(repo.id) text = f'Repostory {repo} is being refreshed' messages.info(request, text) From e5b0b494c9340cda371d0ccd6e49fe017ef1574a Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Fri, 28 Feb 2025 00:13:33 -0500 Subject: [PATCH 079/199] improve mirror list processing --- repos/utils.py | 12 ++++++++---- util/__init__.py | 2 +- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/repos/utils.py b/repos/utils.py index 5ed97f61..d83ab34f 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -166,7 +166,7 @@ def find_mirror_url(stored_mirror_url, formats): try: res = get_url(mirror_url) except RetryError: - return + continue if res is not None and res.ok: return res @@ -265,13 +265,17 @@ def get_mirrorlist_urls(url): except 
RetryError: return if response_is_valid(res): - if res.headers.get('content-type') == 'text/plain': + try: data = download_url(res, 'Downloading repo info:') if data is None: return mirror_urls = re.findall(r'^http[s]*://.*$|^ftp://.*$', data.decode('utf-8'), re.MULTILINE) if mirror_urls: return mirror_urls + else: + debug_message.send(sender=None, text=f'Not a mirrorlist: {url}') + except Exception as e: + error_message.send(sender=None, text=f'Error attempting to parse a mirror list: {e} {url}') def add_mirrors_from_urls(repo, mirror_urls): @@ -281,6 +285,7 @@ def add_mirrors_from_urls(repo, mirror_urls): for mirror_url in mirror_urls: mirror_url = mirror_url.replace('$ARCH', repo.arch.name) mirror_url = mirror_url.replace('$basearch', repo.arch.name) + mirror_url = mirror_url.rstrip('/') q = Q(mirrorlist=False, refresh=True, enabled=True) existing = repo.mirror_set.filter(q).count() if existing >= max_mirrors: @@ -304,8 +309,7 @@ def check_for_mirrorlists(repo): mirror.mirrorlist = True mirror.last_access_ok = True mirror.save() - text = f'Found mirrorlist - {mirror.url}' - info_message.send(sender=None, text=text) + info_message.send(sender=None, text=f'Found mirrorlist - {mirror.url}') add_mirrors_from_urls(repo, mirror_urls) diff --git a/util/__init__.py b/util/__init__.py index d396088b..e1ed4be7 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -116,7 +116,7 @@ def get_url(url, headers={}, params={}): debug_message.send(sender=None, text=f'Trying {url} headers:{headers} params:{params}') response = requests.get(url, headers=headers, params=params, stream=True, timeout=30) debug_message.send(sender=None, text=f'{response.status_code}: {response.headers}') - if response.status_code == 404: + if response.status_code in [403, 404]: return response response.raise_for_status() except requests.exceptions.TooManyRedirects: From 442441e80b0dc8a2e30b91b98a55da756b44c771 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Fri, 28 Feb 2025 00:14:18 -0500 Subject: 
[PATCH 080/199] handle amazon linux repos not adding package count --- repos/utils.py | 64 +++++++++++++++++++++++--------------------------- 1 file changed, 29 insertions(+), 35 deletions(-) diff --git a/repos/utils.py b/repos/utils.py index d83ab34f..4363d8d1 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -384,45 +384,39 @@ def extract_yum_packages(data, url): """ extracted = extract(data, url) ns = 'http://linux.duke.edu/metadata/common' - m_context = etree.iterparse(BytesIO(extracted), - tag=f'{{{ns}}}metadata') + m_context = etree.iterparse(BytesIO(extracted), tag=f'{{{ns}}}metadata') plen = int(next(m_context)[1].get('packages')) - p_context = etree.iterparse(BytesIO(extracted), - tag=f'{{{ns}}}package') + p_context = etree.iterparse(BytesIO(extracted), tag=f'{{{ns}}}package') packages = set() - if plen > 0: - ptext = 'Extracting packages: ' - progress_info_s.send(sender=None, ptext=ptext, plen=plen) + ptext = 'Extracting packages: ' + progress_info_s.send(sender=None, ptext=ptext, plen=plen) - for i, p_data in enumerate(p_context): - elem = p_data[1] - progress_update_s.send(sender=None, index=i + 1) - name = elem.xpath('//ns:name', - namespaces={'ns': ns})[0].text.lower() - arch = elem.xpath('//ns:arch', - namespaces={'ns': ns})[0].text - fullversion = elem.xpath('//ns:version', - namespaces={'ns': ns})[0] - epoch = fullversion.get('epoch') - version = fullversion.get('ver') - release = fullversion.get('rel') - elem.clear() - while elem.getprevious() is not None: - del elem.getparent()[0] - - if name != '' and version != '' and arch != '': - if epoch == '0': - epoch = '' - package = PackageString(name=name, - epoch=epoch, - version=version, - release=release, - arch=arch, - packagetype='R') - packages.add(package) - else: - info_message.send(sender=None, text='No packages found in repo') + for i, p_data in enumerate(p_context): + elem = p_data[1] + progress_update_s.send(sender=None, index=i + 1) + name = elem.xpath('//ns:name', namespaces={'ns': 
ns})[0].text.lower() + arch = elem.xpath('//ns:arch', namespaces={'ns': ns})[0].text + fullversion = elem.xpath('//ns:version', namespaces={'ns': ns})[0] + epoch = fullversion.get('epoch') + version = fullversion.get('ver') + release = fullversion.get('rel') + elem.clear() + while elem.getprevious() is not None: + del elem.getparent()[0] + + if name != '' and version != '' and arch != '': + if epoch == '0': + epoch = '' + package = PackageString( + name=name, + epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype='R', + ) + packages.add(package) return packages From 8a6f74a9b4851cd018dfbed4a0585dc06a2bb27f Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Fri, 28 Feb 2025 00:14:54 -0500 Subject: [PATCH 081/199] use bootstrap template tags consistently --- security/templates/security/cve_detail.html | 10 ++++++---- security/templates/security/cve_table.html | 8 ++++---- security/templates/security/cwe_detail.html | 4 +++- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/security/templates/security/cve_detail.html b/security/templates/security/cve_detail.html index f3b1ce0b..52c622a9 100644 --- a/security/templates/security/cve_detail.html +++ b/security/templates/security/cve_detail.html @@ -1,5 +1,7 @@ {% extends "base.html" %} +{% load common bootstrap3 %} + {% block page_title %}CVE - {{ cve }} {% endblock %} {% block breadcrumbs %} {{ block.super }}
  • Security
  • CVEs
  • {{ cve }}
  • {% endblock %} @@ -36,7 +38,7 @@ CWEs {% for cwe in cve.cwes.all %} - {{ cwe.cwe_id }} - {{ cwe.name }} 
    + {{ cwe.cwe_id }} - {{ cwe.name }} {% bootstrap_icon "link" %}
    {% endfor %} @@ -54,9 +56,9 @@ URLs - - - + + + {% for reference in cve.erratum_set.references.all %} diff --git a/security/templates/security/cve_table.html b/security/templates/security/cve_table.html index 493b1b98..63347b1e 100644 --- a/security/templates/security/cve_table.html +++ b/security/templates/security/cve_table.html @@ -1,4 +1,4 @@ -{% load common %} +{% load common bootstrap3 %}
    NIST
    MITRE
    osv.dev
    NIST {% bootstrap_icon "link" %}
    MITRE {% bootstrap_icon "link" %}
    osv.dev {% bootstrap_icon "link" %}
    {{ reference.er_type }}
    @@ -19,9 +19,9 @@ diff --git a/security/templates/security/cwe_detail.html b/security/templates/security/cwe_detail.html index c55ed683..d22587cf 100644 --- a/security/templates/security/cwe_detail.html +++ b/security/templates/security/cwe_detail.html @@ -1,5 +1,7 @@ {% extends "base.html" %} +{% load common bootstrap3 %} + {% block page_title %}CWE - {{ cwe }} {% endblock %} {% block breadcrumbs %} {{ block.super }}
  • Security
  • CWEs
  • {{ cwe }}
  • {% endblock %} @@ -11,7 +13,7 @@
    {{ cve.cve_id }} - NIST    - MITRE    - osv.dev + NIST {% bootstrap_icon "link" %}   + MITRE {% bootstrap_icon "link" %}   + osv.dev {% bootstrap_icon "link" %} {{ cve.description|truncatechars:60 }} {% for score in cve.cvss_scores.all %} {{ score.score }} {% endfor %}
    - +
    CWE ID{{ cwe.cwe_id }}
    Name{{ cwe.name }}
    Name{{ cwe.name }} {% bootstrap_icon "link" %}
    Description{{ cwe.description }}
    Affected CVEs{{ cwe.cve_set.count }}
    From eec921d99ea46f22ef7b749b1a5f53acd69cbdfb Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Fri, 28 Feb 2025 00:22:16 -0500 Subject: [PATCH 082/199] pyflakes --- .../0004_remove_host_tags_host_tags.py | 2 +- sbin/patchman | 4 +-- sbin/patchman-manage | 30 +++++++++---------- 3 files changed, 16 insertions(+), 20 deletions(-) diff --git a/hosts/migrations/0004_remove_host_tags_host_tags.py b/hosts/migrations/0004_remove_host_tags_host_tags.py index 053de91a..84e7affe 100644 --- a/hosts/migrations/0004_remove_host_tags_host_tags.py +++ b/hosts/migrations/0004_remove_host_tags_host_tags.py @@ -4,7 +4,7 @@ from django.db import migrations import taggit.managers try: - import tagging + import tagging # noqa except ImportError: pass diff --git a/sbin/patchman b/sbin/patchman index 64349760..78cbad3b 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -40,7 +40,7 @@ from repos.utils import clean_repos from reports.models import Report from reports.tasks import clean_reports_with_no_hosts from security.utils import update_cves, update_cwes -from util import set_verbosity, get_verbosity, get_datetime_now +from util import set_verbosity, get_datetime_now from patchman.signals import info_message @@ -324,8 +324,6 @@ def dns_checks(host=None): """ hosts = get_hosts(host, 'Checking rDNS') for host in hosts: - if get_verbosity(): - text = f'{str(host)[0:25].ljust(25)}: ' host.check_rdns() diff --git a/sbin/patchman-manage b/sbin/patchman-manage index 2d2ef09f..19bd04b4 100755 --- a/sbin/patchman-manage +++ b/sbin/patchman-manage @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2019-2021 Marcus Furlong +# Copyright 2019-2025 Marcus Furlong # # This file is part of Patchman. 
# @@ -19,21 +19,19 @@ import os import sys -if __name__ == '__main__': - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "patchman.settings") + +def main(): + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'patchman.settings') try: from django.core.management import execute_from_command_line - except ImportError: - # The above import may fail for some other reason. Ensure that the - # issue is really that Django is missing to avoid masking other - # exceptions on Python 2. - try: - import django - except ImportError: - raise ImportError( - "Couldn't import Django. Are you sure it's installed and " - "available on your PYTHONPATH environment variable? Did you " - "forget to activate a virtual environment?" - ) - raise + except ImportError as exc: + raise ImportError( + 'Could not import Django. Are you sure it is installed and ' + 'available on your PYTHONPATH environment variable? Did you ' + 'forget to activate a virtual environment?' + ) from exc execute_from_command_line(sys.argv) + + +if __name__ == '__main__': + main() From 388ea4b362d5bc28c350b771b2c433c8cc7e93d2 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Fri, 28 Feb 2025 00:23:03 -0500 Subject: [PATCH 083/199] add migrations for tz-aware DataTimeFields --- hosts/migrations/0006_migrate_to_tz_aware.py | 21 +++++++++++++++++++ .../migrations/0004_migrate_to_tz_aware.py | 18 ++++++++++++++++ repos/migrations/0003_migrate_to_tz_aware.py | 18 ++++++++++++++++ 3 files changed, 57 insertions(+) create mode 100644 hosts/migrations/0006_migrate_to_tz_aware.py create mode 100644 reports/migrations/0004_migrate_to_tz_aware.py create mode 100644 repos/migrations/0003_migrate_to_tz_aware.py diff --git a/hosts/migrations/0006_migrate_to_tz_aware.py b/hosts/migrations/0006_migrate_to_tz_aware.py new file mode 100644 index 00000000..e36bbf1f --- /dev/null +++ b/hosts/migrations/0006_migrate_to_tz_aware.py @@ -0,0 +1,21 @@ +from django.db import migrations +from django.utils import timezone + +def 
make_datetimes_tz_aware(apps, schema_editor): + Host = apps.get_model('hosts', 'Host') + for host in Host.objects.all(): + if host.lastreport and timezone.is_naive(host.lastreport): + host.lastreport = timezone.make_aware(host.lastreport, timezone=timezone.get_default_timezone()) + host.save() + if host.updated_at and timezone.is_naive(host.updated_at): + host.updated_at = timezone.make_aware(host.updated_at, timezone=timezone.get_default_timezone()) + host.save() + +class Migration(migrations.Migration): + dependencies = [ + ('hosts', '0005_rename_os_host_osvariant'), + ] + + operations = [ + migrations.RunPython(make_datetimes_tz_aware, migrations.RunPython.noop), + ] diff --git a/reports/migrations/0004_migrate_to_tz_aware.py b/reports/migrations/0004_migrate_to_tz_aware.py new file mode 100644 index 00000000..98176510 --- /dev/null +++ b/reports/migrations/0004_migrate_to_tz_aware.py @@ -0,0 +1,18 @@ +from django.db import migrations +from django.utils import timezone + +def make_datetimes_tz_aware(apps, schema_editor): + Report = apps.get_model('reports', 'Report') + for report in Report.objects.all(): + if report.created and timezone.is_naive(report.created): + report.created = timezone.make_aware(report.created, timezone=timezone.get_default_timezone()) + report.save() + +class Migration(migrations.Migration): + dependencies = [ + ('reports', '0003_remove_report_accessed'), + ] + + operations = [ + migrations.RunPython(make_datetimes_tz_aware, migrations.RunPython.noop), + ] diff --git a/repos/migrations/0003_migrate_to_tz_aware.py b/repos/migrations/0003_migrate_to_tz_aware.py new file mode 100644 index 00000000..dddd78ba --- /dev/null +++ b/repos/migrations/0003_migrate_to_tz_aware.py @@ -0,0 +1,18 @@ +from django.db import migrations +from django.utils import timezone + +def make_datetimes_tz_aware(apps, schema_editor): + Mirror = apps.get_model('repos', 'Mirror') + for mirror in Mirror.objects.all(): + if mirror.timestamp and 
timezone.is_naive(mirror.timestamp): + mirror.timestamp = timezone.make_aware(mirror.timestamp, timezone=timezone.get_default_timezone()) + mirror.save() + +class Migration(migrations.Migration): + dependencies = [ + ('repos', '0002_alter_repository_repotype'), + ] + + operations = [ + migrations.RunPython(make_datetimes_tz_aware, migrations.RunPython.noop), + ] From 4b90b8f2daedd51359fcec7698f0c7d730fadbb4 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 12:49:51 -0500 Subject: [PATCH 084/199] update TODO list --- TODO | 12 ++++++------ repos/models.py | 3 ++- repos/utils.py | 8 ++++---- util/__init__.py | 4 ++-- 4 files changed, 14 insertions(+), 13 deletions(-) diff --git a/TODO b/TODO index 2c3e98bc..74df99ea 100644 --- a/TODO +++ b/TODO @@ -1,9 +1,7 @@ -* allow sending updates from Red Hat / SuSE machines -* web interface support for updating repos, finding updates * add checkrestart-style options to see which services need restarting -* CVE/OVAL apps +* OVAL/OSCAP apps * CA support (tinyca?) -* native python client, using apt/yum/debian libraries +* native python/go client, using apt/yum/debian libraries * record the history of installed packages on a host * also store package descriptions/tags/urls * check for unused repos @@ -11,5 +9,7 @@ * helper script to change paths (e.g. 
/usr/lib/python3/dist-packages/patchman) * Dockerfile/Dockerimage * compressed reports -* add cronjobs to built packages -* install celery/rabbit/memcache with packages +* add cronjobs to build packages +* dnf5 support +* proxy support +* GLSA support diff --git a/repos/models.py b/repos/models.py index f00db4bd..b82bb94a 100644 --- a/repos/models.py +++ b/repos/models.py @@ -176,10 +176,11 @@ def fail(self): self.fail_count = self.fail_count + 1 if max_mirror_failures == -1: text = f'Mirror has failed {self.fail_count} times, but MAX_MIRROR_FAILURES=-1, not disabling refresh' + error_message.send(sender=None, text=text) elif self.fail_count > max_mirror_failures: self.refresh = False text = f'Mirror has failed {self.fail_count} times (max={max_mirror_failures}), disabling refresh' - error_message.send(sender=None, text=text) + error_message.send(sender=None, text=text) self.last_access_ok = False self.save() diff --git a/repos/utils.py b/repos/utils.py index 4363d8d1..68539740 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -256,9 +256,8 @@ def get_metalink_urls(url): def get_mirrorlist_urls(url): - """ Checks if a given url returns a mirrorlist by checking if it is of - type text/plain and contains a list of urls. Returns a list of - mirrors if it is a mirrorlist. + """ Checks if a given url returns a mirrorlist by checking if it contains + a list of urls. Returns a list of mirrors if it is a mirrorlist. 
""" try: res = get_url(url) @@ -803,7 +802,7 @@ def refresh_gentoo_repo(repo): refresh_gentoo_overlay_repo(repo) repo_type = 'overlay' ts = get_datetime_now() - for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True): + for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True): res = get_url(mirror.url + '.md5sum') data = download_url(res, 'Downloading repo info (1/2):') if data is None: @@ -898,6 +897,7 @@ def refresh_rpm_repo(repo): res = find_mirror_url(mirror.url, formats) if not res: + mirror.fail() continue mirror_url = res.url diff --git a/util/__init__.py b/util/__init__.py index e1ed4be7..4d53e796 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -104,8 +104,8 @@ def download_url(response, text='', ljust=35): @retry( retry=retry_if_exception_type(HTTPError | Timeout | ConnectionError | ConnectionResetError), - stop=stop_after_attempt(5), - wait=wait_exponential(multiplier=1, min=2, max=15), + stop=stop_after_attempt(4), + wait=wait_exponential(multiplier=1, min=1, max=10), reraise=False, ) def get_url(url, headers={}, params={}): From cfcefa1f8a33a7ddbb21ae84c5b4f158bea4dc03 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 12:53:27 -0500 Subject: [PATCH 085/199] update erratum creation function --- errata/utils.py | 36 ++++++++++++++++++++++++++++-------- 1 file changed, 28 insertions(+), 8 deletions(-) diff --git a/errata/utils.py b/errata/utils.py index 94d9fcef..7c9f5f1e 100644 --- a/errata/utils.py +++ b/errata/utils.py @@ -20,19 +20,39 @@ from util import tz_aware_datetime from errata.models import Erratum -from patchman.signals import progress_info_s, progress_update_s +from patchman.signals import progress_info_s, progress_update_s, warning_message def get_or_create_erratum(name, e_type, issue_date, synopsis): """ Get or create an Erratum object. 
Returns the object and created """ - with transaction.atomic(): - e, created = Erratum.objects.get_or_create( - name=name, - e_type=e_type, - issue_date=tz_aware_datetime(issue_date), - synopsis=synopsis, - ) + try: + e = Erratum.objects.get(name=name) + tz_aware_issue_date = tz_aware_datetime(issue_date) + updated = False + if e.e_type != e_type: + warning_message.send(sender=None, text=f'Updating {name} type `{e.e_type}` -> `{e_type}`') + e.e_type = e_type + updated = True + if e.issue_date != tz_aware_issue_date: + warning_message.send(sender=None, text=f'Updating {name} issue date `{e.issue_date}` -> `{tz_aware_issue_date}`') + e.issue_date = tz_aware_issue_date + updated = True + if e.synopsis != synopsis: + warning_message.send(sender=None, text=f'Updating {name} synopsis `{e.synopsis}` -> `{synopsis}`') + e.synopsis = synopsis + updated = True + if updated: + e.save() + created = False + except Erratum.DoesNotExist: + with transaction.atomic(): + e, created = Erratum.objects.get_or_create( + name=name, + e_type=e_type, + issue_date=tz_aware_datetime(issue_date), + synopsis=synopsis, + ) return e, created From da1438f020900043beb006d8ae7afc8425d2d501 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 12:57:02 -0500 Subject: [PATCH 086/199] update progress bar type --- util/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/util/__init__.py b/util/__init__.py index 4d53e796..9f942f25 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -59,7 +59,7 @@ def create_pbar(ptext, plength, ljust=35, **kwargs): global pbar, verbose if verbose and plength > 0: jtext = str(ptext).ljust(ljust) - pbar = tqdm(total=plength, desc=jtext, position=0, leave=True) + pbar = tqdm(total=plength, desc=jtext, position=0, leave=True, ascii=' >=') return pbar From 002c541919832e223162b4c84576fb7d2b2357f5 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 12:57:24 -0500 Subject: [PATCH 087/199] don't retry ConnectionErrors 
--- util/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/util/__init__.py b/util/__init__.py index 9f942f25..b0a80b31 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -103,7 +103,7 @@ def download_url(response, text='', ljust=35): @retry( - retry=retry_if_exception_type(HTTPError | Timeout | ConnectionError | ConnectionResetError), + retry=retry_if_exception_type(HTTPError | Timeout | ConnectionResetError), stop=stop_after_attempt(4), wait=wait_exponential(multiplier=1, min=1, max=10), reraise=False, @@ -121,6 +121,8 @@ def get_url(url, headers={}, params={}): response.raise_for_status() except requests.exceptions.TooManyRedirects: error_message.send(sender=None, text=f'Too many redirects - {url}') + except ConnectionError: + error_message.send(sender=None, text=f'Connection error - {url}') return response From 02c1a523d76af46c0a59855f8c152581d062f41b Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 12:57:39 -0500 Subject: [PATCH 088/199] consistent formatting --- util/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/util/__init__.py b/util/__init__.py index b0a80b31..57308bae 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -204,7 +204,7 @@ def extract(data, fmt): m = magic.open(magic.MAGIC_MIME) m.load() mime = m.buffer(data).split(';')[0] - if (mime == 'application/x-xz' or fmt.endswith('xz')): + if mime == 'application/x-xz' or fmt.endswith('xz'): return unxz(data) elif mime == 'application/x-bzip2' or fmt.endswith('bz2'): return bunzip2(data) From 6376a41b5057861cd74943f2083e1933884a2c22 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 12:58:01 -0500 Subject: [PATCH 089/199] add more cases to tz_aware_datetime --- util/__init__.py | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/util/__init__.py b/util/__init__.py index 57308bae..742239da 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -20,11 +20,12 
@@ import magic import zlib import lzma -from datetime import datetime +from datetime import datetime, timezone from enum import Enum from hashlib import md5, sha1, sha256, sha512 from requests.exceptions import HTTPError, Timeout, ConnectionError from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential +from time import time from tqdm import tqdm from patchman.signals import error_message, info_message, debug_message @@ -254,16 +255,31 @@ def get_md5(data): return md5(data).hexdigest() +def is_epoch_time(timestamp): + """ Checks if an integer is likely a valid epoch timestamp. + Returns True if the integer is likely a valid epoch timestamp, False otherwise. + """ + try: + ts = int(timestamp) + except ValueError: + return False + current_time = int(time()) + lower_bound = 0 + upper_bound = current_time + 3600 * 24 * 365 # up to a year in the future + return lower_bound <= ts <= upper_bound + + def tz_aware_datetime(date): """ Ensure a datetime is timezone-aware Returns the tz-aware datetime object """ - if isinstance(date, int): - parsed_date = datetime.fromtimestamp(date) + if isinstance(date, int) or is_epoch_time(date): + parsed_date = datetime.fromtimestamp(int(date)) elif isinstance(date, str): parsed_date = parse_datetime(date) else: parsed_date = date + parsed_date = parsed_date.replace(tzinfo=timezone.utc) if not parsed_date.tzinfo: parsed_date = make_aware(parsed_date) return parsed_date From b47d2980913601f8ab9b5da1fb25084acdcea31e Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 13:04:52 -0500 Subject: [PATCH 090/199] add category to package duplicate detection --- packages/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/utils.py b/packages/utils.py index f57345db..a4d37f51 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -309,6 +309,7 @@ def clean_packages(remove_duplicates=False): version=package.version, release=package.release, packagetype=package.packagetype, + 
category=package.category, ) if potential_duplicates.count() > 1: for dupe in potential_duplicates: From 629bdb6fe84732d43f4c3f518e997f1327bad6ca Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 15:06:37 -0500 Subject: [PATCH 091/199] update progressbars --- errata/sources/distros/alma.py | 6 ++---- errata/sources/distros/arch.py | 6 ++---- errata/sources/distros/centos.py | 3 +-- errata/sources/distros/debian.py | 6 ++---- errata/sources/distros/rocky.py | 6 ++---- errata/sources/distros/ubuntu.py | 6 ++---- errata/utils.py | 3 +-- reports/utils.py | 15 ++++----------- repos/utils.py | 32 +++++++++++++++----------------- 9 files changed, 31 insertions(+), 52 deletions(-) diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index 629fa6a0..22e61c9e 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -66,8 +66,7 @@ def process_alma_errata_serially(release, advisories): """ Process Alma Linux Errata serially """ elen = len(advisories) - ptext = f'Processing {elen} Alma Errata:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) + progress_info_s.send(sender=None, ptext=f'Processing {elen} Alma Errata', plen=elen) for i, advisory in enumerate(advisories): process_alma_erratum(release, advisory) progress_update_s.send(sender=None, index=i + 1) @@ -77,8 +76,7 @@ def process_alma_errata_concurrently(release, advisories): """ Process Alma Linux Errata concurrently """ elen = len(advisories) - ptext = f'Processing {elen} Alma Errata:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) + progress_info_s.send(sender=None, ptext=f'Processing {elen} Alma Errata', plen=elen) i = 0 with concurrent.futures.ProcessPoolExecutor(max_workers=10) as executor: futures = [executor.submit(process_alma_erratum, release, advisory) for advisory in advisories] diff --git a/errata/sources/distros/arch.py b/errata/sources/distros/arch.py index a3e296d4..0e474159 100644 --- 
a/errata/sources/distros/arch.py +++ b/errata/sources/distros/arch.py @@ -56,8 +56,7 @@ def parse_arch_errata_serially(advisories): """ osrelease = OSRelease.objects.get(name='Arch Linux') elen = len(advisories) - ptext = f'Processing {elen} Arch Advisories:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) + progress_info_s.send(sender=None, ptext=f'Processing {elen} Arch Advisories', plen=elen) for i, advisory in enumerate(advisories): process_arch_erratum(advisory, osrelease) progress_update_s.send(sender=None, index=i + 1) @@ -68,8 +67,7 @@ def parse_arch_errata_concurrently(advisories): """ osrelease = OSRelease.objects.get(name='Arch Linux') elen = len(advisories) - ptext = f'Processing {elen} Arch Advisories:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) + progress_info_s.send(sender=None, ptext=f'Processing {elen} Arch Advisories', plen=elen) i = 0 with concurrent.futures.ProcessPoolExecutor(max_workers=3) as executor: futures = [executor.submit(process_arch_erratum, advisory, osrelease) for advisory in advisories] diff --git a/errata/sources/distros/centos.py b/errata/sources/distros/centos.py index aca5a48c..83655c03 100644 --- a/errata/sources/distros/centos.py +++ b/errata/sources/distros/centos.py @@ -69,8 +69,7 @@ def parse_centos_errata(data): result = etree.XML(data) errata_xml = result.findall('*') elen = len(errata_xml) - ptext = f'Processing {elen} Errata:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) + progress_info_s.send(sender=None, ptext=f'Processing {elen} CentOS Errata', plen=elen) for i, child in enumerate(errata_xml): progress_update_s.send(sender=None, index=i + 1) releases = get_centos_erratum_releases(child.findall('os_release')) diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index fa1f7610..d9183917 100644 --- a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -131,8 +131,7 @@ def create_debian_errata_serially(errata, 
accepted_codenames): """ Create Debian Errata Serially """ elen = len(errata) - text = f'Processing {elen} Debian Errata:' - progress_info_s.send(sender=None, ptext=text, plen=elen) + progress_info_s.send(sender=None, ptext=f'Processing {elen} Debian Errata', plen=elen) for i, erratum in enumerate(errata): process_debian_erratum(erratum, accepted_codenames) progress_update_s.send(sender=None, index=i + 1) @@ -142,8 +141,7 @@ def create_debian_errata_concurrently(errata, accepted_codenames): """ Create Debian Errata concurrently """ elen = len(errata) - text = f'Processing {elen} Debian Errata:' - progress_info_s.send(sender=None, ptext=text, plen=elen) + progress_info_s.send(sender=None, ptext=f'Processing {elen} Debian Errata', plen=elen) i = 0 with concurrent.futures.ProcessPoolExecutor(max_workers=200) as executor: futures = [executor.submit(process_debian_erratum, erratum, accepted_codenames) for erratum in errata] diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py index 1baa37fb..4ef5e246 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -149,8 +149,7 @@ def process_rocky_errata_serially(advisories): """ Process Rocky Linux errata serially """ elen = len(advisories) - ptext = f'Processing {elen} Rocky Errata:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) + progress_info_s.send(sender=None, ptext=f'Processing {elen} Rocky Errata', plen=elen) for i, advisory in enumerate(advisories): process_rocky_erratum(advisory) progress_update_s.send(sender=None, index=i + 1) @@ -160,8 +159,7 @@ def process_rocky_errata_concurrently(advisories): """ Process Rocky Linux errata concurrently """ elen = len(advisories) - ptext = f'Processing {elen} Rocky Errata:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) + progress_info_s.send(sender=None, ptext=f'Processing {elen} Rocky Errata', plen=elen) i = 0 with concurrent.futures.ProcessPoolExecutor(max_workers=100) as executor: futures = 
[executor.submit(process_rocky_erratum, advisory) for advisory in advisories] diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py index a6991693..8bb2ed50 100644 --- a/errata/sources/distros/ubuntu.py +++ b/errata/sources/distros/ubuntu.py @@ -77,8 +77,7 @@ def parse_usn_data_serially(advisories, accepted_releases): """ Parse the Ubuntu USN data serially """ elen = len(advisories) - ptext = f'Processing {elen} Ubuntu Errata:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) + progress_info_s.send(sender=None, ptext=f'Processing {elen} Ubuntu Errata', plen=elen) for i, (usn_id, advisory) in enumerate(advisories.items()): process_usn(usn_id, advisory, accepted_releases) progress_update_s.send(sender=None, index=i + 1) @@ -88,8 +87,7 @@ def parse_usn_data_concurrently(advisories, accepted_releases): """ Parse the Ubuntu USN data concurrently """ elen = len(advisories) - ptext = f'Processing {elen} Ubuntu Errata:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) + progress_info_s.send(sender=None, ptext=f'Processing {elen} Ubuntu Errata', plen=elen) i = 0 with concurrent.futures.ProcessPoolExecutor(max_workers=10) as executor: futures = [executor.submit(process_usn, usn_id, advisory, accepted_releases) diff --git a/errata/utils.py b/errata/utils.py index 7c9f5f1e..7d16aa76 100644 --- a/errata/utils.py +++ b/errata/utils.py @@ -119,8 +119,7 @@ def mark_errata_security_updates(): should be marked as a security update. 
""" elen = Erratum.objects.count() - ptext = f'Scanning {elen} Errata:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) + progress_info_s.send(sender=None, ptext=f'Scanning {elen} Errata', plen=elen) for i, e in enumerate(Erratum.objects.all()): progress_update_s.send(sender=None, index=i + 1) e.scan_for_security_updates() diff --git a/reports/utils.py b/reports/utils.py index 5c65d58a..8b82c5fe 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -37,9 +37,7 @@ def process_repos(report, host): host_repos = HostRepo.objects.filter(host=host) repos = parse_repos(report.repos) - progress_info_s.send(sender=None, - ptext=f'{str(host)[0:25]} repos', - plen=len(repos)) + progress_info_s.send(sender=None, ptext=f'{host} Repos', plen=len(repos)) for i, repo_str in enumerate(repos): repo, priority = process_repo(repo_str, report.arch) if repo: @@ -72,9 +70,7 @@ def process_modules(report, host): module_ids = [] modules = parse_modules(report.modules) - progress_info_s.send(sender=None, - ptext=f'{str(host)[0:25]} modules', - plen=len(modules)) + progress_info_s.send(sender=None, ptext=f'{host} Modules', plen=len(modules)) for i, module_str in enumerate(modules): module = process_module(module_str) if module: @@ -100,9 +96,7 @@ def process_packages(report, host): package_ids = [] packages = parse_packages(report.packages) - progress_info_s.send(sender=None, - ptext=f'{str(host)[0:25]} packages', - plen=len(packages)) + progress_info_s.send(sender=None, ptext=f'{host} Packages', plen=len(packages)) for i, pkg_str in enumerate(packages): package = process_package(pkg_str, report.protocol) if package: @@ -156,8 +150,7 @@ def add_updates(updates, host): host.updates.remove(host_update) ulen = len(updates) if ulen > 0: - ptext = f'{str(host)[0:25]} updates' - progress_info_s.send(sender=None, ptext=ptext, plen=ulen) + progress_info_s.send(sender=None, ptext=f'{host} Updates', plen=ulen) for i, (u, sec) in enumerate(updates.items()): update = process_update(host, 
u, sec) diff --git a/repos/utils.py b/repos/utils.py index 68539740..f20acfb1 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -39,8 +39,8 @@ convert_package_to_packagestring, convert_packagestring_to_package from util import get_url, download_url, response_is_valid, extract, get_checksum, Checksum, get_setting_of_type, \ get_datetime_now -from patchman.signals import progress_info_s, progress_update_s, \ - info_message, warning_message, error_message, debug_message +from patchman.signals import info_message, warning_message, error_message, debug_message, \ + progress_info_s, progress_update_s def get_or_create_repo(r_name, r_arch, r_type, r_id=None): @@ -76,9 +76,8 @@ def update_mirror_packages(mirror, packages): old = set() mirror_packages = mirror.packages.all() - mlen = mirror_packages.count() - ptext = 'Fetching existing packages:' - progress_info_s.send(sender=None, ptext=ptext, plen=mlen) + plen = mirror_packages.count() + progress_info_s.send(sender=None, ptext=f'Fetching {plen} existing Packages', plen=plen) for i, package in enumerate(mirror_packages): progress_update_s.send(sender=None, index=i + 1) strpackage = convert_package_to_packagestring(package) @@ -86,8 +85,7 @@ def update_mirror_packages(mirror, packages): removals = old.difference(packages) rlen = len(removals) - ptext = f'Removing {rlen} obsolete packages:' - progress_info_s.send(sender=None, ptext=ptext, plen=rlen) + progress_info_s.send(sender=None, ptext=f'Removing {rlen} obsolete Packages', plen=rlen) for i, strpackage in enumerate(removals): progress_update_s.send(sender=None, index=i + 1) package = convert_packagestring_to_package(strpackage) @@ -95,8 +93,7 @@ def update_mirror_packages(mirror, packages): new = packages.difference(old) nlen = len(new) - ptext = f'Adding {nlen} new packages:' - progress_info_s.send(sender=None, ptext=ptext, plen=nlen) + progress_info_s.send(sender=None, ptext=f'Adding {nlen} new Packages', plen=nlen) for i, strpackage in enumerate(new): 
progress_update_s.send(sender=None, index=i + 1) try: @@ -338,8 +335,12 @@ def extract_module_metadata(data, url, repo): try: modules_yaml = yaml.safe_load_all(extracted) except yaml.YAMLError as e: - error_message.send(sender=None, text=e) - for doc in modules_yaml: + error_message.send(sender=None, text=f'Error parsing modules.yaml: {e}') + + mlen = len(re.findall(r'---', yaml.dump(extracted.decode()))) + progress_info_s.send(sender=None, ptext=f'Extracting {mlen} Modules ', plen=mlen) + for i, doc in enumerate(modules_yaml): + progress_update_s.send(sender=None, index=i + 1) document = doc['document'] modulemd = doc['data'] if document == 'modulemd': @@ -432,8 +433,7 @@ def extract_deb_packages(data, url): packages = set() if plen > 0: - ptext = 'Extracting packages: ' - progress_info_s.send(sender=None, ptext=ptext, plen=plen) + progress_info_s.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) for i, stanza in enumerate(Packages.iter_paragraphs(extracted)): # https://github.com/furlongm/patchman/issues/55 if 'version' not in stanza: @@ -470,8 +470,7 @@ def extract_yast_packages(data): packages = set() if plen > 0: - ptext = 'Extracting packages: ' - progress_info_s.send(sender=None, ptext=ptext, plen=plen) + progress_info_s.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) for i, pkg in enumerate(pkgs): progress_update_s.send(sender=None, index=i + 1) @@ -497,8 +496,7 @@ def extract_arch_packages(data): packages = set() plen = len(tf.getnames()) if plen > 0: - ptext = 'Extracting packages: ' - progress_info_s.send(sender=None, ptext=ptext, plen=plen) + progress_info_s.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) for i, tarinfo in enumerate(tf): progress_update_s.send(sender=None, index=i + 1) if tarinfo.isfile(): From 9f8c18ed555d2faf364dcb8d3ac2ef15201baad0 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 15:06:59 -0500 Subject: [PATCH 092/199] update erratum creation --- errata/utils.py | 
8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/errata/utils.py b/errata/utils.py index 7d16aa76..16e9d1bd 100644 --- a/errata/utils.py +++ b/errata/utils.py @@ -28,15 +28,15 @@ def get_or_create_erratum(name, e_type, issue_date, synopsis): """ try: e = Erratum.objects.get(name=name) - tz_aware_issue_date = tz_aware_datetime(issue_date) + issue_date_tz = tz_aware_datetime(issue_date) updated = False if e.e_type != e_type: warning_message.send(sender=None, text=f'Updating {name} type `{e.e_type}` -> `{e_type}`') e.e_type = e_type updated = True - if e.issue_date != tz_aware_issue_date: - warning_message.send(sender=None, text=f'Updating {name} issue date `{e.issue_date}` -> `{tz_aware_issue_date}`') - e.issue_date = tz_aware_issue_date + if e.issue_date != issue_date_tz: + warning_message.send(sender=None, text=f'Updating {name} issue date `{e.issue_date}` -> `{issue_date_tz}`') + e.issue_date = issue_date_tz updated = True if e.synopsis != synopsis: warning_message.send(sender=None, text=f'Updating {name} synopsis `{e.synopsis}` -> `{synopsis}`') From f28dc8d18bceb8cf1cd75f77e0e41c51d6531e6c Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 15:07:27 -0500 Subject: [PATCH 093/199] add task to update rpm errata --- errata/tasks.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/errata/tasks.py b/errata/tasks.py index b90d2297..e1b686c9 100644 --- a/errata/tasks.py +++ b/errata/tasks.py @@ -16,14 +16,20 @@ from celery import shared_task -from security.tasks import update_cves, update_cwes -from util import get_setting_of_type from errata.sources.distros.arch import update_arch_errata from errata.sources.distros.alma import update_alma_errata from errata.sources.distros.debian import update_debian_errata from errata.sources.distros.centos import update_centos_errata from errata.sources.distros.rocky import update_rocky_errata from errata.sources.distros.ubuntu import update_ubuntu_errata +from 
repos.models import Repository +from security.tasks import update_cves, update_cwes +from util import get_setting_of_type + + +def update_rpm_repo_errata(): + for repo in Repository.objects.filter(repotype=Repository.RPM): + repo.refresh_errata() @shared_task @@ -56,6 +62,7 @@ def update_errata(): pass if 'centos' in errata_os_updates: update_centos_errata() + update_rpm_repo_errata() @shared_task From 3108df3630267955fe6c07e69956c5919bdbbc27 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 15:12:32 -0500 Subject: [PATCH 094/199] formatting updates --- repos/forms.py | 3 +-- repos/models.py | 9 +------- repos/serializers.py | 8 +++---- repos/utils.py | 53 +++++++++++++++++++------------------------- util/__init__.py | 2 +- 5 files changed, 29 insertions(+), 46 deletions(-) diff --git a/repos/forms.py b/repos/forms.py index 7ca97206..84f75363 100644 --- a/repos/forms.py +++ b/repos/forms.py @@ -15,8 +15,7 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see -from django.forms import ModelForm, ModelMultipleChoiceField, TextInput, \ - Form, ModelChoiceField, ValidationError +from django.forms import ModelForm, ModelMultipleChoiceField, TextInput, Form, ModelChoiceField, ValidationError from django.contrib.admin.widgets import FilteredSelectMultiple from repos.models import Repository, Mirror diff --git a/repos/models.py b/repos/models.py index b82bb94a..68d82723 100644 --- a/repos/models.py +++ b/repos/models.py @@ -133,9 +133,7 @@ class Mirror(models.Model): last_access_ok = models.BooleanField(default=False) file_checksum = models.CharField(max_length=255, blank=True, null=True) timestamp = models.DateTimeField(auto_now_add=True) - packages = models.ManyToManyField(Package, - blank=True, - through='MirrorPackage') + packages = models.ManyToManyField(Package, blank=True, through='MirrorPackage') mirrorlist = models.BooleanField(default=False) enabled = models.BooleanField(default=True) refresh = models.BooleanField(default=True) @@ -184,11 +182,6 @@ def fail(self): self.last_access_ok = False self.save() - def update_packages(self, packages): - """ Update the packages associated with a mirror - """ - update_mirror_packages(self, packages) - class MirrorPackage(models.Model): mirror = models.ForeignKey(Mirror, on_delete=models.CASCADE) diff --git a/repos/serializers.py b/repos/serializers.py index 548048f2..cbf74e19 100644 --- a/repos/serializers.py +++ b/repos/serializers.py @@ -22,16 +22,14 @@ class RepositorySerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Repository - fields = ('id', 'name', 'arch', 'security', 'repotype', 'enabled', - 'auth_required') + fields = ('id', 'name', 'arch', 'security', 'repotype', 'enabled', 'auth_required') class MirrorSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Mirror - fields = ('id', 'repo', 'url', 'last_access_ok', 'file_checksum', - 'timestamp', 'mirrorlist', 'enabled', 'refresh', - 'fail_count') + fields = ('id', 
'repo', 'url', 'last_access_ok', 'packages_checksum', + 'timestamp', 'mirrorlist', 'enabled', 'refresh', 'fail_count') class MirrorPackageSerializer(serializers.HyperlinkedModelSerializer): diff --git a/repos/utils.py b/repos/utils.py index f20acfb1..1bcf7382 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -159,7 +159,7 @@ def find_mirror_url(stored_mirror_url, formats): if mirror_url.endswith(f): mirror_url = mirror_url[:-len(f)] mirror_url = mirror_url.rstrip('/') + '/' + fmt - debug_message.send(sender=None, text=f'Checking for mirror at {mirror_url}') + debug_message.send(sender=None, text=f'Checking for Mirror at {mirror_url}') try: res = get_url(mirror_url) except RetryError: @@ -262,16 +262,17 @@ def get_mirrorlist_urls(url): return if response_is_valid(res): try: - data = download_url(res, 'Downloading repo info:') + data = download_url(res, 'Downloading Repo data') if data is None: return mirror_urls = re.findall(r'^http[s]*://.*$|^ftp://.*$', data.decode('utf-8'), re.MULTILINE) if mirror_urls: + return mirror_urls else: debug_message.send(sender=None, text=f'Not a mirrorlist: {url}') except Exception as e: - error_message.send(sender=None, text=f'Error attempting to parse a mirror list: {e} {url}') + error_message.send(sender=None, text=f'Error attempting to parse a mirrorlist: {e} {url}') def add_mirrors_from_urls(repo, mirror_urls): @@ -285,13 +286,13 @@ def add_mirrors_from_urls(repo, mirror_urls): q = Q(mirrorlist=False, refresh=True, enabled=True) existing = repo.mirror_set.filter(q).count() if existing >= max_mirrors: - text = f'{existing} mirrors already exist (max={max_mirrors}), not adding any more' + text = f'{existing} Mirrors already exist (max={max_mirrors}), not adding any more' warning_message.send(sender=None, text=text) break from repos.models import Mirror m, c = Mirror.objects.get_or_create(repo=repo, url=mirror_url) if c: - text = f'Added mirror - {mirror_url}' + text = f'Added Mirror - {mirror_url}' info_message.send(sender=None, 
text=text) @@ -366,17 +367,11 @@ def extract_module_metadata(data, url, repo): package_ids = [] for package in packages: package_ids.append(package.id) - try: - with transaction.atomic(): - module.packages.add(package) - except IntegrityError as e: - error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) - modules.add(module) + module.packages.add(package) for package in module.packages.all(): if package.id not in package_ids: module.packages.remove(package) + modules.add(module) def extract_yum_packages(data, url): @@ -644,7 +639,7 @@ def refresh_arch_repo(repo): enabled_mirrors = repo.mirror_set.filter(refresh=True, enabled=True) for i, mirror in enumerate(enabled_mirrors): if i >= max_mirrors: - text = f'{max_mirrors} mirrors already refreshed (max={max_mirrors}), skipping further refreshes' + text = f'{max_mirrors} Mirrors already refreshed (max={max_mirrors}), skipping further refreshes' warning_message.send(sender=None, text=text) break @@ -652,13 +647,13 @@ def refresh_arch_repo(repo): if not res: continue mirror_url = res.url - text = f'Found arch repo - {mirror_url}' + text = f'Found Arch Repo - {mirror_url}' info_message.send(sender=None, text=text) package_data = fetch_mirror_data( mirror=mirror, url=mirror_url, - text='Downloading repo info:') + text='Downloading Repo data') if not package_data: continue @@ -761,12 +756,10 @@ def extract_gentoo_packages(mirror, data): def extract_gentoo_overlay_packages(mirror): - from packages.utils import find_evr t = tempfile.mkdtemp() git.Repo.clone_from(mirror.url, t, branch='master', depth=1) packages = set() - with transaction.atomic(): - arch, c = PackageArchitecture.objects.get_or_create(name='any') + arch, c = PackageArchitecture.objects.get_or_create(name='any') for root, dirs, files in os.walk(t): for name in files: if fnmatch(name, '*.ebuild'): @@ -802,7 +795,7 @@ def refresh_gentoo_repo(repo): ts = get_datetime_now() for mirror in 
repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True): res = get_url(mirror.url + '.md5sum') - data = download_url(res, 'Downloading repo info (1/2):') + data = download_url(res, 'Downloading Repo checksum') if data is None: mirror.fail() continue @@ -817,12 +810,12 @@ def refresh_gentoo_repo(repo): res = get_url(mirror.url) mirror.last_access_ok = response_is_valid(res) if mirror.last_access_ok: - data = download_url(res, 'Downloading repo info (2/2):') + data = download_url(res, 'Downloading Repo data') if data is None: mirror.fail() continue extracted = extract(data, mirror.url) - text = f'Found gentoo repo - {mirror.url}' + text = f'Found Gentoo Repo - {mirror.url}' info_message.send(sender=None, text=text) computed_checksum = get_checksum(data, Checksum.md5) if not mirror_checksum_is_valid(computed_checksum, checksum, mirror, 'package'): @@ -851,7 +844,7 @@ def refresh_yast_repo(mirror, data): package_data = fetch_mirror_data( mirror=mirror, url=package_url, - text='Downloading yast repo info:') + text='Downloading yast Repo data') if not package_data: return @@ -889,7 +882,7 @@ def refresh_rpm_repo(repo): enabled_mirrors = repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True) for i, mirror in enumerate(enabled_mirrors): if i >= max_mirrors: - text = f'{max_mirrors} mirrors already refreshed (max={max_mirrors}), skipping further refreshes' + text = f'{max_mirrors} Mirrors already refreshed (max={max_mirrors}), skipping further refreshes' warning_message.send(sender=None, text=text) break @@ -902,16 +895,16 @@ def refresh_rpm_repo(repo): repo_data = fetch_mirror_data( mirror=mirror, url=mirror_url, - text='Downloading repo info:') + text='Downloading Repo data') if not repo_data: continue if mirror_url.endswith('content'): - text = f'Found yast rpm repo - {mirror_url}' + text = f'Found yast rpm Repo - {mirror_url}' info_message.send(sender=None, text=text) refresh_yast_repo(mirror, repo_data) else: - text = f'Found yum rpm repo - 
{mirror_url}' + text = f'Found yum rpm Repo - {mirror_url}' info_message.send(sender=None, text=text) refresh_yum_repo(mirror, repo_data, mirror_url, ts) mirror.timestamp = ts @@ -933,13 +926,13 @@ def refresh_deb_repo(repo): if not res: continue mirror_url = res.url - text = f'Found deb repo - {mirror_url}' + text = f'Found deb Repo - {mirror_url}' info_message.send(sender=None, text=text) package_data = fetch_mirror_data( mirror=mirror, url=mirror_url, - text='Downloading repo info:') + text='Downloading Repo data') if not package_data: continue @@ -987,7 +980,7 @@ def get_max_mirrors(): max_mirrors = get_setting_of_type( setting_name='MAX_MIRRORS', setting_type=int, - default=5, + default=3, ) return max_mirrors diff --git a/util/__init__.py b/util/__init__.py index 742239da..01b0cd96 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -20,7 +20,7 @@ import magic import zlib import lzma -from datetime import datetime, timezone +from datetime import timezone from enum import Enum from hashlib import md5, sha1, sha256, sha512 from requests.exceptions import HTTPError, Timeout, ConnectionError From 0528af12ac1430943717489264daa2f222ab3d09 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 15:16:21 -0500 Subject: [PATCH 095/199] rename checksum and add module/errata checksums --- repos/forms.py | 4 +-- ...e_file_checksum_mirror_package_checksum.py | 18 +++++++++++++ ...ckage_checksum_mirror_packages_checksum.py | 18 +++++++++++++ ...errata_checksum_mirror_modules_checksum.py | 23 ++++++++++++++++ repos/models.py | 8 +++--- repos/templates/repos/mirror_delete.html | 2 +- repos/templates/repos/mirror_detail.html | 2 +- repos/templates/repos/mirror_edit_repo.html | 2 +- repos/templates/repos/mirror_table.html | 2 +- repos/utils.py | 27 ++++++++++--------- repos/views.py | 4 +-- util/views.py | 6 ++--- 12 files changed, 89 insertions(+), 27 deletions(-) create mode 100644 repos/migrations/0004_rename_file_checksum_mirror_package_checksum.py create 
mode 100644 repos/migrations/0005_rename_package_checksum_mirror_packages_checksum.py create mode 100644 repos/migrations/0006_mirror_errata_checksum_mirror_modules_checksum.py diff --git a/repos/forms.py b/repos/forms.py index 84f75363..0800a5c3 100644 --- a/repos/forms.py +++ b/repos/forms.py @@ -90,9 +90,9 @@ class Media: def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['url'].widget = TextInput(attrs={'size': 150},) - self.fields['file_checksum'].widget = TextInput(attrs={'size': 100},) + self.fields['packages_checksum'].widget = TextInput(attrs={'size': 100},) class Meta: model = Mirror fields = ('repo', 'url', 'enabled', 'refresh', 'mirrorlist', - 'last_access_ok', 'fail_count', 'file_checksum') + 'last_access_ok', 'fail_count', 'packages_checksum') diff --git a/repos/migrations/0004_rename_file_checksum_mirror_package_checksum.py b/repos/migrations/0004_rename_file_checksum_mirror_package_checksum.py new file mode 100644 index 00000000..3a5c0d77 --- /dev/null +++ b/repos/migrations/0004_rename_file_checksum_mirror_package_checksum.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.19 on 2025-03-01 15:50 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('repos', '0003_migrate_to_tz_aware'), + ] + + operations = [ + migrations.RenameField( + model_name='mirror', + old_name='file_checksum', + new_name='package_checksum', + ), + ] diff --git a/repos/migrations/0005_rename_package_checksum_mirror_packages_checksum.py b/repos/migrations/0005_rename_package_checksum_mirror_packages_checksum.py new file mode 100644 index 00000000..8b25a20f --- /dev/null +++ b/repos/migrations/0005_rename_package_checksum_mirror_packages_checksum.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.19 on 2025-03-01 15:54 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('repos', '0004_rename_file_checksum_mirror_package_checksum'), + ] + + operations = [ 
+ migrations.RenameField( + model_name='mirror', + old_name='package_checksum', + new_name='packages_checksum', + ), + ] diff --git a/repos/migrations/0006_mirror_errata_checksum_mirror_modules_checksum.py b/repos/migrations/0006_mirror_errata_checksum_mirror_modules_checksum.py new file mode 100644 index 00000000..67f13c36 --- /dev/null +++ b/repos/migrations/0006_mirror_errata_checksum_mirror_modules_checksum.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.19 on 2025-03-01 15:55 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('repos', '0005_rename_package_checksum_mirror_packages_checksum'), + ] + + operations = [ + migrations.AddField( + model_name='mirror', + name='errata_checksum', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AddField( + model_name='mirror', + name='modules_checksum', + field=models.CharField(blank=True, max_length=255, null=True), + ), + ] diff --git a/repos/models.py b/repos/models.py index 68d82723..5a1480ae 100644 --- a/repos/models.py +++ b/repos/models.py @@ -82,7 +82,7 @@ def refresh(self, force=False): if force: for mirror in self.mirror_set.all(): - mirror.file_checksum = None + mirror.packages_checksum = None mirror.save() if not self.auth_required: @@ -131,7 +131,9 @@ class Mirror(models.Model): repo = models.ForeignKey(Repository, on_delete=models.CASCADE) url = models.CharField(max_length=255, unique=True) last_access_ok = models.BooleanField(default=False) - file_checksum = models.CharField(max_length=255, blank=True, null=True) + packages_checksum = models.CharField(max_length=255, blank=True, null=True) + modules_checksum = models.CharField(max_length=255, blank=True, null=True) + errata_checksum = models.CharField(max_length=255, blank=True, null=True) timestamp = models.DateTimeField(auto_now_add=True) packages = models.ManyToManyField(Package, blank=True, through='MirrorPackage') mirrorlist = 
models.BooleanField(default=False) @@ -154,7 +156,7 @@ def show(self): """ text = f' {self.id} : {self.url}\n' text += ' last updated: ' - text += f'{self.timestamp} checksum: {self.file_checksum}\n' + text += f'{self.timestamp} checksum: {self.packages_checksum}\n' info_message.send(sender=None, text=text) def fail(self): diff --git a/repos/templates/repos/mirror_delete.html b/repos/templates/repos/mirror_delete.html index 2b203a15..4ecfb982 100644 --- a/repos/templates/repos/mirror_delete.html +++ b/repos/templates/repos/mirror_delete.html @@ -21,7 +21,7 @@ Last Access OK {% yes_no_img mirror.last_access_ok 'True' 'False' %} Fail Count {{ mirror.fail_count }} Timestamp {{ mirror.timestamp }} - Checksum {{ mirror.file_checksum }} + Checksum {{ mirror.packages_checksum }}
    diff --git a/repos/templates/repos/mirror_detail.html b/repos/templates/repos/mirror_detail.html index 9d772b95..dfe3da62 100644 --- a/repos/templates/repos/mirror_detail.html +++ b/repos/templates/repos/mirror_detail.html @@ -21,7 +21,7 @@ Last Access OK {% yes_no_img mirror.last_access_ok 'True' 'False' %} Fail Count {{ mirror.fail_count }} Timestamp {{ mirror.timestamp }} - Checksum {{ mirror.file_checksum }} + Checksum {{ mirror.packages_checksum }} {% if user.is_authenticated and perms.is_admin %} {% bootstrap_icon "trash" %} Delete this Mirror diff --git a/repos/templates/repos/mirror_edit_repo.html b/repos/templates/repos/mirror_edit_repo.html index a2e18e2a..1a785538 100644 --- a/repos/templates/repos/mirror_edit_repo.html +++ b/repos/templates/repos/mirror_edit_repo.html @@ -26,7 +26,7 @@ {% yes_no_img mirror.mirrorlist 'Yes' 'No' %} {% yes_no_img mirror.last_access_ok 'Yes' 'No' %} {{ mirror.timestamp }} - {{ mirror.file_checksum|truncatechars:16 }} + {{ mirror.packages_checksum|truncatechars:16 }} {% endfor %} diff --git a/repos/templates/repos/mirror_table.html b/repos/templates/repos/mirror_table.html index 2e09986c..e5b40129 100644 --- a/repos/templates/repos/mirror_table.html +++ b/repos/templates/repos/mirror_table.html @@ -30,7 +30,7 @@ {% yes_no_img mirror.mirrorlist 'Yes' 'No' %} {% yes_no_img mirror.last_access_ok 'Yes' 'No' %} {{ mirror.timestamp }} - {% if not mirror.mirrorlist %}{{ mirror.file_checksum|truncatechars:16 }}{% endif %} + {% if not mirror.mirrorlist %}{{ mirror.packages_checksum|truncatechars:16 }}{% endif %} {% bootstrap_icon "trash" %} Delete this Mirror {% bootstrap_icon "edit" %} Edit this Mirror diff --git a/repos/utils.py b/repos/utils.py index 1bcf7382..1ba088a3 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -593,12 +593,13 @@ def refresh_yum_repo(mirror, data, mirror_url, ts): if not package_data: return - if mirror.file_checksum == primary_checksum: - text = 'Mirror checksum has not changed, not refreshing 
package metadata' + if mirror.packages_checksum == checksum: + text = 'Mirror Packages checksum has not changed, skipping Package refresh' warning_message.send(sender=None, text=text) return else: - mirror.file_checksum = primary_checksum + mirror.packages_checksum = checksum + mirror.save() # only refresh X mirrors, where X = max_mirrors max_mirrors = get_max_mirrors() @@ -658,12 +659,12 @@ def refresh_arch_repo(repo): continue computed_checksum = get_checksum(package_data, Checksum.sha1) - if mirror.file_checksum == computed_checksum: - text = 'Mirror checksum has not changed, not refreshing package metadata' + if mirror.packages_checksum == computed_checksum: + text = 'Mirror checksum has not changed, not refreshing Package metadata' warning_message.send(sender=None, text=text) continue else: - mirror.file_checksum = computed_checksum + mirror.packages_checksum = computed_checksum packages = extract_arch_packages(package_data) update_mirror_packages(mirror, packages) @@ -803,8 +804,8 @@ def refresh_gentoo_repo(repo): if checksum is None: mirror.fail() continue - if mirror.file_checksum == checksum: - text = 'Mirror checksum has not changed, not refreshing package metadata' + if mirror.packages_checksum == checksum: + text = 'Mirror checksum has not changed, not refreshing Package metadata' warning_message.send(sender=None, text=text) continue res = get_url(mirror.url) @@ -821,7 +822,7 @@ def refresh_gentoo_repo(repo): if not mirror_checksum_is_valid(computed_checksum, checksum, mirror, 'package'): continue else: - mirror.file_checksum = checksum + mirror.packages_checksum = checksum if repo_type == 'main': packages = extract_gentoo_packages(mirror, extracted) elif repo_type == 'overlay': @@ -848,7 +849,7 @@ def refresh_yast_repo(mirror, data): if not package_data: return - mirror.file_checksum = 'yast' + mirror.packages_checksum = 'yast' packages = extract_yast_packages(package_data) if packages: update_mirror_packages(mirror, packages) @@ -937,12 +938,12 @@ def 
refresh_deb_repo(repo): continue computed_checksum = get_checksum(package_data, Checksum.sha1) - if mirror.file_checksum == computed_checksum: - text = 'Mirror checksum has not changed, not refreshing package metadata' + if mirror.packages_checksum == computed_checksum: + text = 'Mirror checksum has not changed, not refreshing Package metadata' warning_message.send(sender=None, text=text) continue else: - mirror.file_checksum = computed_checksum + mirror.packages_checksum = computed_checksum packages = extract_deb_packages(package_data, mirror_url) if not packages: diff --git a/repos/views.py b/repos/views.py index e20771de..199c834e 100644 --- a/repos/views.py +++ b/repos/views.py @@ -132,7 +132,7 @@ def move_mirrors(repo): if oldrepo.mirror_set.count() == 0: oldrepo.delete() - mirrors = Mirror.objects.select_related().order_by('file_checksum') + mirrors = Mirror.objects.select_related().order_by('packages_checksum') checksum = None if 'checksum' in request.GET: @@ -140,7 +140,7 @@ def move_mirrors(repo): if 'checksum' in request.POST: checksum = request.POST['checksum'] if checksum is not None: - mirrors = mirrors.filter(file_checksum=checksum) + mirrors = mirrors.filter(packages_checksum=checksum) if 'repo_id' in request.GET: mirrors = mirrors.filter(repo=request.GET['repo_id']) diff --git a/util/views.py b/util/views.py index 25567272..e9aed9ef 100644 --- a/util/views.py +++ b/util/views.py @@ -88,10 +88,10 @@ def dashboard(request): checksums = {} possible_mirrors = {} - for csvalue in Mirror.objects.all().values('file_checksum').distinct(): - checksum = csvalue['file_checksum'] + for csvalue in Mirror.objects.all().values('packages_checksum').distinct(): + checksum = csvalue['packages_checksum'] if checksum is not None and checksum != 'yast': - for mirror in Mirror.objects.filter(file_checksum=checksum): + for mirror in Mirror.objects.filter(packages_checksum=checksum): if mirror.packages.count() > 0: if checksum not in checksums: checksums[checksum] = [] 
From 9daa639a02b84f6f9d8bd6b9689e1ba25f250ba0 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 15:19:53 -0500 Subject: [PATCH 096/199] refactor repo handling to add rpm errata support --- repos/models.py | 6 +- repos/utils.py | 502 ++++++++++++++++++++++++++++++----------------- requirements.txt | 1 - 3 files changed, 329 insertions(+), 180 deletions(-) diff --git a/repos/models.py b/repos/models.py index 5a1480ae..3e367635 100644 --- a/repos/models.py +++ b/repos/models.py @@ -23,7 +23,7 @@ from util import get_setting_of_type from repos.utils import refresh_deb_repo, refresh_rpm_repo, refresh_arch_repo, refresh_gentoo_repo, \ - update_mirror_packages + refresh_yum_repo_errata from patchman.signals import info_message, warning_message, error_message @@ -101,6 +101,10 @@ def refresh(self, force=False): text = 'Repo requires certificate authentication, not updating' warning_message.send(sender=None, text=text) + def refresh_errata(self): + if self.repotype == Repository.RPM: + refresh_yum_repo_errata(self) + def disable(self): """ Disable a repo. 
This involves disabling each mirror, which stops it being considered for package updates, and disabling refresh for diff --git a/repos/utils.py b/repos/utils.py index 1ba088a3..b9e47884 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -24,7 +24,8 @@ import tempfile import yaml from io import BytesIO -from defusedxml.lxml import _etree as etree + +from defusedxml import ElementTree as ET from debian.debian_support import Version from debian.deb822 import Packages from fnmatch import fnmatch @@ -101,53 +102,35 @@ def update_mirror_packages(mirror, packages): with transaction.atomic(): mirror_package, c = MirrorPackage.objects.get_or_create(mirror=mirror, package=package) except Package.MultipleObjectsReturned: - error_message.send(sender=None, text=f'Duplicate package found in {mirror}: {strpackage}') + error_message.send(sender=None, text=f'Duplicate Package found in {mirror}: {strpackage}') mirror.save() -def get_primary_url(mirror_url, data): - +def get_repomd_url(mirror_url, data, url_type='primary'): if isinstance(data, str): - if data.startswith('Bad repo - not in list') or \ - data.startswith('Invalid repo'): + if data.startswith('Bad repo - not in list') or data.startswith('Invalid repo'): return None, None, None - ns = 'http://linux.duke.edu/metadata/repo' - try: - context = etree.parse(BytesIO(data), etree.XMLParser()) - except etree.XMLSyntaxError: - context = etree.parse(BytesIO(extract(data, 'gz')), etree.XMLParser()) - location = context.xpath("//ns:data[@type='primary']/ns:location/@href", - namespaces={'ns': ns})[0] - checksum = context.xpath("//ns:data[@type='primary']/ns:checksum", - namespaces={'ns': ns})[0].text - csum_type = context.xpath("//ns:data[@type='primary']/ns:checksum/@type", - namespaces={'ns': ns})[0] - url = str(mirror_url.rsplit('/', 2)[0]) + '/' + location - return url, checksum, csum_type - - -def get_modules_url(mirror_url, data): - if isinstance(data, str): - if data.startswith('Bad repo - not in list') or \ - 
data.startswith('Invalid repo'): - return None, None, None ns = 'http://linux.duke.edu/metadata/repo' + extracted = extract(data, mirror_url) + location = None try: - context = etree.parse(BytesIO(data), etree.XMLParser()) - except etree.XMLSyntaxError: - context = etree.parse(BytesIO(extract(data, 'gz')), etree.XMLParser()) - try: - location = context.xpath("//ns:data[@type='modules']/ns:location/@href", - namespaces={'ns': ns})[0] - except IndexError: + tree = ET.parse(BytesIO(extracted)) + root = tree.getroot() + for child in root: + if child.attrib.get('type') == url_type: + for grandchild in child: + if grandchild.tag == f'{{{ns}}}location': + location = grandchild.attrib.get('href') + if grandchild.tag == f'{{{ns}}}checksum': + checksum = grandchild.text + checksum_type = grandchild.attrib.get('type') + except ET.ParseError as e: + error_message.send(sender=None, text=(f'Error parsing repomd from {mirror_url}: {e}')) + if not location: return None, None, None - checksum = context.xpath("//ns:data[@type='modules']/ns:checksum", - namespaces={'ns': ns})[0].text - csum_type = context.xpath("//ns:data[@type='modules']/ns:checksum/@type", - namespaces={'ns': ns})[0] url = str(mirror_url.rsplit('/', 2)[0]) + '/' + location - return url, checksum, csum_type + return url, checksum, checksum_type def find_mirror_url(stored_mirror_url, formats): @@ -171,30 +154,34 @@ def find_mirror_url(stored_mirror_url, formats): def get_gentoo_mirror_urls(): """ Use the Gentoo API to find http(s) mirrors """ - res = get_url('https://api.gentoo.org/mirrors/distfiles.xml') + gentoo_distfiles_url = 'https://api.gentoo.org/mirrors/distfiles.xml' + res = get_url(gentoo_distfiles_url) if not res: return mirrors = {} - tree = etree.parse(BytesIO(res.content)) - root = tree.getroot() - for child in root: - if child.tag == 'mirrorgroup': - for k, v in child.attrib.items(): - if k == 'region': - region = v - elif k == 'country': - country = v - for mirror in child: - for element in mirror: - 
if element.tag == 'name': - name = element.text - mirrors[name] = {} - mirrors[name]['region'] = region - mirrors[name]['country'] = country - mirrors[name]['urls'] = [] - elif element.tag == 'uri': - if element.get('protocol') == 'http': - mirrors[name]['urls'].append(element.text) + try: + tree = ET.parse(BytesIO(res.content)) + root = tree.getroot() + for child in root: + if child.tag == 'mirrorgroup': + for k, v in child.attrib.items(): + if k == 'region': + region = v + elif k == 'country': + country = v + for mirror in child: + for element in mirror: + if element.tag == 'name': + name = element.text + mirrors[name] = {} + mirrors[name]['region'] = region + mirrors[name]['country'] = country + mirrors[name]['urls'] = [] + elif element.tag == 'uri': + if element.get('protocol') == 'http': + mirrors[name]['urls'].append(element.text) + except ET.ParseError as e: + error_message.send(sender=None, text=f'Error parsing {gentoo_distfiles_url}: {e}') mirror_urls = [] # for now, ignore region data and choose MAX_MIRRORS mirrors at random for _, v in mirrors.items(): @@ -206,21 +193,25 @@ def get_gentoo_mirror_urls(): def get_gentoo_overlay_mirrors(repo_name): """Get the gentoo overlay repos that match repo.id """ - res = get_url('https://api.gentoo.org/overlays/repositories.xml') + gentoo_overlays_url = 'https://api.gentoo.org/overlays/repositories.xml' + res = get_url(gentoo_overlays_url) if not res: return - tree = etree.parse(BytesIO(res.content)) - root = tree.getroot() mirrors = [] - for child in root: - if child.tag == 'repo': - found = False - for element in child: - if element.tag == 'name' and element.text == repo_name: - found = True - if found and element.tag == 'source': - if element.text.startswith('http'): - mirrors.append(element.text) + try: + tree = ET.parse(BytesIO(res.content)) + root = tree.getroot() + for child in root: + if child.tag == 'repo': + found = False + for element in child: + if element.tag == 'name' and element.text == repo_name: + 
found = True + if found and element.tag == 'source': + if element.text.startswith('http'): + mirrors.append(element.text) + except ET.ParseError as e: + error_message.send(sender=None, text=f'Error parsing {gentoo_overlays_url}: {e}') return mirrors @@ -237,19 +228,30 @@ def get_metalink_urls(url): res = get_url(url) except RetryError: return - if response_is_valid(res): - if 'content-type' in res.headers and \ - res.headers['content-type'] == 'application/metalink+xml': - data = download_url(res, 'Downloading repo info:') - ns = 'http://www.metalinker.org/' - try: - context = etree.parse(BytesIO(data), etree.XMLParser()) - except etree.XMLSyntaxError: - context = etree.parse(BytesIO(extract(data, 'gz')), - etree.XMLParser()) - xpath = "//ns:files/ns:file[@name='repomd.xml']/ns:resources/ns:url[@protocol='https']" # noqa - metalink_urls = context.xpath(xpath, namespaces={'ns': ns}) - return [x.text for x in metalink_urls] + if not response_is_valid(res): + return + if not res.headers.get('content-type') == 'application/metalink+xml': + return + metalink_urls = [] + data = download_url(res, 'Downloading metalink data') + extracted = extract(data, url) + ns = 'http://www.metalinker.org/' + try: + tree = ET.parse(BytesIO(extracted)) + root = tree.getroot() + for child in root: + if child.tag == f'{{{ns}}}files': + for grandchild in child: + if grandchild.tag == f'{{{ns}}}file': + for greatgrandchild in grandchild: + if greatgrandchild.tag == f'{{{ns}}}resources': + for greatgreatgrandchild in greatgrandchild: + if greatgreatgrandchild.tag == f'{{{ns}}}url': + if greatgreatgrandchild.attrib.get('protocol') in ['https', 'http']: + metalink_urls.append(greatgreatgrandchild.text) + except ET.ParseError as e: + error_message.send(sender=None, text=f'Error parsing metalink {url}: {e}') + return metalink_urls def get_mirrorlist_urls(url): @@ -379,39 +381,52 @@ def extract_yum_packages(data, url): """ extracted = extract(data, url) ns = 'http://linux.duke.edu/metadata/common' 
- m_context = etree.iterparse(BytesIO(extracted), tag=f'{{{ns}}}metadata') - plen = int(next(m_context)[1].get('packages')) - p_context = etree.iterparse(BytesIO(extracted), tag=f'{{{ns}}}package') packages = set() - - ptext = 'Extracting packages: ' - progress_info_s.send(sender=None, ptext=ptext, plen=plen) - - for i, p_data in enumerate(p_context): - elem = p_data[1] - progress_update_s.send(sender=None, index=i + 1) - name = elem.xpath('//ns:name', namespaces={'ns': ns})[0].text.lower() - arch = elem.xpath('//ns:arch', namespaces={'ns': ns})[0].text - fullversion = elem.xpath('//ns:version', namespaces={'ns': ns})[0] - epoch = fullversion.get('epoch') - version = fullversion.get('ver') - release = fullversion.get('rel') - elem.clear() - while elem.getprevious() is not None: - del elem.getparent()[0] - - if name != '' and version != '' and arch != '': - if epoch == '0': - epoch = '' - package = PackageString( - name=name, - epoch=epoch, - version=version, - release=release, - arch=arch, - packagetype='R', - ) - packages.add(package) + try: + context = ET.iterparse(BytesIO(extracted), events=('start', 'end')) + for event, elem in context: + if event == 'start': + if elem.tag == f'{{{ns}}}metadata': + plen = int(elem.attrib.get('packages')) + break + progress_info_s.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) + i = 0 + for event, elem in context: + if event == 'start': + if elem.tag == f'{{{ns}}}package': + if elem.attrib.get('type') == 'rpm': + name = version = release = arch = '' + elif event == 'end': + if elem.tag == f'{{{ns}}}name': + name = elem.text.lower() + elif elem.tag == f'{{{ns}}}arch': + arch = elem.text + elif elem.tag == f'{{{ns}}}version': + fullversion = elem + epoch = fullversion.get('epoch') + version = fullversion.get('ver') + release = fullversion.get('rel') + elif elem.tag == f'{{{ns}}}package': + if name and version and release and arch: + if epoch == '0': + epoch = '' + package = PackageString( + name=name, + 
epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype='R', + ) + packages.add(package) + progress_update_s.send(sender=None, index=i + 1) + i += 1 + else: + text = f'Error parsing Package: {name} {epoch} {version} {release} {arch}' + error_message.send(sender=None, text=text) + elem.clear() + except ET.ParseError as e: + error_message.send(sender=None, text=f'Error parsing yum primary.xml from {url}: {e}') return packages @@ -578,19 +593,135 @@ def mirror_checksum_is_valid(computed, provided, mirror, metadata_type): return True -def refresh_yum_repo(mirror, data, mirror_url, ts): - """ Refresh package metadata for a yum-style rpm mirror - and add the packages to the mirror +def extract_updateinfo(data, url): + """ Parses updateinfo.xml and extracts package/errata information """ - primary_url, primary_checksum, primary_checksum_type = get_primary_url(mirror_url, data) - package_data = fetch_mirror_data( + print(url) + from errata.utils import get_or_create_erratum + extracted = extract(data, url) + updates = [] + try: + tree = ET.parse(BytesIO(extracted)) + root = tree.getroot() + elen = root.__len__() + progress_info_s.send(sender=None, ptext=f'Extracting {elen} rpm Errata', plen=elen) + for i, update in enumerate(root.findall('update')): + progress_update_s.send(sender=None, index=i + 1) + e_type = update.attrib.get('type') + name = update.find('id').text + synopsis = update.find('title').text + issue_date = update.find('issued').attrib.get('date') + e, created = get_or_create_erratum(name, e_type, issue_date, synopsis) + + xreferences = update.find('references') + for reference in xreferences.findall('reference'): + if reference.attrib.get('type') == 'cve': + cve_id = reference.attrib.get('id') + e.add_cve(cve_id) + else: + ref = reference.attrib.get('href') + e.add_reference('Link', ref) + + osrelease_name = None + release = update.find('release') + if release: + osrelease_name = release.text + + pkglist = update.find('pkglist') + 
packages = set() + for collection in pkglist.findall('collection'): + if not osrelease_name: + collection_name = collection.find('name') + if collection_name is not None: + osrelease_name = collection_name.text + from operatingsystems.models import OSRelease + if osrelease_name: + osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) + e.osreleases.add(osrelease) + # TODO for opensuse, add if repo is associated with an os release + + for pkg in collection.findall('package'): + name = pkg.attrib.get('name') + epoch = pkg.attrib.get('epoch') + version = pkg.attrib.get('version') + release = pkg.attrib.get('release') + arch = pkg.attrib.get('arch') + package = get_or_create_package( + name=name.lower(), + epoch=epoch, + version=version, + release=release, + arch=arch, + p_type='R', + ) + packages.add(package) + e.add_packages(packages) + e.save() + except ET.ParseError as e: + error_message.send(sender=None, text=f'Error parsing updateinfo file: {e}') + return updates + + +def refresh_rpm_updateinfo(mirror, data, mirror_url): + url, checksum, checksum_type = get_repomd_url(mirror_url, data, url_type='updateinfo') + data = fetch_mirror_data( + mirror=mirror, + url=url, + checksum=checksum, + checksum_type=checksum_type, + text='Downloading Errata data', + metadata_type='updateinfo') + + if not mirror.last_access_ok: + return + + if mirror.modules_checksum == checksum: + text = 'Mirror Errata checksum has not changed, skipping Erratum refresh' + warning_message.send(sender=None, text=text) + return + else: + mirror.modules_checksum = checksum + mirror.save() + + extract_updateinfo(data, url) + + +def refresh_rpm_modules(mirror, data, mirror_url): + url, checksum, checksum_type = get_repomd_url(mirror_url, data, url_type='modules') + if url: + data = fetch_mirror_data( + mirror=mirror, + url=url, + checksum=checksum, + checksum_type=checksum_type, + text='Downloading Module data', + metadata_type='module') + + if not mirror.last_access_ok: + return + 
+ if mirror.modules_checksum == checksum: + text = 'Mirror Modules checksum has not changed, skipping Module refresh' + warning_message.send(sender=None, text=text) + return + else: + mirror.modules_checksum = checksum + mirror.save() + + extract_module_metadata(data, url, mirror.repo) + + +def refresh_rpm_primary(mirror, data, mirror_url, ts): + url, checksum, checksum_type = get_repomd_url(mirror_url, data, url_type='primary') + data = fetch_mirror_data( mirror=mirror, - url=primary_url, - checksum=primary_checksum, - checksum_type=primary_checksum_type, - text='Downloading package info:', + url=url, + checksum=checksum, + checksum_type=checksum_type, + text='Downloading Package data', metadata_type='package') - if not package_data: + + if not mirror.last_access_ok: return if mirror.packages_checksum == checksum: @@ -603,31 +734,29 @@ def refresh_yum_repo(mirror, data, mirror_url, ts): # only refresh X mirrors, where X = max_mirrors max_mirrors = get_max_mirrors() - mirrors_q = Q(mirrorlist=False, refresh=True, enabled=True, timestamp=ts, file_checksum=primary_checksum) - have_checksum = mirror.repo.mirror_set.filter(mirrors_q).count() - if have_checksum >= max_mirrors: - text = f'{max_mirrors} mirrors already have this checksum, skipping refresh' + mirrors_q = Q(mirrorlist=False, refresh=True, enabled=True, timestamp=ts, packages_checksum=checksum) + have_checksum_and_ts = mirror.repo.mirror_set.filter(mirrors_q).count() + if have_checksum_and_ts >= max_mirrors: + text = f'{max_mirrors} Mirrors already have this checksum and timestamp, skipping Package refresh' info_message.send(sender=None, text=text) return - packages = extract_yum_packages(package_data, primary_url) + packages = extract_yum_packages(data, url) if packages: update_mirror_packages(mirror, packages) - packages.clear() - modules_url, modules_checksum, modules_checksum_type = get_modules_url(mirror_url, data) - if modules_url: - module_data = fetch_mirror_data( - mirror=mirror, - url=modules_url, 
- checksum=modules_checksum, - checksum_type=modules_checksum_type, - text='Downloading module info:', - metadata_type='module') - if module_data: - extract_module_metadata(module_data, modules_url, mirror.repo) - mirror.save() +def refresh_yum_repo(mirror, data, mirror_url, ts, errata_only): + """ Refresh package, module and updateinfo/errata data for a yum-style rpm Mirror + """ + if not errata_only: + refresh_rpm_primary(mirror, data, mirror_url, ts) + refresh_rpm_modules(mirror, data, mirror_url) + refresh_rpm_updateinfo(mirror, data, mirror_url) + + +def refresh_yum_repo_errata(repo): + refresh_rpm_repo_mirrors(repo, errata_only=True) def refresh_arch_repo(repo): @@ -724,35 +853,49 @@ def get_gentoo_ebuild_keywords(content): return keywords -def extract_gentoo_packages(mirror, data): - extracted_files = {} +def extract_gentoo_ebuilds(data): + extracted_ebuilds = {} with tarfile.open(fileobj=io.BytesIO(data), mode='r') as tar: for member in tar.getmembers(): - if member.isfile(): + if member.isfile() and member.name.endswith('ebuild') and not member.name.endswith('skel.ebuild'): file_content = tar.extractfile(member).read() - extracted_files[member.name] = file_content + extracted_ebuilds[member.name] = file_content + return extracted_ebuilds + + +def extract_gentoo_packages(mirror, data): + extracted_ebuilds = extract_gentoo_ebuilds(data) + return extract_gentoo_packages_from_ebuilds(extracted_ebuilds) + + +def extract_gentoo_packages_from_ebuilds(extracted_ebuilds): + if not extracted_ebuilds: + return + packages = set() - for path, content in extracted_files.items(): - if fnmatch(path, '*.ebuild'): - components = path.split(os.sep) - if len(components) < 4: - continue - category = components[1] - name = components[2] - evr = components[3].replace(f'{name}-', '').replace('.ebuild', '') - epoch, version, release = find_evr(evr) - arches = get_gentoo_ebuild_keywords(content) - for arch in arches: - package = PackageString( - name=name.lower(), - epoch=epoch, - 
version=version, - release=release, - arch=arch, - packagetype='G', - category=category, - ) - packages.add(package) + flen = len(extracted_ebuilds) + progress_info_s.send(sender=None, ptext=f'Processing {flen} ebuilds', plen=flen) + for i, (path, content) in enumerate(extracted_ebuilds.items()): + progress_update_s.send(sender=None, index=i + 1) + components = path.split(os.sep) + category = components[1] + name = components[2] + evr = components[3].replace(f'{name}-', '').replace('.ebuild', '') + epoch, version, release = find_evr(evr) + arches = get_gentoo_ebuild_keywords(content) + for arch in arches: + package = PackageString( + name=name.lower(), + epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype='G', + category=category, + ) + packages.add(package) + plen = len(packages) + info_message.send(sender=None, text=f'Extracted {plen} Packages', plen=plen) return packages @@ -857,12 +1000,19 @@ def refresh_yast_repo(mirror, data): def refresh_rpm_repo(repo): - """ Refresh an rpm repo. - Checks if the repo url is a mirrorlist, and extracts mirrors if so. - If not, checks a number of common rpm repo formats to determine - which type of repo it is, and to determine the mirror urls. 
+ """ Refresh an rpm repo (yum or yast) + Checks if the repo url is a mirrorlist or metalink, + and extracts mirrors if so, then refreshes the mirrors """ + check_for_mirrorlists(repo) + check_for_metalinks(repo) + refresh_rpm_repo_mirrors(repo) + +def refresh_rpm_repo_mirrors(repo, errata_only=False): + """ checks a number of common yum repo formats to determine + which type of repo it is, then refreshes the mirrors + """ formats = [ 'repodata/repomd.xml.xz', 'repodata/repomd.xml.bz2', @@ -874,10 +1024,6 @@ def refresh_rpm_repo(repo): 'suse/repodata/repomd.xml', 'content', ] - - check_for_mirrorlists(repo) - check_for_metalinks(repo) - max_mirrors = get_max_mirrors() ts = get_datetime_now() enabled_mirrors = repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True) @@ -907,7 +1053,7 @@ def refresh_rpm_repo(repo): else: text = f'Found yum rpm Repo - {mirror_url}' info_message.send(sender=None, text=text) - refresh_yum_repo(mirror, repo_data, mirror_url, ts) + refresh_yum_repo(mirror, repo_data, mirror_url, ts, errata_only) mirror.timestamp = ts mirror.save() diff --git a/requirements.txt b/requirements.txt index 32ce1225..7b044b82 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,6 @@ django-taggit==4.0.0 django-extensions==3.2.1 django-bootstrap3==23.1 python-debian==0.1.49 -lxml==5.2.2 defusedxml==0.7.1 PyYAML==6.0.1 chardet==4.0.0 From 32cc5ec76b496ecdc9a8f7c234880b5a4b2e9ac0 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 15:26:37 -0500 Subject: [PATCH 097/199] add missing import --- util/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/util/__init__.py b/util/__init__.py index 01b0cd96..742239da 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -20,7 +20,7 @@ import magic import zlib import lzma -from datetime import timezone +from datetime import datetime, timezone from enum import Enum from hashlib import md5, sha1, sha256, sha512 from requests.exceptions import HTTPError, 
Timeout, ConnectionError From 5ea8baf18d84cb4a6ed66308e5dfb885ae8b6ab1 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 15:33:23 -0500 Subject: [PATCH 098/199] rename progress bars signals and receivers --- errata/sources/distros/alma.py | 10 ++++---- errata/sources/distros/arch.py | 10 ++++---- errata/sources/distros/centos.py | 6 ++--- errata/sources/distros/debian.py | 10 ++++---- errata/sources/distros/rocky.py | 14 +++++------ errata/sources/distros/ubuntu.py | 10 ++++---- errata/utils.py | 6 ++--- patchman/receivers.py | 10 ++++---- patchman/signals.py | 4 +-- reports/utils.py | 18 +++++++------- repos/utils.py | 42 ++++++++++++++++---------------- 11 files changed, 70 insertions(+), 70 deletions(-) diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index 22e61c9e..51cab49b 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -22,7 +22,7 @@ from packages.models import Package from packages.utils import get_or_create_package, parse_package_string from util import get_url, download_url, get_setting_of_type -from patchman.signals import progress_info_s, progress_update_s +from patchman.signals import pbar_start, pbar_update def update_alma_errata(concurrent_processing=True): @@ -66,23 +66,23 @@ def process_alma_errata_serially(release, advisories): """ Process Alma Linux Errata serially """ elen = len(advisories) - progress_info_s.send(sender=None, ptext=f'Processing {elen} Alma Errata', plen=elen) + pbar_start.send(sender=None, ptext=f'Processing {elen} Alma Errata', plen=elen) for i, advisory in enumerate(advisories): process_alma_erratum(release, advisory) - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) def process_alma_errata_concurrently(release, advisories): """ Process Alma Linux Errata concurrently """ elen = len(advisories) - progress_info_s.send(sender=None, ptext=f'Processing {elen} Alma Errata', plen=elen) + 
pbar_start.send(sender=None, ptext=f'Processing {elen} Alma Errata', plen=elen) i = 0 with concurrent.futures.ProcessPoolExecutor(max_workers=10) as executor: futures = [executor.submit(process_alma_erratum, release, advisory) for advisory in advisories] for future in concurrent.futures.as_completed(futures): i += 1 - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) def process_alma_erratum(release, advisory): diff --git a/errata/sources/distros/arch.py b/errata/sources/distros/arch.py index 0e474159..7b22ce12 100644 --- a/errata/sources/distros/arch.py +++ b/errata/sources/distros/arch.py @@ -21,7 +21,7 @@ from packages.models import Package from packages.utils import find_evr, get_matching_packages from util import get_url, download_url -from patchman.signals import error_message, progress_info_s, progress_update_s +from patchman.signals import error_message, pbar_start, pbar_update def update_arch_errata(concurrent_processing=False): @@ -56,10 +56,10 @@ def parse_arch_errata_serially(advisories): """ osrelease = OSRelease.objects.get(name='Arch Linux') elen = len(advisories) - progress_info_s.send(sender=None, ptext=f'Processing {elen} Arch Advisories', plen=elen) + pbar_start.send(sender=None, ptext=f'Processing {elen} Arch Advisories', plen=elen) for i, advisory in enumerate(advisories): process_arch_erratum(advisory, osrelease) - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) def parse_arch_errata_concurrently(advisories): @@ -67,13 +67,13 @@ def parse_arch_errata_concurrently(advisories): """ osrelease = OSRelease.objects.get(name='Arch Linux') elen = len(advisories) - progress_info_s.send(sender=None, ptext=f'Processing {elen} Arch Advisories', plen=elen) + pbar_start.send(sender=None, ptext=f'Processing {elen} Arch Advisories', plen=elen) i = 0 with concurrent.futures.ProcessPoolExecutor(max_workers=3) as executor: futures = [executor.submit(process_arch_erratum, 
advisory, osrelease) for advisory in advisories] for future in concurrent.futures.as_completed(futures): i += 1 - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) def process_arch_erratum(advisory, osrelease): diff --git a/errata/sources/distros/centos.py b/errata/sources/distros/centos.py index 83655c03..b2fbfdb8 100644 --- a/errata/sources/distros/centos.py +++ b/errata/sources/distros/centos.py @@ -21,7 +21,7 @@ from packages.models import Package from packages.utils import parse_package_string, get_or_create_package -from patchman.signals import error_message, progress_info_s, progress_update_s +from patchman.signals import error_message, pbar_start, pbar_update from util import bunzip2, get_url, download_url, get_sha1, get_setting_of_type @@ -69,9 +69,9 @@ def parse_centos_errata(data): result = etree.XML(data) errata_xml = result.findall('*') elen = len(errata_xml) - progress_info_s.send(sender=None, ptext=f'Processing {elen} CentOS Errata', plen=elen) + pbar_start.send(sender=None, ptext=f'Processing {elen} CentOS Errata', plen=elen) for i, child in enumerate(errata_xml): - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) releases = get_centos_erratum_releases(child.findall('os_release')) if not accepted_centos_release(releases): continue diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index d9183917..82eda94a 100644 --- a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -28,7 +28,7 @@ from packages.models import Package from packages.utils import get_or_create_package, find_evr from util import get_url, download_url, get_setting_of_type -from patchman.signals import error_message, progress_info_s, progress_update_s +from patchman.signals import error_message, pbar_start, pbar_update def update_debian_errata(concurrent_processing=True): @@ -131,23 +131,23 @@ def create_debian_errata_serially(errata, 
accepted_codenames): """ Create Debian Errata Serially """ elen = len(errata) - progress_info_s.send(sender=None, ptext=f'Processing {elen} Debian Errata', plen=elen) + pbar_start.send(sender=None, ptext=f'Processing {elen} Debian Errata', plen=elen) for i, erratum in enumerate(errata): process_debian_erratum(erratum, accepted_codenames) - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) def create_debian_errata_concurrently(errata, accepted_codenames): """ Create Debian Errata concurrently """ elen = len(errata) - progress_info_s.send(sender=None, ptext=f'Processing {elen} Debian Errata', plen=elen) + pbar_start.send(sender=None, ptext=f'Processing {elen} Debian Errata', plen=elen) i = 0 with concurrent.futures.ProcessPoolExecutor(max_workers=200) as executor: futures = [executor.submit(process_debian_erratum, erratum, accepted_codenames) for erratum in errata] for future in concurrent.futures.as_completed(futures): i += 1 - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) def process_debian_erratum(erratum, accepted_codenames): diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py index 4ef5e246..bac0fd06 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -24,7 +24,7 @@ from packages.models import Package from packages.utils import parse_package_string, get_or_create_package from util import get_url, download_url, info_message, error_message -from patchman.signals import progress_info_s, progress_update_s +from patchman.signals import pbar_start, pbar_update def update_rocky_errata(concurrent_processing=True): @@ -113,7 +113,7 @@ def download_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_a last_link = links.get('last') pages = int(last_link.split('=')[-1]) ptext = 'Downloading Rocky Linux Advisories:' - progress_info_s.send(sender=None, ptext=ptext, plen=pages) + pbar_start.send(sender=None, 
ptext=ptext, plen=pages) i = 0 with concurrent.futures.ThreadPoolExecutor(max_workers=100) as executor: futures = [executor.submit(get_rocky_advisory, rocky_errata_advisories_url, page) @@ -121,7 +121,7 @@ def download_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_a for future in concurrent.futures.as_completed(futures): advisories += future.result() i += 1 - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) return advisories @@ -149,23 +149,23 @@ def process_rocky_errata_serially(advisories): """ Process Rocky Linux errata serially """ elen = len(advisories) - progress_info_s.send(sender=None, ptext=f'Processing {elen} Rocky Errata', plen=elen) + pbar_start.send(sender=None, ptext=f'Processing {elen} Rocky Errata', plen=elen) for i, advisory in enumerate(advisories): process_rocky_erratum(advisory) - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) def process_rocky_errata_concurrently(advisories): """ Process Rocky Linux errata concurrently """ elen = len(advisories) - progress_info_s.send(sender=None, ptext=f'Processing {elen} Rocky Errata', plen=elen) + pbar_start.send(sender=None, ptext=f'Processing {elen} Rocky Errata', plen=elen) i = 0 with concurrent.futures.ProcessPoolExecutor(max_workers=100) as executor: futures = [executor.submit(process_rocky_erratum, advisory) for advisory in advisories] for future in concurrent.futures.as_completed(futures): i += 1 - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) @retry( diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py index 8bb2ed50..195460b4 100644 --- a/errata/sources/distros/ubuntu.py +++ b/errata/sources/distros/ubuntu.py @@ -25,7 +25,7 @@ from packages.models import Package, PackageName from packages.utils import get_or_create_package, parse_package_string, find_evr from util import get_url, download_url, get_sha256, bunzip2, 
get_setting_of_type -from patchman.signals import error_message, progress_info_s, progress_update_s +from patchman.signals import error_message, pbar_start, pbar_update def update_ubuntu_errata(concurrent_processing=False): @@ -77,24 +77,24 @@ def parse_usn_data_serially(advisories, accepted_releases): """ Parse the Ubuntu USN data serially """ elen = len(advisories) - progress_info_s.send(sender=None, ptext=f'Processing {elen} Ubuntu Errata', plen=elen) + pbar_start.send(sender=None, ptext=f'Processing {elen} Ubuntu Errata', plen=elen) for i, (usn_id, advisory) in enumerate(advisories.items()): process_usn(usn_id, advisory, accepted_releases) - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) def parse_usn_data_concurrently(advisories, accepted_releases): """ Parse the Ubuntu USN data concurrently """ elen = len(advisories) - progress_info_s.send(sender=None, ptext=f'Processing {elen} Ubuntu Errata', plen=elen) + pbar_start.send(sender=None, ptext=f'Processing {elen} Ubuntu Errata', plen=elen) i = 0 with concurrent.futures.ProcessPoolExecutor(max_workers=10) as executor: futures = [executor.submit(process_usn, usn_id, advisory, accepted_releases) for usn_id, advisory in advisories.items()] for future in concurrent.futures.as_completed(futures): i += 1 - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) def process_usn(usn_id, advisory, accepted_releases): diff --git a/errata/utils.py b/errata/utils.py index 16e9d1bd..3ae04c7a 100644 --- a/errata/utils.py +++ b/errata/utils.py @@ -20,7 +20,7 @@ from util import tz_aware_datetime from errata.models import Erratum -from patchman.signals import progress_info_s, progress_update_s, warning_message +from patchman.signals import pbar_start, pbar_update, warning_message def get_or_create_erratum(name, e_type, issue_date, synopsis): @@ -119,7 +119,7 @@ def mark_errata_security_updates(): should be marked as a security update. 
""" elen = Erratum.objects.count() - progress_info_s.send(sender=None, ptext=f'Scanning {elen} Errata', plen=elen) + pbar_start.send(sender=None, ptext=f'Scanning {elen} Errata', plen=elen) for i, e in enumerate(Erratum.objects.all()): - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) e.scan_for_security_updates() diff --git a/patchman/receivers.py b/patchman/receivers.py index c0ee4a47..771d927d 100644 --- a/patchman/receivers.py +++ b/patchman/receivers.py @@ -21,7 +21,7 @@ from django.dispatch import receiver from util import create_pbar, update_pbar, get_verbosity -from patchman.signals import progress_info_s, progress_update_s, \ +from patchman.signals import pbar_start, pbar_update, \ info_message, warning_message, error_message, debug_message from django.conf import settings @@ -29,8 +29,8 @@ init(autoreset=True) -@receiver(progress_info_s) -def progress_info_r(**kwargs): +@receiver(pbar_start) +def pbar_start_receiver(**kwargs): """ Receiver to create a progressbar """ ptext = kwargs.get('ptext') @@ -39,8 +39,8 @@ def progress_info_r(**kwargs): create_pbar(ptext, plen) -@receiver(progress_update_s) -def progress_update_r(**kwargs): +@receiver(pbar_update) +def pbar_update_receiver(**kwargs): """ Receiver to update a progressbar """ index = kwargs.get('index') diff --git a/patchman/signals.py b/patchman/signals.py index e2f967e3..917a48e4 100644 --- a/patchman/signals.py +++ b/patchman/signals.py @@ -17,8 +17,8 @@ from django.dispatch import Signal -progress_info_s = Signal() -progress_update_s = Signal() +pbar_start = Signal() +pbar_update = Signal() info_message = Signal() warning_message = Signal() error_message = Signal() diff --git a/reports/utils.py b/reports/utils.py index 8b82c5fe..f551c0ef 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -26,7 +26,7 @@ from packages.models import Package, PackageCategory from packages.utils import find_evr, get_or_create_package, get_or_create_package_update, 
parse_package_string from repos.utils import get_or_create_repo -from patchman.signals import progress_info_s, progress_update_s, error_message, info_message +from patchman.signals import pbar_start, pbar_update, error_message, info_message def process_repos(report, host): @@ -37,7 +37,7 @@ def process_repos(report, host): host_repos = HostRepo.objects.filter(host=host) repos = parse_repos(report.repos) - progress_info_s.send(sender=None, ptext=f'{host} Repos', plen=len(repos)) + pbar_start.send(sender=None, ptext=f'{host} Repos', plen=len(repos)) for i, repo_str in enumerate(repos): repo, priority = process_repo(repo_str, report.arch) if repo: @@ -56,7 +56,7 @@ def process_repos(report, host): hostrepo.save() except IntegrityError as e: error_message.send(sender=None, text=e) - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) for hostrepo in host_repos: if hostrepo.repo_id not in repo_ids: @@ -70,7 +70,7 @@ def process_modules(report, host): module_ids = [] modules = parse_modules(report.modules) - progress_info_s.send(sender=None, ptext=f'{host} Modules', plen=len(modules)) + pbar_start.send(sender=None, ptext=f'{host} Modules', plen=len(modules)) for i, module_str in enumerate(modules): module = process_module(module_str) if module: @@ -82,7 +82,7 @@ def process_modules(report, host): error_message.send(sender=None, text=e) except DatabaseError as e: error_message.send(sender=None, text=e) - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) for module in host.modules.all(): if module.id not in module_ids: @@ -96,7 +96,7 @@ def process_packages(report, host): package_ids = [] packages = parse_packages(report.packages) - progress_info_s.send(sender=None, ptext=f'{host} Packages', plen=len(packages)) + pbar_start.send(sender=None, ptext=f'{host} Packages', plen=len(packages)) for i, pkg_str in enumerate(packages): package = process_package(pkg_str, report.protocol) if package: 
@@ -112,7 +112,7 @@ def process_packages(report, host): if pkg_str[0].lower() != 'gpg-pubkey': text = f'No package returned for {pkg_str}' info_message.send(sender=None, text=text) - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) for package in host.packages.all(): if package.id not in package_ids: @@ -150,13 +150,13 @@ def add_updates(updates, host): host.updates.remove(host_update) ulen = len(updates) if ulen > 0: - progress_info_s.send(sender=None, ptext=f'{host} Updates', plen=ulen) + pbar_start.send(sender=None, ptext=f'{host} Updates', plen=ulen) for i, (u, sec) in enumerate(updates.items()): update = process_update(host, u, sec) if update: host.updates.add(update) - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) def parse_updates(updates_string, security): diff --git a/repos/utils.py b/repos/utils.py index b9e47884..5a43b91e 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -41,7 +41,7 @@ from util import get_url, download_url, response_is_valid, extract, get_checksum, Checksum, get_setting_of_type, \ get_datetime_now from patchman.signals import info_message, warning_message, error_message, debug_message, \ - progress_info_s, progress_update_s + pbar_start, pbar_update def get_or_create_repo(r_name, r_arch, r_type, r_id=None): @@ -78,25 +78,25 @@ def update_mirror_packages(mirror, packages): old = set() mirror_packages = mirror.packages.all() plen = mirror_packages.count() - progress_info_s.send(sender=None, ptext=f'Fetching {plen} existing Packages', plen=plen) + pbar_start.send(sender=None, ptext=f'Fetching {plen} existing Packages', plen=plen) for i, package in enumerate(mirror_packages): - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) strpackage = convert_package_to_packagestring(package) old.add(strpackage) removals = old.difference(packages) rlen = len(removals) - progress_info_s.send(sender=None, 
ptext=f'Removing {rlen} obsolete Packages', plen=rlen) + pbar_start.send(sender=None, ptext=f'Removing {rlen} obsolete Packages', plen=rlen) for i, strpackage in enumerate(removals): - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) package = convert_packagestring_to_package(strpackage) MirrorPackage.objects.filter(mirror=mirror, package=package).delete() new = packages.difference(old) nlen = len(new) - progress_info_s.send(sender=None, ptext=f'Adding {nlen} new Packages', plen=nlen) + pbar_start.send(sender=None, ptext=f'Adding {nlen} new Packages', plen=nlen) for i, strpackage in enumerate(new): - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) try: package = convert_packagestring_to_package(strpackage) with transaction.atomic(): @@ -341,9 +341,9 @@ def extract_module_metadata(data, url, repo): error_message.send(sender=None, text=f'Error parsing modules.yaml: {e}') mlen = len(re.findall(r'---', yaml.dump(extracted.decode()))) - progress_info_s.send(sender=None, ptext=f'Extracting {mlen} Modules ', plen=mlen) + pbar_start.send(sender=None, ptext=f'Extracting {mlen} Modules ', plen=mlen) for i, doc in enumerate(modules_yaml): - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) document = doc['document'] modulemd = doc['data'] if document == 'modulemd': @@ -389,7 +389,7 @@ def extract_yum_packages(data, url): if elem.tag == f'{{{ns}}}metadata': plen = int(elem.attrib.get('packages')) break - progress_info_s.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) + pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) i = 0 for event, elem in context: if event == 'start': @@ -419,7 +419,7 @@ def extract_yum_packages(data, url): packagetype='R', ) packages.add(package) - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) i += 1 else: text = f'Error parsing 
Package: {name} {epoch} {version} {release} {arch}' @@ -443,7 +443,7 @@ def extract_deb_packages(data, url): packages = set() if plen > 0: - progress_info_s.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) + pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) for i, stanza in enumerate(Packages.iter_paragraphs(extracted)): # https://github.com/furlongm/patchman/issues/55 if 'version' not in stanza: @@ -458,7 +458,7 @@ def extract_deb_packages(data, url): release = fullversion._BaseVersion__debian_revision if release is None: release = '' - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) package = PackageString(name=name, epoch=epoch, version=version, @@ -480,10 +480,10 @@ def extract_yast_packages(data): packages = set() if plen > 0: - progress_info_s.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) + pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) for i, pkg in enumerate(pkgs): - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) name, version, release, arch = pkg.split() package = PackageString(name=name.lower(), epoch='', @@ -506,9 +506,9 @@ def extract_arch_packages(data): packages = set() plen = len(tf.getnames()) if plen > 0: - progress_info_s.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) + pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) for i, tarinfo in enumerate(tf): - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) if tarinfo.isfile(): name_sec = ver_sec = arch_sec = False t = tf.extractfile(tarinfo).read() @@ -604,9 +604,9 @@ def extract_updateinfo(data, url): tree = ET.parse(BytesIO(extracted)) root = tree.getroot() elen = root.__len__() - progress_info_s.send(sender=None, ptext=f'Extracting {elen} rpm Errata', plen=elen) + pbar_start.send(sender=None, ptext=f'Extracting {elen} rpm 
Errata', plen=elen) for i, update in enumerate(root.findall('update')): - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) e_type = update.attrib.get('type') name = update.find('id').text synopsis = update.find('title').text @@ -874,9 +874,9 @@ def extract_gentoo_packages_from_ebuilds(extracted_ebuilds): packages = set() flen = len(extracted_ebuilds) - progress_info_s.send(sender=None, ptext=f'Processing {flen} ebuilds', plen=flen) + pbar_start.send(sender=None, ptext=f'Processing {flen} ebuilds', plen=flen) for i, (path, content) in enumerate(extracted_ebuilds.items()): - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) components = path.split(os.sep) category = components[1] name = components[2] From 850b53c93fa4b1e495a80570fa43ae8c5189dd1b Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 13:04:03 -0500 Subject: [PATCH 099/199] allow hosts with no tags --- hosts/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hosts/models.py b/hosts/models.py index f45c830d..c40944de 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -55,7 +55,7 @@ class Host(models.Model): updates = models.ManyToManyField(PackageUpdate, blank=True) reboot_required = models.BooleanField(default=False) host_repos_only = models.BooleanField(default=True) - tags = TaggableManager() + tags = TaggableManager(blank=True) updated_at = models.DateTimeField(default=timezone.now) from hosts.managers import HostManager From 28e6e092ab80a66d7fe71c0dad8f7269530c05e4 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 1 Mar 2025 15:50:13 -0500 Subject: [PATCH 100/199] add imports on same lines --- patchman/receivers.py | 3 +-- repos/utils.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/patchman/receivers.py b/patchman/receivers.py index 771d927d..5ec32cdd 100644 --- a/patchman/receivers.py +++ b/patchman/receivers.py @@ -21,8 +21,7 @@ from 
django.dispatch import receiver from util import create_pbar, update_pbar, get_verbosity -from patchman.signals import pbar_start, pbar_update, \ - info_message, warning_message, error_message, debug_message +from patchman.signals import pbar_start, pbar_update, info_message, warning_message, error_message, debug_message from django.conf import settings diff --git a/repos/utils.py b/repos/utils.py index 5a43b91e..f87536d5 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -40,8 +40,7 @@ convert_package_to_packagestring, convert_packagestring_to_package from util import get_url, download_url, response_is_valid, extract, get_checksum, Checksum, get_setting_of_type, \ get_datetime_now -from patchman.signals import info_message, warning_message, error_message, debug_message, \ - pbar_start, pbar_update +from patchman.signals import info_message, warning_message, error_message, debug_message, pbar_start, pbar_update def get_or_create_repo(r_name, r_arch, r_type, r_id=None): From 7fe2dcf01b7dfae9f26fd5ece455a4fd61269c3c Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 2 Mar 2025 21:43:12 -0500 Subject: [PATCH 101/199] fix order on zypper repos --- client/patchman-client | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/client/patchman-client b/client/patchman-client index 4f30d325..48f71355 100755 --- a/client/patchman-client +++ b/client/patchman-client @@ -526,11 +526,11 @@ get_repos() { echo 'Finding zypper repos...' 
fi if [ $(zypper -q --no-refresh lr --details | head -n 1 | grep Keep) ] ; then - zypper_lr_cols="2,3,8,10" + zypper_lr_cols='{print "${os}" $3 "|" $2 "|" $8 "|" $10}' else - zypper_lr_cols="2,3,7,9" + zypper_lr_cols='{print "${os}" $3 "|" $2 "|" $7 "|" $9}' fi - for i in $(zypper -q --no-refresh lr -E -u --details | grep -v ^$ | tail -n +3 | cut -d "|" -f ${zypper_lr_cols} | sed -e "s/ *|/ ${host_arch} |/" -e "s/\?[a-zA-Z0-9_-]* *$//" -e "s/^ /'/g" -e "s/ *| */' '/g" -e "s/ *$/'/g") ; do + for i in $(zypper -q --no-refresh lr -E -u --details | grep -v ^$ | tail -n +3 | awk -F"|" "${zypper_lr_cols}" | sed -e "s/\${os}/${PRETTY_NAME}/" -e "s/ *|/ ${host_arch} |/" -e "s/\?[a-zA-Z0-9_-]* *$//" -e "s/^/'/g" -e "s/ *| */' '/g" -e "s/ *$/'/g") ; do echo \'rpm\' ${i} >> "${tmpfile_rep}" id=$(echo ${i} | cut -d \' -f 4) suse_repo=$(echo ${i} | grep -e "https://updates.suse.com/.*") From d968a8bd1883f489d0d43700c89755def9d6660d Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 2 Mar 2025 22:52:16 -0500 Subject: [PATCH 102/199] add awk and additional rh cdn urls --- client/patchman-client | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/client/patchman-client b/client/patchman-client index 48f71355..d3329559 100755 --- a/client/patchman-client +++ b/client/patchman-client @@ -478,7 +478,7 @@ get_repos() { if [ "${priority}" == "" ] ; then priority=99 fi - redhat_repo=$(echo ${i} | grep -e "https://.*/XMLRPC.*\|https://cdn.redhat.com/.*") + redhat_repo=$(echo ${i} | grep -e "https://.*/XMLRPC.*\|https://cdn[-[a-z]*]*.redhat.com/.*") if [ ${?} == 0 ] || ${local_updates} ; then if ${verbose} ; then echo "Finding updates locally for ${id}" @@ -667,10 +667,11 @@ post_data() { } if ! check_command_exists which || \ + ! check_command_exists awk || \ ! check_command_exists mktemp || \ ! check_command_exists curl || \ ! check_command_exists flock ; then - echo "which, mktemp, flock or curl was not found, exiting." 
+ echo "which, awk, mktemp, flock or curl was not found, exiting." exit 1 fi From c95ca88a01a4ad3c1463318f35dc70de5d6040ac Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 2 Mar 2025 23:40:06 -0500 Subject: [PATCH 103/199] use ID_LIKE for suse os detection --- client/patchman-client | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/patchman-client b/client/patchman-client index d3329559..7188a675 100755 --- a/client/patchman-client +++ b/client/patchman-client @@ -358,7 +358,7 @@ get_host_data() { os="${NAME}" elif [ "${ID}" == "gentoo" ] ; then os="${PRETTY_NAME} ${VERSION_ID}" - elif [[ "${ID}" =~ "suse" ]] ; then + elif [[ "${ID_LIKE}" =~ "suse" ]] ; then os="${PRETTY_NAME}" elif [ "${ID}" == "astra" ] ; then os="${NAME} $(cat /etc/astra_version)" From 87bcb2d00d2a0c44f76cb6956a80398b434448c1 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 14:56:20 -0500 Subject: [PATCH 104/199] update TODO --- TODO | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/TODO b/TODO index 74df99ea..b5f49f0b 100644 --- a/TODO +++ b/TODO @@ -13,3 +13,9 @@ * dnf5 support * proxy support * GLSA support +* only use date for errata issue date? +* parallelize package extraction +* use django-tables2 +* autonaming for deb repos +* associate repos with gentoo hosts +* populate authenticated repos with package lists from hosts? 
From aed84da9c52fc20da46928a90a0c3d11f566660d Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 14:57:35 -0500 Subject: [PATCH 105/199] allow errata fuzzy date-matching --- errata/utils.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/errata/utils.py b/errata/utils.py index 3ae04c7a..482e3967 100644 --- a/errata/utils.py +++ b/errata/utils.py @@ -29,13 +29,18 @@ def get_or_create_erratum(name, e_type, issue_date, synopsis): try: e = Erratum.objects.get(name=name) issue_date_tz = tz_aware_datetime(issue_date) + # if it's +/- 1 day we don't update it, just use whichever was the first one + # different sources are generated at different times + # e.g. yum updateinfo vs website errata info + days_delta = abs(e.issue_date.date() - issue_date_tz.date()).days updated = False if e.e_type != e_type: warning_message.send(sender=None, text=f'Updating {name} type `{e.e_type}` -> `{e_type}`') e.e_type = e_type updated = True - if e.issue_date != issue_date_tz: - warning_message.send(sender=None, text=f'Updating {name} issue date `{e.issue_date}` -> `{issue_date_tz}`') + if days_delta > 1: + text = f'Updating {name} issue date `{e.issue_date.date()}` -> `{issue_date_tz.date()}`' + warning_message.send(sender=None, text=text) e.issue_date = issue_date_tz updated = True if e.synopsis != synopsis: From 00c3cd72bb438ba9db6d7e433a3429b595a9858e Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 14:58:16 -0500 Subject: [PATCH 106/199] standardize errata progressbar text --- errata/models.py | 9 ++++----- errata/sources/distros/alma.py | 6 +++--- errata/sources/distros/arch.py | 4 ++-- errata/sources/distros/debian.py | 2 +- errata/sources/distros/rocky.py | 12 ++++++------ errata/sources/distros/ubuntu.py | 8 ++++---- 6 files changed, 20 insertions(+), 21 deletions(-) diff --git a/errata/models.py b/errata/models.py index d744aadd..3031e0d6 100644 --- a/errata/models.py +++ b/errata/models.py @@ -94,9 +94,8 @@ def 
add_reference(self, e_type, url): from errata.utils import fixup_erratum_reference reference = fixup_erratum_reference({'er_type': e_type, 'url': url}) if reference: - with transaction.atomic(): - er, created = ErratumReference.objects.get_or_create( - er_type=reference.get('er_type'), - url=reference.get('url'), - ) + er, created = ErratumReference.objects.get_or_create( + er_type=reference.get('er_type'), + url=reference.get('url'), + ) self.references.add(er) diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index 51cab49b..3a79d47f 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -48,7 +48,7 @@ def download_alma_advisories(release): alma_errata_url = f'https://errata.almalinux.org/{release}/errata.full.json' headers = {'Accept': 'application/json', 'Cache-Control': 'no-cache, no-tranform'} res = get_url(alma_errata_url, headers=headers) - data = download_url(res, 'Downloading Alma Linux Errata:') + data = download_url(res, f'Downloading Alma {release} Errata') advisories = json.loads(data).get('data') return advisories @@ -66,7 +66,7 @@ def process_alma_errata_serially(release, advisories): """ Process Alma Linux Errata serially """ elen = len(advisories) - pbar_start.send(sender=None, ptext=f'Processing {elen} Alma Errata', plen=elen) + pbar_start.send(sender=None, ptext=f'Processing {elen} Alma {release} Errata', plen=elen) for i, advisory in enumerate(advisories): process_alma_erratum(release, advisory) pbar_update.send(sender=None, index=i + 1) @@ -76,7 +76,7 @@ def process_alma_errata_concurrently(release, advisories): """ Process Alma Linux Errata concurrently """ elen = len(advisories) - pbar_start.send(sender=None, ptext=f'Processing {elen} Alma Errata', plen=elen) + pbar_start.send(sender=None, ptext=f'Processing {elen} Alma {release} Errata', plen=elen) i = 0 with concurrent.futures.ProcessPoolExecutor(max_workers=10) as executor: futures = [executor.submit(process_alma_erratum, release, 
advisory) for advisory in advisories] diff --git a/errata/sources/distros/arch.py b/errata/sources/distros/arch.py index 7b22ce12..791e1705 100644 --- a/errata/sources/distros/arch.py +++ b/errata/sources/distros/arch.py @@ -38,7 +38,7 @@ def download_arch_errata(): https://security.archlinux.org/advisories.json """ res = get_url('https://security.archlinux.org/advisories.json') - advisories = download_url(res, 'Downloading Arch Linux Advisories:') + advisories = download_url(res, 'Downloading Arch Advisories') return json.loads(advisories) @@ -150,7 +150,7 @@ def add_arch_erratum_packages(e, advisory): group_id = advisory.get('group') group_url = f'https://security.archlinux.org/group/{group_id}.json' res = get_url(group_url) - data = download_url(res) + data = res.content group = json.loads(data) packages = group.get('packages') affected = group.get('affected') diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index 82eda94a..e19af829 100644 --- a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -230,7 +230,7 @@ def retrieve_debian_codenames(): """ distro_info_url = 'https://debian.pages.debian.net/distro-info-data/debian.csv' res = get_url(distro_info_url) - debian_csv = download_url(res, 'Downloading Debian distro info:') + debian_csv = download_url(res, 'Downloading Debian distro data') reader = csv.DictReader(StringIO(debian_csv.decode())) codename_to_version = {} for row in reader: diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py index bac0fd06..ba5d7263 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -48,15 +48,15 @@ def check_rocky_errata_endpoint_health(rocky_errata_api_host): try: health = json.loads(data) if health.get('status') == 'ok': - s = f'Rocky Linux Errata API healthcheck OK: {rocky_errata_healthcheck_url}' + s = f'Rocky Errata API healthcheck OK: {rocky_errata_healthcheck_url}' info_message.send(sender=None, text=s) 
return True else: - s = f'Rocky Linux Errata API healthcheck FAILED: {rocky_errata_healthcheck_url}' + s = f'Rocky Errata API healthcheck FAILED: {rocky_errata_healthcheck_url}' error_message.send(sender=None, text=s) return False except Exception as e: - s = f'Rocky Linux Errata API healthcheck exception occured: {rocky_errata_healthcheck_url}\n' + s = f'Rocky Errata API healthcheck exception occured: {rocky_errata_healthcheck_url}\n' s += str(e) error_message.send(sender=None, text=s) return False @@ -82,7 +82,7 @@ def download_rocky_advisories_serially(rocky_errata_api_host, rocky_errata_api_u params = {'page': 1, 'size': 100} while True: res = get_url(rocky_errata_advisories_url, headers=headers, params=params) - data = download_url(res, f'Rocky Linux Advisories {page}{"/"+pages if pages else ""}') + data = download_url(res, f'Rocky Advisories {page}{"/"+pages if pages else ""}') advisories_dict = json.loads(data) advisories += advisories_dict.get('advisories') links = advisories_dict.get('links') @@ -107,12 +107,12 @@ def download_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_a advisories = [] params = {'page': 1, 'size': 100} res = get_url(rocky_errata_advisories_url, headers=headers, params=params) - data = download_url(res, 'Rocky Linux Advisories Page 1') + data = download_url(res, 'Rocky Advisories Page 1') advisories_dict = json.loads(data) links = advisories_dict.get('links') last_link = links.get('last') pages = int(last_link.split('=')[-1]) - ptext = 'Downloading Rocky Linux Advisories:' + ptext = 'Downloading Rocky Advisories' pbar_start.send(sender=None, ptext=ptext, plen=pages) i = 0 with concurrent.futures.ThreadPoolExecutor(max_workers=100) as executor: diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py index 195460b4..f0a518cc 100644 --- a/errata/sources/distros/ubuntu.py +++ b/errata/sources/distros/ubuntu.py @@ -50,7 +50,7 @@ def download_ubuntu_usn_db(): """ ubuntu_usn_db_json_url = 
'https://usn.ubuntu.com/usn-db/database.json.bz2' res = get_url(ubuntu_usn_db_json_url) - return download_url(res, 'Downloading Ubuntu Errata:') + return download_url(res, 'Downloading Ubuntu Errata') def download_ubuntu_usn_db_checksum(): @@ -58,7 +58,7 @@ def download_ubuntu_usn_db_checksum(): """ ubuntu_usn_db_checksum_url = 'https://usn.ubuntu.com/usn-db/database.json.bz2.sha256' res = get_url(ubuntu_usn_db_checksum_url) - return download_url(res, 'Downloading Ubuntu Errata Checksum:').decode().split()[0] + return download_url(res, 'Downloading Ubuntu Errata Checksum').decode().split()[0] def parse_usn_data(data, concurrent_processing): @@ -89,7 +89,7 @@ def parse_usn_data_concurrently(advisories, accepted_releases): elen = len(advisories) pbar_start.send(sender=None, ptext=f'Processing {elen} Ubuntu Errata', plen=elen) i = 0 - with concurrent.futures.ProcessPoolExecutor(max_workers=10) as executor: + with concurrent.futures.ProcessPoolExecutor(max_workers=20) as executor: futures = [executor.submit(process_usn, usn_id, advisory, accepted_releases) for usn_id, advisory in advisories.items()] for future in concurrent.futures.as_completed(futures): @@ -215,7 +215,7 @@ def retrieve_ubuntu_codenames(): """ distro_info_url = 'https://debian.pages.debian.net/distro-info-data/ubuntu.csv' res = get_url(distro_info_url) - ubuntu_csv = download_url(res, 'Downloading Ubuntu distro info:') + ubuntu_csv = download_url(res, 'Downloading Ubuntu distro data') reader = csv.DictReader(StringIO(ubuntu_csv.decode())) codename_to_version = {} for row in reader: From c25bc8fb3c7951e5a26eabfc1bb3fcf032d1eb42 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 14:58:37 -0500 Subject: [PATCH 107/199] rename function --- errata/tasks.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/errata/tasks.py b/errata/tasks.py index e1b686c9..c49418d9 100644 --- a/errata/tasks.py +++ b/errata/tasks.py @@ -27,7 +27,10 @@ from util import get_setting_of_type 
-def update_rpm_repo_errata(): +@shared_task +def update_yum_repo_errata(): + """ Update all yum repos errata + """ for repo in Repository.objects.filter(repotype=Repository.RPM): repo.refresh_errata() From fc3f3a83a3994ed9fc0bcc3e381245c55cd33d47 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 14:59:39 -0500 Subject: [PATCH 108/199] add yum errata checking to defaults --- errata/tasks.py | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/errata/tasks.py b/errata/tasks.py index c49418d9..4421c860 100644 --- a/errata/tasks.py +++ b/errata/tasks.py @@ -42,8 +42,10 @@ def update_errata(): errata_os_updates = get_setting_of_type( setting_name='ERRATA_OS_UPDATES', setting_type=list, - default=['rocky', 'alma', 'arch', 'ubuntu', 'debian', 'rhel', 'suse', 'amazon'], + default=['yum', 'rocky', 'alma', 'arch', 'ubuntu', 'debian'], ) + if 'yum' in errata_os_updates: + update_yum_repo_errata() if 'arch' in errata_os_updates: update_arch_errata() if 'alma' in errata_os_updates: @@ -54,18 +56,8 @@ def update_errata(): update_debian_errata() if 'ubuntu' in errata_os_updates: update_ubuntu_errata() - if 'rhel' in errata_os_updates: - # update_rhel_errata() - pass - if 'suse' in errata_os_updates: - # update_suse_errata() - pass - if 'amazon' in errata_os_updates: - # update_amazon_errata() - pass if 'centos' in errata_os_updates: update_centos_errata() - update_rpm_repo_errata() @shared_task From 59abc712545cd052421e54d695b2c771a2b062d0 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 15:00:15 -0500 Subject: [PATCH 109/199] shortern more bugzilla urls --- errata/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/errata/utils.py b/errata/utils.py index 482e3967..0b248711 100644 --- a/errata/utils.py +++ b/errata/utils.py @@ -85,7 +85,8 @@ def fixup_erratum_reference(eref): bug = url.path.split('/')[-1] path = f'/bugs/{bug}' url = url._replace(netloc=netloc, path=path) - if url.hostname 
== 'bugzilla.redhat.com' and url.path == '/show_bug.cgi': + if url.hostname in ['bugzilla.redhat.com', 'bugzilla.opensuse.org', 'bugs.suse.com'] and \ + url.path == '/show_bug.cgi': bug = url.query.split('=')[1] path = f'/{bug}' url = url._replace(path=path, query='') From 37d4eae7fe99808a637581ede6a76e8c7dc9fa96 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 15:00:34 -0500 Subject: [PATCH 110/199] add os filter for errata --- errata/views.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/errata/views.py b/errata/views.py index 9b8ce52c..7e76d832 100644 --- a/errata/views.py +++ b/errata/views.py @@ -21,9 +21,10 @@ from rest_framework import viewsets -from util.filterspecs import Filter, FilterBar +from operatingsystems.models import OSRelease from errata.models import Erratum, ErratumReference from errata.serializers import ErratumSerializer, ErratumReferenceSerializer +from util.filterspecs import Filter, FilterBar @login_required @@ -42,6 +43,9 @@ def erratum_list(request): if 'package_id' in request.GET: errata = errata.filter(packages=request.GET['package_id']) + if 'osrelease_id' in request.GET: + errata = errata.filter(osreleases=request.GET['osrelease_id']) + if 'search' in request.GET: terms = request.GET['search'].lower() query = Q() @@ -65,6 +69,8 @@ def erratum_list(request): filter_list = [] filter_list.append(Filter(request, 'Erratum Type', 'e_type', Erratum.objects.values_list('e_type', flat=True).distinct())) + filter_list.append(Filter(request, 'OS Release', 'osrelease_id', + OSRelease.objects.filter(erratum__in=errata))) filter_bar = FilterBar(request, filter_list) return render(request, From f7f0acb824f22e95e439897369f0cfb91c2697dc Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 15:01:53 -0500 Subject: [PATCH 111/199] update os templates --- .../operatingsystems/operatingsystemrelease_table.html | 8 ++++++-- .../templates/operatingsystems/osrelease_delete.html | 1 + 
.../templates/operatingsystems/osrelease_detail.html | 7 ++++--- .../templates/operatingsystems/osvariant_delete.html | 2 +- .../templates/operatingsystems/osvariant_detail.html | 2 +- util/templatetags/common.py | 8 ++++++++ 6 files changed, 21 insertions(+), 7 deletions(-) diff --git a/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html b/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html index 82bb47ef..b08d05ff 100644 --- a/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html +++ b/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html @@ -4,19 +4,23 @@ OS Release CPE Name - Codename + Codename Repos OS Variants + Hosts + Errata {% for osrelease in object_list %} - {{ osrelease }} + {{ osrelease.name }} {% if osrelease.codename %}{{ osrelease.cpe_name }}{% endif %} {% if osrelease.codename %}{{ osrelease.codename }}{% endif %} {{ osrelease.repos.count }} {{ osrelease.osvariant_set.count }} + {% host_count osrelease %} + {{ osrelease.erratum_set.count }} {% endfor %} diff --git a/operatingsystems/templates/operatingsystems/osrelease_delete.html b/operatingsystems/templates/operatingsystems/osrelease_delete.html index 358d9269..e0c6f5bf 100644 --- a/operatingsystems/templates/operatingsystems/osrelease_delete.html +++ b/operatingsystems/templates/operatingsystems/osrelease_delete.html @@ -18,6 +18,7 @@ OS Variants{{ osrelease.osvariant_set.count }} Repositories{{ osrelease.repos.count }} Hosts{{ host_count }} + Errata{{ osrelease.erratum_set.count }}
    diff --git a/operatingsystems/templates/operatingsystems/osrelease_detail.html b/operatingsystems/templates/operatingsystems/osrelease_detail.html index 9cfd1123..740b9c4b 100644 --- a/operatingsystems/templates/operatingsystems/osrelease_detail.html +++ b/operatingsystems/templates/operatingsystems/osrelease_detail.html @@ -23,12 +23,13 @@
    - + +
    Name{{ osrelease }}
    Name{{ osrelease.name }}
    CPE Name{% if osrelease.cpe_name %}{{ osrelease.cpe_name }}{% endif %}
    Codename{% if osrelease.codename %}{{ osrelease.codename }}{% endif %}
    OS Variants{{ osrelease.osvariant_set.count }}
    Repositories{{ osrelease.repos.count }}
    Hosts{{ host_count }}
    Errata{{ osrelease.erratum_set.count }}
    {% if user.is_authenticated and perms.is_admin %} @@ -40,7 +41,7 @@
    {% if osrelease.osvariant_set.count == 0 %} - {{ osrelease }} has no Variants
    + {{ osrelease }} has no Variants {% else %} {% gen_table osrelease.osvariant_set.select_related %} {% endif %} @@ -50,7 +51,7 @@
    {% if osrelease.repos.count == 0 %} - {{ osrelease }} has no Repositories
    + {{ osrelease }} has no Repositories {% else %} {% gen_table osrelease.repos.select_related %} {% endif %} diff --git a/operatingsystems/templates/operatingsystems/osvariant_delete.html b/operatingsystems/templates/operatingsystems/osvariant_delete.html index 7a891989..bea8a849 100644 --- a/operatingsystems/templates/operatingsystems/osvariant_delete.html +++ b/operatingsystems/templates/operatingsystems/osvariant_delete.html @@ -15,7 +15,7 @@ Name {{ osvariant.name }} Architecture {{ osvariant.arch }} Codename {{ osvariant.codename }} - Hosts{{ osvariant.host_set.count }} + Hosts{{ osvariant.host_set.count }} OS Release{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %} diff --git a/operatingsystems/templates/operatingsystems/osvariant_detail.html b/operatingsystems/templates/operatingsystems/osvariant_detail.html index 008a00b4..0c1d306b 100644 --- a/operatingsystems/templates/operatingsystems/osvariant_detail.html +++ b/operatingsystems/templates/operatingsystems/osvariant_detail.html @@ -24,7 +24,7 @@ Name {{ osvariant.name }} Architecture {{ osvariant.arch }} Codename {{ osvariant.codename }} - Hosts{{ osvariant.host_set.count }} + Hosts{{ osvariant.host_set.count }} OS Release{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %} {% if user.is_authenticated and perms.is_admin %} diff --git a/util/templatetags/common.py b/util/templatetags/common.py index ad441df9..0d5480fa 100644 --- a/util/templatetags/common.py +++ b/util/templatetags/common.py @@ -108,3 +108,11 @@ def reports_timedelta(): default=14, ) return naturaltime(datetime.now() - timedelta(days=days)) + + +@register.simple_tag +def host_count(osrelease): + host_count = 0 + for osvariant in osrelease.osvariant_set.all(): + host_count += osvariant.host_set.count() + return host_count From c4257a169552c698d920a0737902ea51db106840 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 15:03:57 
-0500 Subject: [PATCH 112/199] explicitely install awk --- debian/control | 2 +- patchman-client.spec | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/debian/control b/debian/control index 2f0b5d10..6db37085 100644 --- a/debian/control +++ b/debian/control @@ -43,7 +43,7 @@ Description: Django-based patch status monitoring tool for linux systems. Package: patchman-client Architecture: all Homepage: https://github.com/furlongm/patchman -Depends: ${misc:Depends}, curl, debianutils, util-linux, coreutils +Depends: ${misc:Depends}, curl, debianutils, util-linux, coreutils, mawk Description: Client for the patchman monitoring system. . The client will send a list of packages and repositories to the upstream diff --git a/patchman-client.spec b/patchman-client.spec index 4d1f7b68..ac7a7385 100644 --- a/patchman-client.spec +++ b/patchman-client.spec @@ -8,7 +8,7 @@ License: GPLv3 URL: http://patchman.openbytes.ie Source: %{expand:%%(pwd)} BuildArch: noarch -Requires: curl which coreutils util-linux +Requires: curl which coreutils util-linux awk %define binary_payload w9.gzdio From aadddb3b79cc1c3f920cfb660936931239c6ffbd Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 15:05:02 -0500 Subject: [PATCH 113/199] refactor report processing --- reports/models.py | 191 ++++++------------------------------ reports/utils.py | 242 ++++++++++++++++++++++++++++++++++++++-------- 2 files changed, 231 insertions(+), 202 deletions(-) diff --git a/reports/models.py b/reports/models.py index e7e1503b..3a2fc7af 100644 --- a/reports/models.py +++ b/reports/models.py @@ -15,19 +15,11 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see -import re - -from django.db import models, IntegrityError, DatabaseError, transaction +from django.db import models, transaction from django.urls import reverse -from arch.models import MachineArchitecture -from hosts.models import Host -from operatingsystems.models import OSVariant, OSRelease -from domains.models import Domain from patchman.signals import error_message, info_message -from socket import gethostbyaddr, herror - class Report(models.Model): @@ -105,164 +97,37 @@ def parse(self, data, meta): def process(self, find_updates=True, verbose=False): """ Process a report and extract os, arch, domain, packages, repos etc """ - if self.os and self.kernel and self.arch and not self.processed: - os = self.os - cpe_name = None - codename = None - osrelease_codename = None - osvariant_codename = None - osrelease_name = os - osvariant_name = os - - # find cpe_name if it exists - match = re.match(r'(.*) \[(.*)\]', os) - if match: - cpe_name = match.group(2) - os = match.group(1) - - # find codename if it exists - match = re.match(r'(.*) \((.*)\)', os) - if match: - osrelease_name = match.group(1) - codename = match.group(2) - if not os.startswith('AlmaLinux'): - osrelease_codename = codename - - if os.startswith('Gentoo'): - osrelease_name = 'Gentoo Linux' - # presumptive, can be changed once a real cpe is assigned/used - cpe_name = 'cpe:2.3:o:gentoo:gentoo_linux:::' - - if os.startswith('AlmaLinux'): - os = os.replace('AlmaLinux', 'Alma Linux') - osrelease_name = os.split('.')[0] - # alma changes the codename with each minor release, so it's useless to us now - osvariant_name = os.replace(f' ({codename})', '') - osvariant_codename = codename - - if os.startswith('Debian'): - major, minor = os.split(' ')[1].split('.') - debian_version = f'{major}.{minor}' - osrelease_name = f'Debian {major}' - # presumptive, can be changed once a real cpe is assigned/used - cpe_name = f'cpe:2.3:o:debian:debian_linux:{debian_version}::' - - if os.startswith('Ubuntu'): - 
lts = '' - if 'LTS' in os: - lts = ' LTS' - major, minor, patch = os.split(' ')[1].split('.') - ubuntu_version = f'{major}_{minor}' - osrelease_name = f'Ubuntu {major}.{minor}{lts}' - cpe_name = f'cpe:2.3:o:canonical:ubuntu_linux:{ubuntu_version}::' - - if os.startswith('Arch'): - # presumptive, can be changed once a real cpe is assigned/used - cpe_name = 'cpe:2.3:o:archlinux:arch_linux:::' - - if os.startswith('Rocky'): - osrelease_name = os.split('.')[0] - - with transaction.atomic(): - m_arch, created = MachineArchitecture.objects.get_or_create(name=self.arch) - - with transaction.atomic(): - try: - osvariant, created = OSVariant.objects.get_or_create(name=osvariant_name, arch=m_arch) - except IntegrityError: - osvariants = OSVariant.objects.filter(name=osvariant_name) - if osvariants.count() == 1: - osvariant = osvariants[0] - if osvariant.arch is None: - osvariant.arch = m_arch + if not self.os or not self.kernel or not self.arch: + error_message.send(sender=None, text=f'Error: OS, kernel or arch not sent with report {self.id}') + return - if osvariant and osvariant_codename: - osvariant.codename = osvariant_codename + if self.processed: + info_message.send(sender=None, text=f'Report {self.id} has already been processed') + return - if cpe_name: - try: - osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name, cpe_name=cpe_name) - except IntegrityError: - osreleases = OSRelease.objects.filter(name=osrelease_name) - if osreleases.count() == 1: - osrelease = osreleases[0] - osrelease.cpe_name = cpe_name - elif osrelease_codename: - osreleases = OSRelease.objects.filter(codename=osrelease_codename) - if osreleases.count() == 1: - osrelease = osreleases[0] - elif osrelease_name: - osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) - osrelease.save() - osvariant.osrelease = osrelease - osvariant.save() + from reports.utils import get_arch, get_os, get_domain, get_host + arch = get_arch(self.arch) + osvariant = get_os(self.os, 
arch) + domain = get_domain(self.domain) + host = get_host(self, arch, osvariant, domain) - if not self.domain: - self.domain = 'unknown' - domains = Domain.objects.all() - with transaction.atomic(): - domain, c = domains.get_or_create(name=self.domain) + if verbose: + info_message.send(sender=None, text=f'Processing report {self.id} - {self.host}') - if not self.host: - try: - self.host = str(gethostbyaddr(self.report_ip)[0]) - except herror: - self.host = self.report_ip - - with transaction.atomic(): - host, c = Host.objects.get_or_create( - hostname=self.host, - defaults={ - 'ipaddress': self.report_ip, - 'arch': m_arch, - 'osvariant': osvariant, - 'domain': domain, - 'lastreport': self.created, - }) + from reports.utils import process_packages, process_repos, process_updates, process_modules + with transaction.atomic(): + process_repos(report=self, host=host) + with transaction.atomic(): + process_modules(report=self, host=host) + with transaction.atomic(): + process_packages(report=self, host=host) + with transaction.atomic(): + process_updates(report=self, host=host) - host.ipaddress = self.report_ip - host.kernel = self.kernel - host.arch = m_arch - host.osvariant = osvariant - host.domain = domain - host.lastreport = self.created - host.tags = self.tags - if self.reboot == 'True': - host.reboot_required = True - else: - host.reboot_required = False - try: - with transaction.atomic(): - host.save() - except IntegrityError as e: - error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) - host.check_rdns() + self.processed = True + self.save() + if find_updates: if verbose: - info_message.send(sender=None, text=f'Processing report {self.id} - {self.host}') - - from reports.utils import process_packages, process_repos, process_updates, process_modules - with transaction.atomic(): - process_repos(report=self, host=host) - with transaction.atomic(): - process_modules(report=self, host=host) - with 
transaction.atomic(): - process_packages(report=self, host=host) - with transaction.atomic(): - process_updates(report=self, host=host) - - self.processed = True - with transaction.atomic(): - self.save() - - if find_updates: - if verbose: - info_message.send(sender=None, text=f'Finding updates for report {self.id} - {self.host}') - host.find_updates() - else: - if self.processed: - info_message.send(sender=None, text=f'Report {self.id} has already been processed') - else: - error_message.send(sender=None, text=f'Error: OS, kernel or arch not sent with report {self.id}') + info_message.send(sender=None, text=f'Finding updates for report {self.id} - {self.host}') + host.find_updates() diff --git a/reports/utils.py b/reports/utils.py index f551c0ef..25a87047 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -16,17 +16,20 @@ # along with Patchman. If not, see import re +from socket import gethostbyaddr, herror from django.db import IntegrityError, DatabaseError, transaction -from hosts.models import HostRepo from arch.models import MachineArchitecture, PackageArchitecture -from repos.models import Repository, Mirror, MirrorPackage +from domains.models import Domain +from hosts.models import Host, HostRepo from modules.models import Module +from operatingsystems.models import OSVariant, OSRelease from packages.models import Package, PackageCategory from packages.utils import find_evr, get_or_create_package, get_or_create_package_update, parse_package_string -from repos.utils import get_or_create_repo from patchman.signals import pbar_start, pbar_update, error_message, info_message +from repos.models import Repository, Mirror, MirrorPackage +from repos.utils import get_or_create_repo def process_repos(report, host): @@ -309,17 +312,15 @@ def process_module(module_str): m_stream = module_str[1] m_version = module_str[2] m_context = module_str[3] - arch = module_str[4] + m_arch = module_str[4] repo_id = module_str[5] - package_arches = 
PackageArchitecture.objects.all() - with transaction.atomic(): - m_arch, c = package_arches.get_or_create(name=arch) + arch, c = PackageArchitecture.objects.get_or_create(name=m_arch) try: - m_repo = Repository.objects.get(repo_id=repo_id) + repo = Repository.objects.get(repo_id=repo_id) except Repository.DoesNotExist: - m_repo = None + repo = None packages = set() for pkg_str in module_str[6:]: @@ -330,32 +331,25 @@ def process_module(module_str): modules = Module.objects.all() try: - with transaction.atomic(): - module, c = modules.get_or_create(name=m_name, - stream=m_stream, - version=m_version, - context=m_context, - arch=m_arch, - repo=m_repo) + module, c = modules.get_or_create(name=m_name, + stream=m_stream, + version=m_version, + context=m_context, + arch=arch, + repo=repo) except IntegrityError as e: error_message.send(sender=None, text=e) module = modules.get(name=m_name, stream=m_stream, version=m_version, context=m_context, - arch=m_arch, - repo=m_repo) + arch=arch, + repo=repo) except DatabaseError as e: error_message.send(sender=None, text=e) for package in packages: - try: - with transaction.atomic(): - module.packages.add(package) - except IntegrityError as e: - error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) + module.packages.add(package) return module @@ -405,22 +399,192 @@ def process_package(pkg, protocol): category, created = PackageCategory.objects.get_or_create(name=p_category) package.category = category - machine_arches = MachineArchitecture.objects.all() - with transaction.atomic(): - repo_arch, created = machine_arches.get_or_create(name='any') - + repo_arch, created = MachineArchitecture.objects.get_or_create(name='any') repo_name = 'Gentoo Linux' repo = get_or_create_repo(repo_name, repo_arch, Repository.GENTOO, p_repo) - with transaction.atomic(): - if p_repo == 'gentoo': - url = 'https://api.gentoo.org/mirrors/distfiles.xml' - else: - # this may not be correct. 
the urls are hardcoded anyway in repos/utils.py - # need to figure out a better way to determine which repo/repo url to use - url = 'https://api.gentoo.org/overlays/repositories.xml' - mirror, c = Mirror.objects.get_or_create(repo=repo, url=url, mirrorlist=True) - MirrorPackage.objects.create(mirror=mirror, package=package) - + if p_repo == 'gentoo': + url = 'https://api.gentoo.org/mirrors/distfiles.xml' + else: + # this may not be correct. the urls are hardcoded anyway in repos/utils.py + # need to figure out a better way to determine which repo/repo url to use + url = 'https://api.gentoo.org/overlays/repositories.xml' + mirror, c = Mirror.objects.get_or_create(repo=repo, url=url, mirrorlist=True) + MirrorPackage.objects.create(mirror=mirror, package=package) package.save() return package + + +def get_arch(arch): + """ Get or create MachineArchitecture from arch + Returns the MachineArchitecture + """ + return MachineArchitecture.objects.get_or_create(name=arch)[0] + + +def get_os(os, arch): + """ Get or create OSRelease and OSVariant from os details + Returns the OSVariant + """ + cpe_name = codename = osrelease_codename = osvariant_codename = None + osrelease_name = osvariant_name = os + + # find cpe_name if it exists + match = re.match(r'(.*) \[(.*)\]', os) + if match: + os = match.group(1) + cpe_name = match.group(2) + + # find codename if it exists + match = re.match(r'(.*) \((.*)\)', os) + if match: + os = match.group(1) + codename = match.group(2) + if os.startswith('AlmaLinux'): + # alma changes the codename with each minor release, so it's useless to us now + osvariant_codename = codename + else: + osrelease_codename = codename + + osrelease_name = os + osvariant_name = os + + if os.startswith('Gentoo'): + osrelease_name = 'Gentoo Linux' + # presumptive, can be changed once a real cpe is assigned/used + cpe_name = 'cpe:2.3:o:gentoo:gentoo_linux:::' + elif os.startswith('Arch'): + # presumptive, can be changed once a real cpe is assigned/used + cpe_name = 
'cpe:2.3:o:archlinux:arch_linux:::' + elif os.startswith('Debian'): + major, minor = os.split(' ')[1].split('.') + debian_version = f'{major}.{minor}' + osrelease_name = f'Debian {major}' + # presumptive, can be changed once a real cpe is assigned/used + cpe_name = f'cpe:2.3:o:debian:debian_linux:{debian_version}::' + elif os.startswith('Ubuntu'): + lts = '' + if 'LTS' in os: + lts = ' LTS' + major, minor, patch = os.split(' ')[1].split('.') + ubuntu_version = f'{major}_{minor}' + osrelease_name = f'Ubuntu {major}.{minor}{lts}' + cpe_name = f'cpe:2.3:o:canonical:ubuntu_linux:{ubuntu_version}::' + elif os.startswith('AlmaLinux'): + osvariant_name = os.replace('AlmaLinux', 'Alma Linux') + osrelease_name = osvariant_name.split('.')[0] + elif os.startswith('Rocky'): + osvariant_name = os + osrelease_name = osvariant_name.split('.')[0] + elif os.startswith('Red Hat'): + osvariant_name = os.replace(' release', '') + osrelease_name = osvariant_name.split('.')[0] + elif os.startswith('Fedora'): + osvariant_name = os.replace(' release', '') + osrelease_name = osvariant_name.split('.')[0] + elif os.startswith('CentOS'): + osvariant_name = os.replace(' release', '') + osrelease_name = osvariant_name.split('.')[0] + elif os.startswith('Oracle'): + osvariant_name = os.replace(' Server', '') + osrelease_name = osvariant_name.split('.')[0] + + osrelease = get_osrelease(osrelease_name, osrelease_codename, cpe_name) + osvariant = get_osvariant(osrelease, osvariant_name, osvariant_codename, arch) + return osvariant + + +def get_osrelease(osrelease_name, osrelease_codename, cpe_name): + """ Get or create OSRelease from os details + """ + osrelease = None + if cpe_name: + try: + osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name, cpe_name=cpe_name) + except IntegrityError: + osreleases = OSRelease.objects.filter(cpe_name=cpe_name) + if osreleases.count() == 1: + osrelease = osreleases[0] + osrelease.name = osrelease_name + if not osrelease and osrelease_codename: 
+ osreleases = OSRelease.objects.filter(codename=osrelease_codename) + if osreleases.count() == 1: + osrelease = osreleases[0] + if not osrelease and osrelease_name: + osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) + if osrelease and cpe_name: + osrelease.cpe_name = cpe_name + if osrelease and osrelease_codename: + osrelease.codename = osrelease_codename + osrelease.save() + return osrelease + + +def get_osvariant(osrelease, osvariant_name, osvariant_codename, arch): + """ Get or create OSVariant from OSRelease and os details + """ + if not osrelease: + return + + try: + osvariant, created = OSVariant.objects.get_or_create(name=osvariant_name, arch=arch) + except IntegrityError: + osvariants = OSVariant.objects.filter(name=osvariant_name) + if osvariants.count() == 1: + osvariant = osvariants[0] + if osvariant.arch is None: + osvariant.arch = arch + if osvariant and osvariant_codename: + osvariant.codename = osvariant_codename + osvariant.osrelease = osrelease + osvariant.save() + return osvariant + + +def get_domain(report_domain): + if not report_domain: + report_domain = 'unknown' + domain, c = Domain.objects.get_or_create(name=report_domain) + return domain + + +def get_host(report, arch, osvariant, domain): + host = None + if not report.host: + try: + report.host = str(gethostbyaddr(report.report_ip)[0]) + except herror: + report.host = report.report_ip + report.save() + + with transaction.atomic(): + try: + host, c = Host.objects.get_or_create( + hostname=report.host, + defaults={ + 'ipaddress': report.report_ip, + 'arch': arch, + 'osvariant': osvariant, + 'domain': domain, + 'lastreport': report.created, + }) + + host.ipaddress = report.report_ip + host.kernel = report.kernel + host.arch = arch + host.osvariant = osvariant + host.domain = domain + host.lastreport = report.created + host.tags = report.tags + if report.reboot == 'True': + host.reboot_required = True + else: + host.reboot_required = False + host.save() + except 
IntegrityError as e: + error_message.send(sender=None, text=e) + except DatabaseError as e: + error_message.send(sender=None, text=e) + if host: + host.check_rdns() + return host From dfe73ca85f1afdb703897aa8959a7c7217fee32f Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 15:07:59 -0500 Subject: [PATCH 114/199] add cdn-ubi redhat url --- reports/utils.py | 7 ++++--- repos/utils.py | 29 +++++++++++++++++++++++++---- util/tasks.py | 3 ++- 3 files changed, 31 insertions(+), 8 deletions(-) diff --git a/reports/utils.py b/reports/utils.py index 25a87047..ccfb1a12 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -255,10 +255,10 @@ def process_repo(repo, arch): unknown = [] for r_url in repo[3:]: try: - mirror = Mirror.objects.get(url=r_url) + mirror = Mirror.objects.get(url=r_url.strip('/')) except Mirror.DoesNotExist: if repository: - Mirror.objects.create(repo=repository, url=r_url) + Mirror.objects.create(repo=repository, url=r_url.rstrip('/')) else: unknown.append(r_url) else: @@ -277,10 +277,11 @@ def process_repo(repo, arch): repository.save() for url in unknown: - Mirror.objects.create(repo=repository, url=url) + Mirror.objects.create(repo=repository, url=url.rstrip('/')) for mirror in Mirror.objects.filter(repo=repository).values('url'): if mirror['url'].find('cdn.redhat.com') != -1 or \ + mirror['url'].find('cdn-ubi.redhat.com') != -1 or \ mirror['url'].find('nu.novell.com') != -1 or \ mirror['url'].find('updates.suse.com') != -1: repository.auth_required = True diff --git a/repos/utils.py b/repos/utils.py index f87536d5..1521f115 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -140,7 +140,7 @@ def find_mirror_url(stored_mirror_url, formats): for f in formats: if mirror_url.endswith(f): mirror_url = mirror_url[:-len(f)] - mirror_url = mirror_url.rstrip('/') + '/' + fmt + mirror_url = f"{mirror_url.rstrip('/')}/{fmt}" debug_message.send(sender=None, text=f'Checking for Mirror at {mirror_url}') try: res = get_url(mirror_url) @@ 
-287,11 +287,13 @@ def add_mirrors_from_urls(repo, mirror_urls): q = Q(mirrorlist=False, refresh=True, enabled=True) existing = repo.mirror_set.filter(q).count() if existing >= max_mirrors: - text = f'{existing} Mirrors already exist (max={max_mirrors}), not adding any more' + text = f'{existing} Mirrors already exist (max={max_mirrors}), not adding more' warning_message.send(sender=None, text=text) break from repos.models import Mirror - m, c = Mirror.objects.get_or_create(repo=repo, url=mirror_url) + # FIXME: maybe we should store the mirrorlist url with full path to repomd.xml? + # that is what metalink urls return now + m, c = Mirror.objects.get_or_create(repo=repo, url=mirror_url.rstrip('/').rstrip('repodata/repomd.xml')) if c: text = f'Added Mirror - {mirror_url}' info_message.send(sender=None, text=text) @@ -1140,5 +1142,24 @@ def clean_repos(): if rlen == 0: info_message.send(sender=None, text='No Repositories with zero Mirrors found.') else: - info_message.send(sender=None, text=f'Removing {rlen} empty Repos') + info_message.send(sender=None, text=f'Removing {rlen} empty Repositories.') repos.delete() + + +def remove_mirror_trailing_slashes(): + """ Remove trailing slashes from mirrors, delete duplicates + """ + from repos.models import Mirror + mirrors = Mirror.objects.filter(url__endswith='/') + mlen = mirrors.count() + if mlen == 0: + info_message.send(sender=None, text='No Mirrors with trailing slashes found.') + else: + info_message.send(sender=None, text=f'Removing trailing slashes from {mlen} Mirrors.') + for mirror in mirrors: + mirror.url = mirror.url.rstrip('/') + try: + mirror.save() + except IntegrityError: + warning_message.send(sender=None, text=f'Deleting duplicate Mirror {mirror.id}: {mirror.url}') + mirror.delete() diff --git a/util/tasks.py b/util/tasks.py index 20e55db1..f650e3e2 100644 --- a/util/tasks.py +++ b/util/tasks.py @@ -19,7 +19,7 @@ from arch.utils import clean_architectures from modules.utils import clean_modules from 
packages.utils import clean_packages, clean_packageupdates, clean_packagenames -from repos.utils import clean_repos +from repos.utils import clean_repos, remove_mirror_trailing_slashes @shared_task @@ -32,5 +32,6 @@ def clean_database(remove_duplicate_packages=False): clean_packagenames() clean_architectures() clean_repos() + remove_mirror_trailing_slashes() clean_modules() clean_packageupdates() From f96b9feb63e50e060bca5972bc7393f649d39415 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 15:10:09 -0500 Subject: [PATCH 115/199] improve mirror checksum handling --- repos/models.py | 21 +++++++++++++++------ repos/utils.py | 1 - 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/repos/models.py b/repos/models.py index 3e367635..75fee1a7 100644 --- a/repos/models.py +++ b/repos/models.py @@ -79,10 +79,11 @@ def refresh(self, force=False): """ Refresh all of a repos mirror metadata, force can be set to force a reset of all the mirrors metadata """ - if force: for mirror in self.mirror_set.all(): mirror.packages_checksum = None + mirror.modules_checksum = None + mirror.errata_checksum = None mirror.save() if not self.auth_required: @@ -98,19 +99,24 @@ def refresh(self, force=False): text = f'Error: unknown repo type for repo {self.id}: {self.repotype}' error_message.send(sender=None, text=text) else: - text = 'Repo requires certificate authentication, not updating' + text = 'Repo requires authentication, not updating' warning_message.send(sender=None, text=text) - def refresh_errata(self): + def refresh_errata(self, force=False): + """ Refresh errata metadata for all of a repos mirrors + """ + if force: + for mirror in self.mirror_set.all(): + mirror.errata_checksum = None + mirror.save() if self.repotype == Repository.RPM: - refresh_yum_repo_errata(self) + refresh_repo_errata(self) def disable(self): """ Disable a repo. 
This involves disabling each mirror, which stops it being considered for package updates, and disabling refresh for each mirror so that it doesn't try to update its package metadata. """ - self.enabled = False for mirror in self.mirror_set.all(): mirror.enabled = False @@ -122,7 +128,6 @@ def enable(self): to be considered for package updates, and enabling refresh for each mirror so that it updates its package metadata. """ - self.enabled = True for mirror in self.mirror_set.all(): mirror.enabled = True @@ -169,6 +174,10 @@ def fail(self): Set MAX_MIRROR_FAILURES to -1 to disable marking mirrors as failures Default is 28 """ + if self.repo.auth_required: + text = 'Mirror requires authentication, not updating' + warning_message.send(sender=None, text=text) + return text = f'No usable mirror found at {self.url}' error_message.send(sender=None, text=text) default_max_mirror_failures = 28 diff --git a/repos/utils.py b/repos/utils.py index 1521f115..51a7b0ba 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -565,7 +565,6 @@ def fetch_mirror_data(mirror, url, text, checksum=None, checksum_type=None, meta data = download_url(res, text) if not data: - mirror.fail() return if checksum and checksum_type and metadata_type: From 5d76b86fc448aa715b66f00aa00304ac241ba672 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 15:12:21 -0500 Subject: [PATCH 116/199] refactor and improve repo type handling --- repos/models.py | 8 +- repos/repo_types/arch.py | 115 +++++ repos/repo_types/deb.py | 109 +++++ repos/repo_types/gentoo.py | 282 +++++++++++++ repos/repo_types/rpm.py | 99 +++++ repos/repo_types/yast.py | 69 +++ repos/repo_types/yum.py | 254 +++++++++++ repos/utils.py | 846 +------------------------------------ 8 files changed, 941 insertions(+), 841 deletions(-) create mode 100644 repos/repo_types/arch.py create mode 100644 repos/repo_types/deb.py create mode 100644 repos/repo_types/gentoo.py create mode 100644 repos/repo_types/rpm.py create mode 100644 
repos/repo_types/yast.py create mode 100644 repos/repo_types/yum.py diff --git a/repos/models.py b/repos/models.py index 75fee1a7..b84f7b19 100644 --- a/repos/models.py +++ b/repos/models.py @@ -22,8 +22,10 @@ from packages.models import Package from util import get_setting_of_type -from repos.utils import refresh_deb_repo, refresh_rpm_repo, refresh_arch_repo, refresh_gentoo_repo, \ - refresh_yum_repo_errata +from repos.repo_types.deb import refresh_deb_repo +from repos.repo_types.rpm import refresh_rpm_repo, refresh_repo_errata +from repos.repo_types.arch import refresh_arch_repo +from repos.repo_types.gentoo import refresh_gentoo_repo from patchman.signals import info_message, warning_message, error_message @@ -38,7 +40,7 @@ class Repository(models.Model): (RPM, 'rpm'), (DEB, 'deb'), (ARCH, 'arch'), - (GENTOO, 'gentoo'), + (GENTOO, 'gentoo') ) name = models.CharField(max_length=255, unique=True) diff --git a/repos/repo_types/arch.py b/repos/repo_types/arch.py new file mode 100644 index 00000000..ed539a3b --- /dev/null +++ b/repos/repo_types/arch.py @@ -0,0 +1,115 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +import tarfile +from io import BytesIO + +from packages.models import PackageString +from patchman.signals import info_message, warning_message, pbar_start, pbar_update +from repos.utils import get_max_mirrors, fetch_mirror_data, find_mirror_url, update_mirror_packages +from util import get_datetime_now, get_checksum, Checksum + + +def refresh_arch_repo(repo): + """ Refresh all mirrors of an arch linux repo + """ + max_mirrors = get_max_mirrors() + fname = f'{repo.arch}/{repo.repo_id}.db' + ts = get_datetime_now() + + enabled_mirrors = repo.mirror_set.filter(refresh=True, enabled=True) + for i, mirror in enumerate(enabled_mirrors): + if i >= max_mirrors: + text = f'{max_mirrors} Mirrors already refreshed (max={max_mirrors}), skipping further refreshes' + warning_message.send(sender=None, text=text) + break + + res = find_mirror_url(mirror.url, [fname]) + if not res: + continue + mirror_url = res.url + text = f'Found Arch Repo - {mirror_url}' + info_message.send(sender=None, text=text) + + package_data = fetch_mirror_data( + mirror=mirror, + url=mirror_url, + text='Downloading Repo data') + if not package_data: + continue + + computed_checksum = get_checksum(package_data, Checksum.sha1) + if mirror.packages_checksum == computed_checksum: + text = 'Mirror checksum has not changed, not refreshing Package metadata' + warning_message.send(sender=None, text=text) + continue + else: + mirror.packages_checksum = computed_checksum + + packages = extract_arch_packages(package_data) + update_mirror_packages(mirror, packages) + packages.clear() + mirror.timestamp = ts + mirror.save() + + +def extract_arch_packages(data): + """ Extract package metadata from an arch linux tarfile + """ + from packages.utils import find_evr + bio = BytesIO(data) + tf = tarfile.open(fileobj=bio, mode='r:*') + packages = set() + plen = len(tf.getnames()) + if plen > 0: + pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) + for i, tarinfo in enumerate(tf): + 
pbar_update.send(sender=None, index=i + 1) + if tarinfo.isfile(): + name_sec = ver_sec = arch_sec = False + t = tf.extractfile(tarinfo).read() + for line in t.decode('utf-8').splitlines(): + if line.startswith('%NAME%'): + name_sec = True + continue + if name_sec: + name_sec = False + name = line + continue + if line.startswith('%VERSION%'): + ver_sec = True + continue + if ver_sec: + ver_sec = False + epoch, version, release = find_evr(line) + continue + if line.startswith('%ARCH%'): + arch_sec = True + continue + if arch_sec: + arch_sec = False + arch = line + continue + package = PackageString(name=name.lower(), + epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype='A') + packages.add(package) + else: + info_message.send(sender=None, text='No Packages found in Repo') + return packages diff --git a/repos/repo_types/deb.py b/repos/repo_types/deb.py new file mode 100644 index 00000000..ce5ea6c5 --- /dev/null +++ b/repos/repo_types/deb.py @@ -0,0 +1,109 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +import re +from debian.deb822 import Packages +from debian.debian_support import Version + +from packages.models import PackageString +from patchman.signals import error_message, pbar_start, pbar_update, info_message, warning_message +from repos.utils import fetch_mirror_data, update_mirror_packages, find_mirror_url +from util import get_datetime_now, get_checksum, Checksum, extract + + +def extract_deb_packages(data, url): + """ Extract package metadata from debian Packages file + """ + try: + extracted = extract(data, url).decode('utf-8') + except UnicodeDecodeError as e: + error_message.send(sender=None, text=f'Skipping {url} : {e}') + return + package_re = re.compile('^Package: ', re.M) + plen = len(package_re.findall(extracted)) + packages = set() + + if plen > 0: + pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) + for i, stanza in enumerate(Packages.iter_paragraphs(extracted)): + # https://github.com/furlongm/patchman/issues/55 + if 'version' not in stanza: + continue + fullversion = Version(stanza['version']) + arch = stanza['architecture'] + name = stanza['package'] + epoch = fullversion._BaseVersion__epoch + if epoch is None: + epoch = '' + version = fullversion._BaseVersion__upstream_version + release = fullversion._BaseVersion__debian_revision + if release is None: + release = '' + pbar_update.send(sender=None, index=i + 1) + package = PackageString(name=name, + epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype='D') + packages.add(package) + else: + info_message.send(sender=None, text='No packages found in repo') + return packages + + +def refresh_deb_repo(repo): + """ Refresh a debian repo. + Checks for the Packages* files to determine what the mirror urls + are and then downloads and extracts packages from those files. 
+ """ + + formats = ['Packages.xz', 'Packages.bz2', 'Packages.gz', 'Packages'] + + ts = get_datetime_now() + enabled_mirrors = repo.mirror_set.filter(refresh=True, enabled=True) + for mirror in enabled_mirrors: + res = find_mirror_url(mirror.url, formats) + if not res: + continue + mirror_url = res.url + text = f'Found deb Repo - {mirror_url}' + info_message.send(sender=None, text=text) + + package_data = fetch_mirror_data( + mirror=mirror, + url=mirror_url, + text='Downloading Repo data') + if not package_data: + continue + + computed_checksum = get_checksum(package_data, Checksum.sha1) + if mirror.packages_checksum == computed_checksum: + text = 'Mirror checksum has not changed, not refreshing Package metadata' + warning_message.send(sender=None, text=text) + continue + else: + mirror.packages_checksum = computed_checksum + + packages = extract_deb_packages(package_data, mirror_url) + if not packages: + mirror.fail() + continue + + update_mirror_packages(mirror, packages) + packages.clear() + mirror.timestamp = ts + mirror.save() diff --git a/repos/repo_types/gentoo.py b/repos/repo_types/gentoo.py new file mode 100644 index 00000000..7aed5390 --- /dev/null +++ b/repos/repo_types/gentoo.py @@ -0,0 +1,282 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +import git +import os +import re +import shutil +import tarfile +import tempfile +from defusedxml import ElementTree as ET +from fnmatch import fnmatch +from io import BytesIO + +from arch.models import PackageArchitecture +from packages.models import PackageString +from packages.utils import find_evr +from patchman.signals import info_message, warning_message, error_message, pbar_start, pbar_update +from repos.utils import add_mirrors_from_urls, mirror_checksum_is_valid, update_mirror_packages +from util import extract, get_url, get_datetime_now, get_checksum, Checksum, download_url, response_is_valid + + +def refresh_gentoo_main_repo(repo): + """ Refresh all mirrors of the main gentoo repo + """ + mirrors = get_gentoo_mirror_urls() + add_mirrors_from_urls(repo, mirrors) + + +def refresh_gentoo_overlay_repo(repo): + """ Refresh all mirrors of a Gentoo overlay repo + """ + mirrors = get_gentoo_overlay_mirrors(repo.repo_id) + add_mirrors_from_urls(repo, mirrors) + + +def get_gentoo_ebuild_keywords(content): + """ Get the keywords for an ebuild + """ + keywords = set() + default_keywords = { + 'alpha', + 'amd64', + 'arm', + 'arm64', + 'hppa', + 'loong', + 'm68k', + 'mips', + 'ppc', + 'ppc64', + 'riscv', + 's390', + 'sparc', + 'x86', + } + for line in content.decode().splitlines(): + if not line.startswith('KEYWORDS='): + continue + all_keywords = line.split('=')[1].split('#')[0].strip(' "').split() + if len(all_keywords) == 0 or '*' in all_keywords: + all_keywords = default_keywords + for keyword in all_keywords: + if keyword.startswith('~'): + continue + if keyword.startswith('-'): + keyword = keyword.replace('-', '') + if keyword in all_keywords: + all_keywords.remove(keyword) + continue + keywords.add(keyword) + break + return keywords + + +def get_gentoo_overlay_mirrors(repo_name): + """Get the gentoo overlay repos that match repo.id + """ + gentoo_overlays_url = 'https://api.gentoo.org/overlays/repositories.xml' + res = 
get_url(gentoo_overlays_url) + if not res: + return + mirrors = [] + try: + tree = ET.parse(BytesIO(res.content)) + root = tree.getroot() + for child in root: + if child.tag == 'repo': + found = False + for element in child: + if element.tag == 'name' and element.text == repo_name: + found = True + if found and element.tag == 'source': + if element.text.startswith('http'): + mirrors.append(element.text) + except ET.ParseError as e: + error_message.send(sender=None, text=f'Error parsing {gentoo_overlays_url}: {e}') + return mirrors + + +def get_gentoo_mirror_urls(): + """ Use the Gentoo API to find http(s) mirrors + """ + gentoo_distfiles_url = 'https://api.gentoo.org/mirrors/distfiles.xml' + res = get_url(gentoo_distfiles_url) + if not res: + return + mirrors = {} + try: + tree = ET.parse(BytesIO(res.content)) + root = tree.getroot() + for child in root: + if child.tag == 'mirrorgroup': + for k, v in child.attrib.items(): + if k == 'region': + region = v + elif k == 'country': + country = v + for mirror in child: + for element in mirror: + if element.tag == 'name': + name = element.text + mirrors[name] = {} + mirrors[name]['region'] = region + mirrors[name]['country'] = country + mirrors[name]['urls'] = [] + elif element.tag == 'uri': + if element.get('protocol') == 'http': + mirrors[name]['urls'].append(element.text) + except ET.ParseError as e: + error_message.send(sender=None, text=f'Error parsing {gentoo_distfiles_url}: {e}') + mirror_urls = [] + # for now, ignore region data and choose MAX_MIRRORS mirrors at random + for _, v in mirrors.items(): + for url in v['urls']: + mirror_urls.append(url.rstrip('/') + '/snapshots/gentoo-latest.tar.xz') + return mirror_urls + + +def extract_gentoo_ebuilds(data): + """ Extract ebuilds from a Gentoo tarball + """ + extracted_ebuilds = {} + with tarfile.open(fileobj=BytesIO(data), mode='r') as tar: + for member in tar.getmembers(): + if member.isfile() and member.name.endswith('ebuild') and not 
member.name.endswith('skel.ebuild'): + file_content = tar.extractfile(member).read() + extracted_ebuilds[member.name] = file_content + return extracted_ebuilds + + +def extract_gentoo_packages(mirror, data): + """ Extract packages from a Gentoo mirror + """ + extracted_ebuilds = extract_gentoo_ebuilds(data) + return extract_gentoo_packages_from_ebuilds(extracted_ebuilds) + + +def extract_gentoo_packages_from_ebuilds(extracted_ebuilds): + """ Extract packages from ebuilds + """ + if not extracted_ebuilds: + return + + packages = set() + flen = len(extracted_ebuilds) + pbar_start.send(sender=None, ptext=f'Processing {flen} ebuilds', plen=flen) + for i, (path, content) in enumerate(extracted_ebuilds.items()): + pbar_update.send(sender=None, index=i + 1) + components = path.split(os.sep) + category = components[1] + name = components[2] + evr = components[3].replace(f'{name}-', '').replace('.ebuild', '') + epoch, version, release = find_evr(evr) + arches = get_gentoo_ebuild_keywords(content) + for arch in arches: + package = PackageString( + name=name.lower(), + epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype='G', + category=category, + ) + packages.add(package) + plen = len(packages) + info_message.send(sender=None, text=f'Extracted {plen} Packages', plen=plen) + return packages + + +def extract_gentoo_overlay_packages(mirror): + """ Extract packages from gentoo overlay repo + """ + t = tempfile.mkdtemp() + git.Repo.clone_from(mirror.url, t, branch='master', depth=1) + packages = set() + arch, c = PackageArchitecture.objects.get_or_create(name='any') + for root, dirs, files in os.walk(t): + for name in files: + if fnmatch(name, '*.ebuild'): + full_name = root.replace(t + '/', '') + p_category, p_name = full_name.split('/') + m = re.match(fr'{p_name}-(.*)\.ebuild', name) + if m: + p_evr = m.group(1) + epoch, version, release = find_evr(p_evr) + package = PackageString( + name=p_name.lower(), + epoch=epoch, + version=version, + 
release=release, + arch=arch, + packagetype='G', + category=p_category, + ) + packages.add(package) + shutil.rmtree(t) + return packages + + +def refresh_gentoo_repo(repo): + """ Refresh a Gentoo repo + """ + if repo.repo_id == 'gentoo': + repo_type = 'main' + refresh_gentoo_main_repo(repo) + else: + refresh_gentoo_overlay_repo(repo) + repo_type = 'overlay' + ts = get_datetime_now() + for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True): + res = get_url(mirror.url + '.md5sum') + data = download_url(res, 'Downloading Repo checksum') + if data is None: + mirror.fail() + continue + checksum = data.decode().split()[0] + if checksum is None: + mirror.fail() + continue + if mirror.packages_checksum == checksum: + text = 'Mirror checksum has not changed, not refreshing Package metadata' + warning_message.send(sender=None, text=text) + continue + res = get_url(mirror.url) + mirror.last_access_ok = response_is_valid(res) + if mirror.last_access_ok: + data = download_url(res, 'Downloading Repo data') + if data is None: + mirror.fail() + continue + extracted = extract(data, mirror.url) + text = f'Found Gentoo Repo - {mirror.url}' + info_message.send(sender=None, text=text) + computed_checksum = get_checksum(data, Checksum.md5) + if not mirror_checksum_is_valid(computed_checksum, checksum, mirror, 'package'): + continue + else: + mirror.packages_checksum = checksum + if repo_type == 'main': + packages = extract_gentoo_packages(mirror, extracted) + elif repo_type == 'overlay': + packages = extract_gentoo_overlay_packages(mirror) + mirror.timestamp = ts + if packages: + update_mirror_packages(mirror, packages) + else: + mirror.fail() + mirror.save() diff --git a/repos/repo_types/rpm.py b/repos/repo_types/rpm.py new file mode 100644 index 00000000..27aef833 --- /dev/null +++ b/repos/repo_types/rpm.py @@ -0,0 +1,99 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. 
+# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.db.models import Q + +from patchman.signals import info_message, warning_message +from repos.repo_types.yast import refresh_yast_repo +from repos.repo_types.yum import refresh_yum_repo +from repos.utils import check_for_metalinks, check_for_mirrorlists, find_mirror_url, get_max_mirrors, fetch_mirror_data +from util import get_datetime_now + + +def refresh_repo_errata(repo): + refresh_rpm_repo_mirrors(repo, errata_only=True) + + +def refresh_rpm_repo(repo): + """ Refresh an rpm repo (yum or yast) + Checks if the repo url is a mirrorlist or metalink, + and extracts mirrors if so, then refreshes the mirrors + """ + check_for_mirrorlists(repo) + check_for_metalinks(repo) + refresh_rpm_repo_mirrors(repo) + + +def max_mirrors_refreshed(repo, checksum, ts): + """ Only refresh X mirrors, where X = max_mirrors + """ + if checksum is None: + return False + max_mirrors = get_max_mirrors() + mirrors_q = Q(mirrorlist=False, refresh=True, enabled=True, timestamp=ts, packages_checksum=checksum) + have_checksum_and_ts = repo.mirror_set.filter(mirrors_q).count() + if have_checksum_and_ts >= max_mirrors: + text = f'{max_mirrors} Mirrors already have this checksum and timestamp, skipping further refreshes' + warning_message.send(sender=None, text=text) + return True + return False + + +def refresh_rpm_repo_mirrors(repo, errata_only=False): + """ Checks a number of common yum repo formats to determine + which type of repo it is, then 
refreshes the mirrors + """ + formats = [ + 'repodata/repomd.xml.xz', + 'repodata/repomd.xml.bz2', + 'repodata/repomd.xml.gz', + 'repodata/repomd.xml', + 'suse/repodata/repomd.xml.xz', + 'suse/repodata/repomd.xml.bz2', + 'suse/repodata/repomd.xml.gz', + 'suse/repodata/repomd.xml', + 'content', + ] + ts = get_datetime_now() + enabled_mirrors = repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True) + for i, mirror in enumerate(enabled_mirrors): + res = find_mirror_url(mirror.url, formats) + if not res: + mirror.fail() + continue + mirror_url = res.url + + repo_data = fetch_mirror_data( + mirror=mirror, + url=mirror_url, + text='Downloading Repo data') + if not repo_data: + continue + + if mirror_url.endswith('content'): + text = f'Found yast rpm Repo - {mirror_url}' + info_message.send(sender=None, text=text) + refresh_yast_repo(mirror, repo_data) + else: + text = f'Found yum rpm Repo - {mirror_url}' + info_message.send(sender=None, text=text) + refresh_yum_repo(mirror, repo_data, mirror_url, errata_only) + if mirror.last_access_ok: + mirror.timestamp = ts + mirror.save() + checksum = mirror.packages_checksum + if max_mirrors_refreshed(repo, checksum, ts): + break diff --git a/repos/repo_types/yast.py b/repos/repo_types/yast.py new file mode 100644 index 00000000..a14e69af --- /dev/null +++ b/repos/repo_types/yast.py @@ -0,0 +1,69 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +import re + +from packages.models import PackageString +from patchman.signals import pbar_start, pbar_update, info_message +from repos.utils import fetch_mirror_data, update_mirror_packages +from util import extract + + +def refresh_yast_repo(mirror, data): + """ Refresh package metadata for a yast-style rpm mirror + and add the packages to the mirror + """ + package_dir = re.findall('DESCRDIR *(.*)', data.decode('utf-8'))[0] + package_url = f'{mirror.url}/{package_dir}/packages.gz' + + package_data = fetch_mirror_data( + mirror=mirror, + url=package_url, + text='Downloading yast Repo data') + if not package_data: + return + + mirror.packages_checksum = 'yast' + packages = extract_yast_packages(package_data) + if packages: + update_mirror_packages(mirror, packages) + packages.clear() + + +def extract_yast_packages(data): + """ Extract package metadata from yast metadata file + """ + extracted = extract(data, 'gz').decode('utf-8') + pkgs = re.findall('=Pkg: (.*)', extracted) + plen = len(pkgs) + packages = set() + + if plen > 0: + pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) + + for i, pkg in enumerate(pkgs): + pbar_update.send(sender=None, index=i + 1) + name, version, release, arch = pkg.split() + package = PackageString(name=name.lower(), + epoch='', + version=version, + release=release, + arch=arch, + packagetype='R') + packages.add(package) + else: + info_message.send(sender=None, text='No packages found in repo') + return packages diff --git a/repos/repo_types/yum.py b/repos/repo_types/yum.py new file mode 100644 index 00000000..e69434f0 --- /dev/null +++ b/repos/repo_types/yum.py @@ -0,0 +1,254 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. 
+# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see +# Copyright 2013-2025 Marcus Furlong # # This file is part of Patchman. # @@ -15,37 +15,24 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see -import io -import os -import git import re -import shutil -import tarfile -import tempfile -import yaml from io import BytesIO from defusedxml import ElementTree as ET -from debian.debian_support import Version -from debian.deb822 import Packages -from fnmatch import fnmatch from tenacity import RetryError from django.db import IntegrityError, DatabaseError, transaction from django.db.models import Q -from arch.models import PackageArchitecture -from packages.models import Package, PackageString -from packages.utils import parse_package_string, get_or_create_package, find_evr, \ - convert_package_to_packagestring, convert_packagestring_to_package -from util import get_url, download_url, response_is_valid, extract, get_checksum, Checksum, get_setting_of_type, \ - get_datetime_now +from packages.models import Package +from packages.utils import convert_package_to_packagestring, convert_packagestring_to_package +from util import get_url, download_url, response_is_valid, extract, get_checksum, Checksum, get_setting_of_type from patchman.signals import info_message, warning_message, error_message, debug_message, pbar_start, pbar_update def get_or_create_repo(r_name, r_arch, r_type, r_id=None): - """ Get or create a Repository object. Returns the object. Returns None if - it cannot get or create the object. + """ Get or create a Repository object and returns the object. + Returns None if it cannot get or create the object. 
""" from repos.models import Repository repositories = Repository.objects.all() @@ -105,33 +92,6 @@ def update_mirror_packages(mirror, packages): mirror.save() -def get_repomd_url(mirror_url, data, url_type='primary'): - if isinstance(data, str): - if data.startswith('Bad repo - not in list') or data.startswith('Invalid repo'): - return None, None, None - - ns = 'http://linux.duke.edu/metadata/repo' - extracted = extract(data, mirror_url) - location = None - try: - tree = ET.parse(BytesIO(extracted)) - root = tree.getroot() - for child in root: - if child.attrib.get('type') == url_type: - for grandchild in child: - if grandchild.tag == f'{{{ns}}}location': - location = grandchild.attrib.get('href') - if grandchild.tag == f'{{{ns}}}checksum': - checksum = grandchild.text - checksum_type = grandchild.attrib.get('type') - except ET.ParseError as e: - error_message.send(sender=None, text=(f'Error parsing repomd from {mirror_url}: {e}')) - if not location: - return None, None, None - url = str(mirror_url.rsplit('/', 2)[0]) + '/' + location - return url, checksum, checksum_type - - def find_mirror_url(stored_mirror_url, formats): """ Find the actual URL of the mirror by trying predefined paths """ @@ -150,70 +110,6 @@ def find_mirror_url(stored_mirror_url, formats): return res -def get_gentoo_mirror_urls(): - """ Use the Gentoo API to find http(s) mirrors - """ - gentoo_distfiles_url = 'https://api.gentoo.org/mirrors/distfiles.xml' - res = get_url(gentoo_distfiles_url) - if not res: - return - mirrors = {} - try: - tree = ET.parse(BytesIO(res.content)) - root = tree.getroot() - for child in root: - if child.tag == 'mirrorgroup': - for k, v in child.attrib.items(): - if k == 'region': - region = v - elif k == 'country': - country = v - for mirror in child: - for element in mirror: - if element.tag == 'name': - name = element.text - mirrors[name] = {} - mirrors[name]['region'] = region - mirrors[name]['country'] = country - mirrors[name]['urls'] = [] - elif element.tag == 
'uri': - if element.get('protocol') == 'http': - mirrors[name]['urls'].append(element.text) - except ET.ParseError as e: - error_message.send(sender=None, text=f'Error parsing {gentoo_distfiles_url}: {e}') - mirror_urls = [] - # for now, ignore region data and choose MAX_MIRRORS mirrors at random - for _, v in mirrors.items(): - for url in v['urls']: - mirror_urls.append(url.rstrip('/') + '/snapshots/gentoo-latest.tar.xz') - return mirror_urls - - -def get_gentoo_overlay_mirrors(repo_name): - """Get the gentoo overlay repos that match repo.id - """ - gentoo_overlays_url = 'https://api.gentoo.org/overlays/repositories.xml' - res = get_url(gentoo_overlays_url) - if not res: - return - mirrors = [] - try: - tree = ET.parse(BytesIO(res.content)) - root = tree.getroot() - for child in root: - if child.tag == 'repo': - found = False - for element in child: - if element.tag == 'name' and element.text == repo_name: - found = True - if found and element.tag == 'source': - if element.text.startswith('http'): - mirrors.append(element.text) - except ET.ParseError as e: - error_message.send(sender=None, text=f'Error parsing {gentoo_overlays_url}: {e}') - return mirrors - - def is_metalink(url): """ Checks if a given url is a metalink url """ @@ -268,7 +164,7 @@ def get_mirrorlist_urls(url): return mirror_urls = re.findall(r'^http[s]*://.*$|^ftp://.*$', data.decode('utf-8'), re.MULTILINE) if mirror_urls: - + debug_message.send(sender=None, text=f'Found mirrorlist: {url}') return mirror_urls else: debug_message.send(sender=None, text=f'Not a mirrorlist: {url}') @@ -326,227 +222,10 @@ def check_for_metalinks(repo): mirror.mirrorlist = True mirror.last_access_ok = True mirror.save() - text = f'Found metalink - {mirror.url}' - info_message.send(sender=None, text=text) + info_message.send(sender=None, text=f'Found metalink - {mirror.url}') add_mirrors_from_urls(repo, mirror_urls) -def extract_module_metadata(data, url, repo): - """ Extract module metadata from a modules.yaml file - 
""" - modules = set() - extracted = extract(data, url) - try: - modules_yaml = yaml.safe_load_all(extracted) - except yaml.YAMLError as e: - error_message.send(sender=None, text=f'Error parsing modules.yaml: {e}') - - mlen = len(re.findall(r'---', yaml.dump(extracted.decode()))) - pbar_start.send(sender=None, ptext=f'Extracting {mlen} Modules ', plen=mlen) - for i, doc in enumerate(modules_yaml): - pbar_update.send(sender=None, index=i + 1) - document = doc['document'] - modulemd = doc['data'] - if document == 'modulemd': - modulemd = doc['data'] - m_name = modulemd.get('name') - m_stream = modulemd['stream'] - m_version = modulemd.get('version') - m_context = modulemd.get('context') - arch = modulemd.get('arch') - raw_packages = modulemd.get('artifacts', {}).get('rpms', '') - # raw_profiles = list(modulemd.get('profiles', {}).keys()) - - packages = set() - p_type = Package.RPM - for pkg_str in raw_packages: - p_name, p_epoch, p_ver, p_rel, p_dist, p_arch = parse_package_string(pkg_str) - package = get_or_create_package(p_name, p_epoch, p_ver, p_rel, p_arch, p_type) - packages.add(package) - - from modules.utils import get_or_create_module - module, created = get_or_create_module(m_name, m_stream, m_version, m_context, arch, repo) - - package_ids = [] - for package in packages: - package_ids.append(package.id) - module.packages.add(package) - for package in module.packages.all(): - if package.id not in package_ids: - module.packages.remove(package) - modules.add(module) - - -def extract_yum_packages(data, url): - """ Extract package metadata from a yum primary.xml file - """ - extracted = extract(data, url) - ns = 'http://linux.duke.edu/metadata/common' - packages = set() - try: - context = ET.iterparse(BytesIO(extracted), events=('start', 'end')) - for event, elem in context: - if event == 'start': - if elem.tag == f'{{{ns}}}metadata': - plen = int(elem.attrib.get('packages')) - break - pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) - 
i = 0 - for event, elem in context: - if event == 'start': - if elem.tag == f'{{{ns}}}package': - if elem.attrib.get('type') == 'rpm': - name = version = release = arch = '' - elif event == 'end': - if elem.tag == f'{{{ns}}}name': - name = elem.text.lower() - elif elem.tag == f'{{{ns}}}arch': - arch = elem.text - elif elem.tag == f'{{{ns}}}version': - fullversion = elem - epoch = fullversion.get('epoch') - version = fullversion.get('ver') - release = fullversion.get('rel') - elif elem.tag == f'{{{ns}}}package': - if name and version and release and arch: - if epoch == '0': - epoch = '' - package = PackageString( - name=name, - epoch=epoch, - version=version, - release=release, - arch=arch, - packagetype='R', - ) - packages.add(package) - pbar_update.send(sender=None, index=i + 1) - i += 1 - else: - text = f'Error parsing Package: {name} {epoch} {version} {release} {arch}' - error_message.send(sender=None, text=text) - elem.clear() - except ET.ParseError as e: - error_message.send(sender=None, text=f'Error parsing yum primary.xml from {url}: {e}') - return packages - - -def extract_deb_packages(data, url): - """ Extract package metadata from debian Packages file - """ - try: - extracted = extract(data, url).decode('utf-8') - except UnicodeDecodeError as e: - error_message.send(sender=None, text=f'Skipping {url} : {e}') - return - package_re = re.compile('^Package: ', re.M) - plen = len(package_re.findall(extracted)) - packages = set() - - if plen > 0: - pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) - for i, stanza in enumerate(Packages.iter_paragraphs(extracted)): - # https://github.com/furlongm/patchman/issues/55 - if 'version' not in stanza: - continue - fullversion = Version(stanza['version']) - arch = stanza['architecture'] - name = stanza['package'] - epoch = fullversion._BaseVersion__epoch - if epoch is None: - epoch = '' - version = fullversion._BaseVersion__upstream_version - release = fullversion._BaseVersion__debian_revision 
- if release is None: - release = '' - pbar_update.send(sender=None, index=i + 1) - package = PackageString(name=name, - epoch=epoch, - version=version, - release=release, - arch=arch, - packagetype='D') - packages.add(package) - else: - info_message.send(sender=None, text='No packages found in repo') - return packages - - -def extract_yast_packages(data): - """ Extract package metadata from yast metadata file - """ - extracted = extract(data, 'gz').decode('utf-8') - pkgs = re.findall('=Pkg: (.*)', extracted) - plen = len(pkgs) - packages = set() - - if plen > 0: - pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) - - for i, pkg in enumerate(pkgs): - pbar_update.send(sender=None, index=i + 1) - name, version, release, arch = pkg.split() - package = PackageString(name=name.lower(), - epoch='', - version=version, - release=release, - arch=arch, - packagetype='R') - packages.add(package) - else: - info_message.send(sender=None, text='No packages found in repo') - return packages - - -def extract_arch_packages(data): - """ Extract package metadata from an arch linux tarfile - """ - from packages.utils import find_evr - bio = BytesIO(data) - tf = tarfile.open(fileobj=bio, mode='r:*') - packages = set() - plen = len(tf.getnames()) - if plen > 0: - pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) - for i, tarinfo in enumerate(tf): - pbar_update.send(sender=None, index=i + 1) - if tarinfo.isfile(): - name_sec = ver_sec = arch_sec = False - t = tf.extractfile(tarinfo).read() - for line in t.decode('utf-8').splitlines(): - if line.startswith('%NAME%'): - name_sec = True - continue - if name_sec: - name_sec = False - name = line - continue - if line.startswith('%VERSION%'): - ver_sec = True - continue - if ver_sec: - ver_sec = False - epoch, version, release = find_evr(line) - continue - if line.startswith('%ARCH%'): - arch_sec = True - continue - if arch_sec: - arch_sec = False - arch = line - continue - package = 
PackageString(name=name.lower(), - epoch=epoch, - version=version, - release=release, - arch=arch, - packagetype='A') - packages.add(package) - else: - info_message.send(sender=None, text='No packages found in repo') - return packages - - def fetch_mirror_data(mirror, url, text, checksum=None, checksum_type=None, metadata_type=None): if not url: mirror.fail() @@ -593,515 +272,6 @@ def mirror_checksum_is_valid(computed, provided, mirror, metadata_type): return True -def extract_updateinfo(data, url): - """ Parses updateinfo.xml and extracts package/errata information - """ - print(url) - from errata.utils import get_or_create_erratum - extracted = extract(data, url) - updates = [] - try: - tree = ET.parse(BytesIO(extracted)) - root = tree.getroot() - elen = root.__len__() - pbar_start.send(sender=None, ptext=f'Extracting {elen} rpm Errata', plen=elen) - for i, update in enumerate(root.findall('update')): - pbar_update.send(sender=None, index=i + 1) - e_type = update.attrib.get('type') - name = update.find('id').text - synopsis = update.find('title').text - issue_date = update.find('issued').attrib.get('date') - e, created = get_or_create_erratum(name, e_type, issue_date, synopsis) - - xreferences = update.find('references') - for reference in xreferences.findall('reference'): - if reference.attrib.get('type') == 'cve': - cve_id = reference.attrib.get('id') - e.add_cve(cve_id) - else: - ref = reference.attrib.get('href') - e.add_reference('Link', ref) - - osrelease_name = None - release = update.find('release') - if release: - osrelease_name = release.text - - pkglist = update.find('pkglist') - packages = set() - for collection in pkglist.findall('collection'): - if not osrelease_name: - collection_name = collection.find('name') - if collection_name is not None: - osrelease_name = collection_name.text - from operatingsystems.models import OSRelease - if osrelease_name: - osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) - 
e.osreleases.add(osrelease) - # TODO for opensuse, add if repo is associated with an os release - - for pkg in collection.findall('package'): - name = pkg.attrib.get('name') - epoch = pkg.attrib.get('epoch') - version = pkg.attrib.get('version') - release = pkg.attrib.get('release') - arch = pkg.attrib.get('arch') - package = get_or_create_package( - name=name.lower(), - epoch=epoch, - version=version, - release=release, - arch=arch, - p_type='R', - ) - packages.add(package) - e.add_packages(packages) - e.save() - except ET.ParseError as e: - error_message.send(sender=None, text=f'Error parsing updateinfo file: {e}') - return updates - - -def refresh_rpm_updateinfo(mirror, data, mirror_url): - url, checksum, checksum_type = get_repomd_url(mirror_url, data, url_type='updateinfo') - data = fetch_mirror_data( - mirror=mirror, - url=url, - checksum=checksum, - checksum_type=checksum_type, - text='Downloading Errata data', - metadata_type='updateinfo') - - if not mirror.last_access_ok: - return - - if mirror.modules_checksum == checksum: - text = 'Mirror Errata checksum has not changed, skipping Erratum refresh' - warning_message.send(sender=None, text=text) - return - else: - mirror.modules_checksum = checksum - mirror.save() - - extract_updateinfo(data, url) - - -def refresh_rpm_modules(mirror, data, mirror_url): - url, checksum, checksum_type = get_repomd_url(mirror_url, data, url_type='modules') - if url: - data = fetch_mirror_data( - mirror=mirror, - url=url, - checksum=checksum, - checksum_type=checksum_type, - text='Downloading Module data', - metadata_type='module') - - if not mirror.last_access_ok: - return - - if mirror.modules_checksum == checksum: - text = 'Mirror Modules checksum has not changed, skipping Module refresh' - warning_message.send(sender=None, text=text) - return - else: - mirror.modules_checksum = checksum - mirror.save() - - extract_module_metadata(data, url, mirror.repo) - - -def refresh_rpm_primary(mirror, data, mirror_url, ts): - url, 
checksum, checksum_type = get_repomd_url(mirror_url, data, url_type='primary') - data = fetch_mirror_data( - mirror=mirror, - url=url, - checksum=checksum, - checksum_type=checksum_type, - text='Downloading Package data', - metadata_type='package') - - if not mirror.last_access_ok: - return - - if mirror.packages_checksum == checksum: - text = 'Mirror Packages checksum has not changed, skipping Package refresh' - warning_message.send(sender=None, text=text) - return - else: - mirror.packages_checksum = checksum - mirror.save() - - # only refresh X mirrors, where X = max_mirrors - max_mirrors = get_max_mirrors() - mirrors_q = Q(mirrorlist=False, refresh=True, enabled=True, timestamp=ts, packages_checksum=checksum) - have_checksum_and_ts = mirror.repo.mirror_set.filter(mirrors_q).count() - if have_checksum_and_ts >= max_mirrors: - text = f'{max_mirrors} Mirrors already have this checksum and timestamp, skipping Package refresh' - info_message.send(sender=None, text=text) - return - - packages = extract_yum_packages(data, url) - if packages: - update_mirror_packages(mirror, packages) - - -def refresh_yum_repo(mirror, data, mirror_url, ts, errata_only): - """ Refresh package, module and updateinfo/errata data for a yum-style rpm Mirror - """ - if not errata_only: - refresh_rpm_primary(mirror, data, mirror_url, ts) - refresh_rpm_modules(mirror, data, mirror_url) - refresh_rpm_updateinfo(mirror, data, mirror_url) - - -def refresh_yum_repo_errata(repo): - refresh_rpm_repo_mirrors(repo, errata_only=True) - - -def refresh_arch_repo(repo): - """ Refresh all mirrors of an arch linux repo - """ - max_mirrors = get_max_mirrors() - fname = f'{repo.arch}/{repo.repo_id}.db' - ts = get_datetime_now() - - enabled_mirrors = repo.mirror_set.filter(refresh=True, enabled=True) - for i, mirror in enumerate(enabled_mirrors): - if i >= max_mirrors: - text = f'{max_mirrors} Mirrors already refreshed (max={max_mirrors}), skipping further refreshes' - warning_message.send(sender=None, 
text=text) - break - - res = find_mirror_url(mirror.url, [fname]) - if not res: - continue - mirror_url = res.url - text = f'Found Arch Repo - {mirror_url}' - info_message.send(sender=None, text=text) - - package_data = fetch_mirror_data( - mirror=mirror, - url=mirror_url, - text='Downloading Repo data') - if not package_data: - continue - - computed_checksum = get_checksum(package_data, Checksum.sha1) - if mirror.packages_checksum == computed_checksum: - text = 'Mirror checksum has not changed, not refreshing Package metadata' - warning_message.send(sender=None, text=text) - continue - else: - mirror.packages_checksum = computed_checksum - - packages = extract_arch_packages(package_data) - update_mirror_packages(mirror, packages) - packages.clear() - mirror.timestamp = ts - mirror.save() - - -def refresh_gentoo_main_repo(repo): - """ Refresh all mirrors of the main gentoo repo - """ - mirrors = get_gentoo_mirror_urls() - add_mirrors_from_urls(repo, mirrors) - - -def refresh_gentoo_overlay_repo(repo): - """ Refresh all mirrors of a Gentoo overlay repo - """ - mirrors = get_gentoo_overlay_mirrors(repo.repo_id) - add_mirrors_from_urls(repo, mirrors) - - -def get_gentoo_ebuild_keywords(content): - keywords = set() - default_keywords = { - 'alpha', - 'amd64', - 'arm', - 'arm64', - 'hppa', - 'loong', - 'm68k', - 'mips', - 'ppc', - 'ppc64', - 'riscv', - 's390', - 'sparc', - 'x86', - } - for line in content.decode().splitlines(): - if not line.startswith('KEYWORDS='): - continue - all_keywords = line.split('=')[1].split('#')[0].strip(' "').split() - if len(all_keywords) == 0 or '*' in all_keywords: - all_keywords = default_keywords - for keyword in all_keywords: - if keyword.startswith('~'): - continue - if keyword.startswith('-'): - keyword = keyword.replace('-', '') - if keyword in all_keywords: - all_keywords.remove(keyword) - continue - keywords.add(keyword) - break - return keywords - - -def extract_gentoo_ebuilds(data): - extracted_ebuilds = {} - with 
tarfile.open(fileobj=io.BytesIO(data), mode='r') as tar: - for member in tar.getmembers(): - if member.isfile() and member.name.endswith('ebuild') and not member.name.endswith('skel.ebuild'): - file_content = tar.extractfile(member).read() - extracted_ebuilds[member.name] = file_content - return extracted_ebuilds - - -def extract_gentoo_packages(mirror, data): - extracted_ebuilds = extract_gentoo_ebuilds(data) - return extract_gentoo_packages_from_ebuilds(extracted_ebuilds) - - -def extract_gentoo_packages_from_ebuilds(extracted_ebuilds): - if not extracted_ebuilds: - return - - packages = set() - flen = len(extracted_ebuilds) - pbar_start.send(sender=None, ptext=f'Processing {flen} ebuilds', plen=flen) - for i, (path, content) in enumerate(extracted_ebuilds.items()): - pbar_update.send(sender=None, index=i + 1) - components = path.split(os.sep) - category = components[1] - name = components[2] - evr = components[3].replace(f'{name}-', '').replace('.ebuild', '') - epoch, version, release = find_evr(evr) - arches = get_gentoo_ebuild_keywords(content) - for arch in arches: - package = PackageString( - name=name.lower(), - epoch=epoch, - version=version, - release=release, - arch=arch, - packagetype='G', - category=category, - ) - packages.add(package) - plen = len(packages) - info_message.send(sender=None, text=f'Extracted {plen} Packages', plen=plen) - return packages - - -def extract_gentoo_overlay_packages(mirror): - t = tempfile.mkdtemp() - git.Repo.clone_from(mirror.url, t, branch='master', depth=1) - packages = set() - arch, c = PackageArchitecture.objects.get_or_create(name='any') - for root, dirs, files in os.walk(t): - for name in files: - if fnmatch(name, '*.ebuild'): - full_name = root.replace(t + '/', '') - p_category, p_name = full_name.split('/') - m = re.match(fr'{p_name}-(.*)\.ebuild', name) - if m: - p_evr = m.group(1) - epoch, version, release = find_evr(p_evr) - package = PackageString( - name=p_name.lower(), - epoch=epoch, - version=version, - 
release=release, - arch=arch, - packagetype='G', - category=p_category, - ) - packages.add(package) - shutil.rmtree(t) - return packages - - -def refresh_gentoo_repo(repo): - """ Refresh a Gentoo repo - """ - if repo.repo_id == 'gentoo': - repo_type = 'main' - refresh_gentoo_main_repo(repo) - else: - refresh_gentoo_overlay_repo(repo) - repo_type = 'overlay' - ts = get_datetime_now() - for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True): - res = get_url(mirror.url + '.md5sum') - data = download_url(res, 'Downloading Repo checksum') - if data is None: - mirror.fail() - continue - checksum = data.decode().split()[0] - if checksum is None: - mirror.fail() - continue - if mirror.packages_checksum == checksum: - text = 'Mirror checksum has not changed, not refreshing Package metadata' - warning_message.send(sender=None, text=text) - continue - res = get_url(mirror.url) - mirror.last_access_ok = response_is_valid(res) - if mirror.last_access_ok: - data = download_url(res, 'Downloading Repo data') - if data is None: - mirror.fail() - continue - extracted = extract(data, mirror.url) - text = f'Found Gentoo Repo - {mirror.url}' - info_message.send(sender=None, text=text) - computed_checksum = get_checksum(data, Checksum.md5) - if not mirror_checksum_is_valid(computed_checksum, checksum, mirror, 'package'): - continue - else: - mirror.packages_checksum = checksum - if repo_type == 'main': - packages = extract_gentoo_packages(mirror, extracted) - elif repo_type == 'overlay': - packages = extract_gentoo_overlay_packages(mirror) - mirror.timestamp = ts - if packages: - update_mirror_packages(mirror, packages) - else: - mirror.fail() - mirror.save() - - -def refresh_yast_repo(mirror, data): - """ Refresh package metadata for a yast-style rpm mirror - and add the packages to the mirror - """ - package_dir = re.findall('DESCRDIR *(.*)', data.decode('utf-8'))[0] - package_url = f'{mirror.url}/{package_dir}/packages.gz' - - package_data = 
fetch_mirror_data( - mirror=mirror, - url=package_url, - text='Downloading yast Repo data') - if not package_data: - return - - mirror.packages_checksum = 'yast' - packages = extract_yast_packages(package_data) - if packages: - update_mirror_packages(mirror, packages) - packages.clear() - - -def refresh_rpm_repo(repo): - """ Refresh an rpm repo (yum or yast) - Checks if the repo url is a mirrorlist or metalink, - and extracts mirrors if so, then refreshes the mirrors - """ - check_for_mirrorlists(repo) - check_for_metalinks(repo) - refresh_rpm_repo_mirrors(repo) - - -def refresh_rpm_repo_mirrors(repo, errata_only=False): - """ checks a number of common yum repo formats to determine - which type of repo it is, then refreshes the mirrors - """ - formats = [ - 'repodata/repomd.xml.xz', - 'repodata/repomd.xml.bz2', - 'repodata/repomd.xml.gz', - 'repodata/repomd.xml', - 'suse/repodata/repomd.xml.xz', - 'suse/repodata/repomd.xml.bz2', - 'suse/repodata/repomd.xml.gz', - 'suse/repodata/repomd.xml', - 'content', - ] - max_mirrors = get_max_mirrors() - ts = get_datetime_now() - enabled_mirrors = repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True) - for i, mirror in enumerate(enabled_mirrors): - if i >= max_mirrors: - text = f'{max_mirrors} Mirrors already refreshed (max={max_mirrors}), skipping further refreshes' - warning_message.send(sender=None, text=text) - break - - res = find_mirror_url(mirror.url, formats) - if not res: - mirror.fail() - continue - mirror_url = res.url - - repo_data = fetch_mirror_data( - mirror=mirror, - url=mirror_url, - text='Downloading Repo data') - if not repo_data: - continue - - if mirror_url.endswith('content'): - text = f'Found yast rpm Repo - {mirror_url}' - info_message.send(sender=None, text=text) - refresh_yast_repo(mirror, repo_data) - else: - text = f'Found yum rpm Repo - {mirror_url}' - info_message.send(sender=None, text=text) - refresh_yum_repo(mirror, repo_data, mirror_url, ts, errata_only) - mirror.timestamp = ts 
- mirror.save() - - -def refresh_deb_repo(repo): - """ Refresh a debian repo. - Checks for the Packages* files to determine what the mirror urls - are and then downloads and extracts packages from those files. - """ - - formats = ['Packages.xz', 'Packages.bz2', 'Packages.gz', 'Packages'] - - ts = get_datetime_now() - enabled_mirrors = repo.mirror_set.filter(refresh=True, enabled=True) - for mirror in enabled_mirrors: - res = find_mirror_url(mirror.url, formats) - if not res: - continue - mirror_url = res.url - text = f'Found deb Repo - {mirror_url}' - info_message.send(sender=None, text=text) - - package_data = fetch_mirror_data( - mirror=mirror, - url=mirror_url, - text='Downloading Repo data') - if not package_data: - continue - - computed_checksum = get_checksum(package_data, Checksum.sha1) - if mirror.packages_checksum == computed_checksum: - text = 'Mirror checksum has not changed, not refreshing Package metadata' - warning_message.send(sender=None, text=text) - continue - else: - mirror.packages_checksum = computed_checksum - - packages = extract_deb_packages(package_data, mirror_url) - if not packages: - mirror.fail() - continue - - update_mirror_packages(mirror, packages) - packages.clear() - mirror.timestamp = ts - mirror.save() - - def find_best_repo(package, hostrepos): """ Given a package and a set of HostRepos, determine the best repo. Returns the best repo. 
From a51a9879bd15f0220e5a8c7ef85f9521517dcebc Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 15:12:36 -0500 Subject: [PATCH 117/199] update .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 1497c695..1699ba8e 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,4 @@ dist run pyvenv.cfg .vscode +.venv From b3a80ed522d99107c9eaefe22e5d6d905e76812a Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 15:13:09 -0500 Subject: [PATCH 118/199] add migration for blank host tags --- hosts/migrations/0007_alter_host_tags.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 hosts/migrations/0007_alter_host_tags.py diff --git a/hosts/migrations/0007_alter_host_tags.py b/hosts/migrations/0007_alter_host_tags.py new file mode 100644 index 00000000..3858b847 --- /dev/null +++ b/hosts/migrations/0007_alter_host_tags.py @@ -0,0 +1,20 @@ +# Generated by Django 4.2.19 on 2025-02-28 19:53 + +from django.db import migrations +import taggit.managers + + +class Migration(migrations.Migration): + + dependencies = [ + ('taggit', '0005_auto_20220424_2025'), + ('hosts', '0006_migrate_to_tz_aware'), + ] + + operations = [ + migrations.AlterField( + model_name='host', + name='tags', + field=taggit.managers.TaggableManager(blank=True, help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'), + ), + ] From 113e48c0291d55e131043267ab36632c3a3185c5 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 15:14:36 -0500 Subject: [PATCH 119/199] add support for parsing yum updateinfo errata repos --- errata/sources/repos/yum.py | 159 ++++++++++++++++++++++++++++++++++++ 1 file changed, 159 insertions(+) create mode 100644 errata/sources/repos/yum.py diff --git a/errata/sources/repos/yum.py b/errata/sources/repos/yum.py new file mode 100644 index 00000000..cd46c698 --- /dev/null +++ b/errata/sources/repos/yum.py @@ 
-0,0 +1,159 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see Date: Mon, 3 Mar 2025 21:49:33 -0500 Subject: [PATCH 120/199] use more descriptive variable name --- util/templates/dashboard.html | 10 +++++----- util/views.py | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/util/templates/dashboard.html b/util/templates/dashboard.html index 14964e98..631fea36 100644 --- a/util/templates/dashboard.html +++ b/util/templates/dashboard.html @@ -8,12 +8,12 @@ {% block content %} -{% with count=lonely_osvariants.count %} - {% if lonely_osvariants.count > 0 %} +{% with count=noosrelease_osvariants.count %} + {% if noosrelease_osvariants.count > 0 %}
    - -
    - {% gen_table lonely_osvariants %} + +
    + {% gen_table noosrelease_osvariants %}
    {% endif %} diff --git a/util/views.py b/util/views.py index e9aed9ef..b66db6b0 100644 --- a/util/views.py +++ b/util/views.py @@ -59,7 +59,7 @@ def dashboard(request): diff_rdns_hosts = hosts.exclude(reversedns=F('hostname')).filter(check_dns=True) # noqa # os variant issues - lonely_osvariants = osvariants.filter(osrelease__isnull=True) + noosrelease_osvariants = osvariants.filter(osrelease__isnull=True) nohost_osvariants = osvariants.filter(host__isnull=True) # os release issues @@ -110,7 +110,7 @@ def dashboard(request): request, 'dashboard.html', {'site': site, - 'lonely_osvariants': lonely_osvariants, + 'noosrelease_osvariants': noosrelease_osvariants, 'norepo_hosts': norepo_hosts, 'nohost_osvariants': nohost_osvariants, 'diff_rdns_hosts': diff_rdns_hosts, From bf93e96f27b36a7dac60cd6d4602856ec85ff4c6 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 21:53:01 -0500 Subject: [PATCH 121/199] remove transaction.atomic where not needed --- errata/models.py | 5 +- errata/sources/distros/alma.py | 5 +- errata/sources/distros/centos.py | 5 +- errata/sources/distros/rocky.py | 4 +- errata/utils.py | 15 ++--- hosts/models.py | 42 +++---------- modules/utils.py | 27 ++++---- packages/utils.py | 51 ++++++--------- patchman/settings.py | 2 +- reports/models.py | 18 ++---- reports/utils.py | 104 +++++++++++-------------------- reports/views.py | 4 +- repos/utils.py | 16 ++--- 13 files changed, 99 insertions(+), 199 deletions(-) diff --git a/errata/models.py b/errata/models.py index 3031e0d6..c08d3493 100644 --- a/errata/models.py +++ b/errata/models.py @@ -16,7 +16,7 @@ from django.db import models from django.urls import reverse -from django.db import transaction, IntegrityError +from django.db import IntegrityError from packages.models import Package, PackageUpdate from errata.managers import ErratumManager @@ -71,8 +71,7 @@ def scan_for_security_updates(self): if not affected_update.security: affected_update.security = True try: - with 
transaction.atomic(): - affected_update.save() + affected_update.save() except IntegrityError as e: error_message.send(sender=None, text=e) # a version of this update already exists that is diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index 3a79d47f..889a3226 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -17,8 +17,6 @@ import concurrent.futures import json -from django.db import transaction - from packages.models import Package from packages.utils import get_or_create_package, parse_package_string from util import get_url, download_url, get_setting_of_type @@ -109,8 +107,7 @@ def add_alma_erratum_osreleases(e, release): """ Update OS Release for Alma Linux errata """ from operatingsystems.models import OSRelease - with transaction.atomic(): - osrelease, created = OSRelease.objects.get_or_create(name=f'Alma Linux {release}') + osrelease, created = OSRelease.objects.get_or_create(name=f'Alma Linux {release}') e.osreleases.add(osrelease) e.save() diff --git a/errata/sources/distros/centos.py b/errata/sources/distros/centos.py index b2fbfdb8..a2fd1639 100644 --- a/errata/sources/distros/centos.py +++ b/errata/sources/distros/centos.py @@ -17,8 +17,6 @@ import re from lxml import etree -from django.db import transaction - from packages.models import Package from packages.utils import parse_package_string, get_or_create_package from patchman.signals import error_message, pbar_start, pbar_update @@ -123,8 +121,7 @@ def parse_centos_errata_children(e, children): elif c.tag == 'os_release': if accepted_centos_release([c.text]): osrelease_name = f'CentOS {c.text}' - with transaction.atomic(): - osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) + osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) e.osreleases.add(osrelease) elif c.tag == 'packages': name, epoch, ver, rel, dist, arch = parse_package_string(c.text) diff --git a/errata/sources/distros/rocky.py 
b/errata/sources/distros/rocky.py index ba5d7263..b405b755 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -18,7 +18,6 @@ import concurrent.futures from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential -from django.db import transaction from django.db.utils import OperationalError from packages.models import Package @@ -217,8 +216,7 @@ def add_rocky_erratum_oses(e, advisory): variant = affected_os.get('variant') major_version = affected_os.get('major_version') osrelease_name = f'{variant} {major_version}' - with transaction.atomic(): - osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) + osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) e.osreleases.add(osrelease) e.save() diff --git a/errata/utils.py b/errata/utils.py index 0b248711..eab3f79e 100644 --- a/errata/utils.py +++ b/errata/utils.py @@ -16,8 +16,6 @@ from urllib.parse import urlparse -from django.db import transaction - from util import tz_aware_datetime from errata.models import Erratum from patchman.signals import pbar_start, pbar_update, warning_message @@ -51,13 +49,12 @@ def get_or_create_erratum(name, e_type, issue_date, synopsis): e.save() created = False except Erratum.DoesNotExist: - with transaction.atomic(): - e, created = Erratum.objects.get_or_create( - name=name, - e_type=e_type, - issue_date=tz_aware_datetime(issue_date), - synopsis=synopsis, - ) + e, created = Erratum.objects.get_or_create( + name=name, + e_type=e_type, + issue_date=tz_aware_datetime(issue_date), + synopsis=synopsis, + ) return e, created diff --git a/hosts/models.py b/hosts/models.py index c40944de..abca6ab8 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -15,7 +15,7 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see -from django.db import models, IntegrityError, DatabaseError, transaction +from django.db import models from django.db.models import Q from django.urls import reverse from django.utils import timezone @@ -33,7 +33,7 @@ from operatingsystems.models import OSVariant from packages.models import Package, PackageUpdate from packages.utils import get_or_create_package_update -from patchman.signals import info_message, error_message +from patchman.signals import info_message from repos.models import Repository from repos.utils import find_best_repo @@ -118,8 +118,7 @@ def check_rdns(self): text += f'{self.hostname} != {self.reversedns}' info_message.send(sender=None, text=text) else: - info_message.send(sender=None, - text='Reverse DNS check disabled') + info_message.send(sender=None, text='Reverse DNS check disabled') def clean_reports(self): """ Remove all but the last 3 reports for a host @@ -153,28 +152,17 @@ def process_update(self, package, highest_package): if self.host_repos_only: host_repos = Q(repo__host=self) else: - host_repos = \ - Q(repo__osrelease__osvariant__host=self, repo__arch=self.arch) | \ - Q(repo__host=self) + host_repos = Q(repo__osrelease__osvariant__host=self, repo__arch=self.arch) | Q(repo__host=self) mirrors = highest_package.mirror_set.filter(host_repos) security = False - # if any of the containing repos are security, mark the update as - # security + # if any of the containing repos are security, mark the update as a security update for mirror in mirrors: if mirror.repo.security: security = True - update = get_or_create_package_update(oldpackage=package, - newpackage=highest_package, - security=security) - try: - with transaction.atomic(): - self.updates.add(update) - info_message.send(sender=None, text=f'{update}') - return update.id - except IntegrityError as e: - error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) + update = 
get_or_create_package_update(oldpackage=package, newpackage=highest_package, security=security) + self.updates.add(update) + info_message.send(sender=None, text=f'{update}') + return update.id def find_updates(self): @@ -270,13 +258,11 @@ def find_host_repo_updates(self, host_packages, repo_packages): uid = self.process_update(package, highest_package) if uid is not None: update_ids.append(uid) - return update_ids def find_osrelease_repo_updates(self, host_packages, repo_packages): update_ids = [] - for package in host_packages: highest_package = package @@ -304,7 +290,6 @@ def find_osrelease_repo_updates(self, host_packages, repo_packages): uid = self.process_update(package, highest_package) if uid is not None: update_ids.append(uid) - return update_ids def check_if_reboot_required(self, host_highest): @@ -320,7 +305,6 @@ def check_if_reboot_required(self, host_highest): def find_kernel_updates(self, kernel_packages, repo_packages): update_ids = [] - for package in kernel_packages: host_highest = package repo_highest = package @@ -344,13 +328,7 @@ def find_kernel_updates(self, kernel_packages, repo_packages): update_ids.append(uid) self.check_if_reboot_required(host_highest) - - try: - with transaction.atomic(): - self.save() - except DatabaseError as e: - error_message.send(sender=None, text=e) - + self.save() return update_ids diff --git a/modules/utils.py b/modules/utils.py index 53eb236d..817a610c 100644 --- a/modules/utils.py +++ b/modules/utils.py @@ -14,7 +14,7 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see -from django.db import IntegrityError, DatabaseError, transaction +from django.db import IntegrityError from patchman.signals import error_message, info_message from modules.models import Module @@ -26,18 +26,16 @@ def get_or_create_module(name, stream, version, context, arch, repo): Returns the module and a boolean for created """ created = False - with transaction.atomic(): - m_arch, c = PackageArchitecture.objects.get_or_create(name=arch) + m_arch, c = PackageArchitecture.objects.get_or_create(name=arch) try: - with transaction.atomic(): - module, created = Module.objects.get_or_create( - name=name, - stream=stream, - version=version, - context=context, - arch=m_arch, - repo=repo, - ) + module, created = Module.objects.get_or_create( + name=name, + stream=stream, + version=version, + context=context, + arch=m_arch, + repo=repo, + ) except IntegrityError as e: error_message.send(sender=None, text=e) module = Module.objects.get( @@ -48,8 +46,6 @@ def get_or_create_module(name, stream, version, context, arch, repo): arch=m_arch, repo=repo, ) - except DatabaseError as e: - error_message.send(sender=None, text=e) return module, created @@ -57,8 +53,7 @@ def get_matching_modules(name, stream, version, context, arch): """ Return modules that match name, stream, version, context, and arch, regardless of repo """ - with transaction.atomic(): - m_arch, c = PackageArchitecture.objects.get_or_create(name=arch) + m_arch, c = PackageArchitecture.objects.get_or_create(name=arch) modules = Module.objects.filter( name=name, stream=stream, diff --git a/packages/utils.py b/packages/utils.py index a4d37f51..9c4f9191 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -18,7 +18,7 @@ import re from django.core.exceptions import MultipleObjectsReturned -from django.db import IntegrityError, DatabaseError, transaction +from django.db import IntegrityError from arch.models import PackageArchitecture from packages.models import PackageName, Package, PackageUpdate, 
PackageCategory, PackageString @@ -50,17 +50,14 @@ def convert_package_to_packagestring(package): def convert_packagestring_to_package(strpackage): """ Convert a PackageString object to a Package object """ - with transaction.atomic(): - name, created = PackageName.objects.get_or_create(name=strpackage.name.lower()) + name, created = PackageName.objects.get_or_create(name=strpackage.name.lower()) epoch = strpackage.epoch version = strpackage.version release = strpackage.release - with transaction.atomic(): - arch, created = PackageArchitecture.objects.get_or_create(name=strpackage.arch) + arch, created = PackageArchitecture.objects.get_or_create(name=strpackage.arch) packagetype = strpackage.packagetype if strpackage.category: - with transaction.atomic(): - category, created = PackageCategory.objects.get_or_create(name=strpackage.category) + category, created = PackageCategory.objects.get_or_create(name=strpackage.category) else: category = None @@ -173,21 +170,16 @@ def get_or_create_package(name, epoch, version, release, arch, p_type): if epoch in [None, 0, '0']: epoch = '' - with transaction.atomic(): - package_name, c = PackageName.objects.get_or_create(name=name) - - with transaction.atomic(): - package_arch, c = PackageArchitecture.objects.get_or_create(name=arch) - - with transaction.atomic(): - package, c = Package.objects.get_or_create( - name=package_name, - arch=package_arch, - epoch=epoch, - version=version, - release=release, - packagetype=p_type, - ) + package_name, c = PackageName.objects.get_or_create(name=name) + package_arch, c = PackageArchitecture.objects.get_or_create(name=arch) + package, c = Package.objects.get_or_create( + name=package_name, + arch=package_arch, + epoch=epoch, + version=version, + release=release, + packagetype=p_type, + ) return package @@ -226,21 +218,18 @@ def get_or_create_package_update(oldpackage, newpackage, security): if update: if security and not update.security: update.security = True - with transaction.atomic(): 
- update.save() + update.save() else: - with transaction.atomic(): - update, c = updates.get_or_create( - oldpackage=oldpackage, - newpackage=newpackage, - security=security) + update, c = updates.get_or_create( + oldpackage=oldpackage, + newpackage=newpackage, + security=security, + ) except IntegrityError as e: error_message.send(sender=None, text=e) update = updates.get(oldpackage=oldpackage, newpackage=newpackage, security=security) - except DatabaseError as e: - error_message.send(sender=None, text=e) return update diff --git a/patchman/settings.py b/patchman/settings.py index 85ff6719..557e8c68 100644 --- a/patchman/settings.py +++ b/patchman/settings.py @@ -82,7 +82,7 @@ 'rest_framework', 'django_filters', 'celery', - 'django_celery_beat' + 'django_celery_beat', ] LOCAL_APPS = [ diff --git a/reports/models.py b/reports/models.py index 3a2fc7af..bd005664 100644 --- a/reports/models.py +++ b/reports/models.py @@ -15,7 +15,7 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see -from django.db import models, transaction +from django.db import models from django.urls import reverse from patchman.signals import error_message, info_message @@ -90,9 +90,7 @@ def parse(self, data, meta): fqdn = self.host.split('.', 1) if len(fqdn) == 2: self.domain = fqdn.pop() - - with transaction.atomic(): - self.save() + self.save() def process(self, find_updates=True, verbose=False): """ Process a report and extract os, arch, domain, packages, repos etc @@ -115,14 +113,10 @@ def process(self, find_updates=True, verbose=False): info_message.send(sender=None, text=f'Processing report {self.id} - {self.host}') from reports.utils import process_packages, process_repos, process_updates, process_modules - with transaction.atomic(): - process_repos(report=self, host=host) - with transaction.atomic(): - process_modules(report=self, host=host) - with transaction.atomic(): - process_packages(report=self, host=host) - with transaction.atomic(): - process_updates(report=self, host=host) + process_repos(report=self, host=host) + process_modules(report=self, host=host) + process_packages(report=self, host=host) + process_updates(report=self, host=host) self.processed = True self.save() diff --git a/reports/utils.py b/reports/utils.py index ccfb1a12..065e5e65 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -46,19 +46,12 @@ def process_repos(report, host): if repo: repo_ids.append(repo.id) try: - with transaction.atomic(): - hostrepo, c = host_repos.get_or_create(host=host, - repo=repo) - except IntegrityError as e: - error_message.send(sender=None, text=e) + hostrepo, c = host_repos.get_or_create(host=host, repo=repo) + except IntegrityError: hostrepo = host_repos.get(host=host, repo=repo) - try: - if hostrepo.priority != priority: - hostrepo.priority = priority - with transaction.atomic(): - hostrepo.save() - except IntegrityError as e: - error_message.send(sender=None, text=e) + if hostrepo.priority != priority: + hostrepo.priority = priority + 
hostrepo.save() pbar_update.send(sender=None, index=i + 1) for hostrepo in host_repos: @@ -78,13 +71,7 @@ def process_modules(report, host): module = process_module(module_str) if module: module_ids.append(module.id) - try: - with transaction.atomic(): - host.modules.add(module) - except IntegrityError as e: - error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) + host.modules.add(module) pbar_update.send(sender=None, index=i + 1) for module in host.modules.all(): @@ -104,17 +91,10 @@ def process_packages(report, host): package = process_package(pkg_str, report.protocol) if package: package_ids.append(package.id) - try: - with transaction.atomic(): - host.packages.add(package) - except IntegrityError as e: - error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) + host.packages.add(package) else: if pkg_str[0].lower() != 'gpg-pubkey': - text = f'No package returned for {pkg_str}' - info_message.send(sender=None, text=text) + info_message.send(sender=None, text=f'No package returned for {pkg_str}') pbar_update.send(sender=None, index=i + 1) for package in host.packages.all(): @@ -154,7 +134,6 @@ def add_updates(updates, host): ulen = len(updates) if ulen > 0: pbar_start.send(sender=None, ptext=f'{host} Updates', plen=ulen) - for i, (u, sec) in enumerate(updates.items()): update = process_update(host, u, sec) if update: @@ -177,7 +156,7 @@ def parse_updates(updates_string, security): def process_update(host, update_string, security): """ Processes a single sanitized update string and converts to an update - object. Only works if the original package exists. Returns None otherwise + object. Only works if the original package exists. 
Returns None otherwise """ update_str = update_string.split() repo_id = update_str[2] @@ -187,29 +166,26 @@ def process_update(host, update_string, security): p_arch = parts[2] p_epoch, p_version, p_release = find_evr(update_str[1]) - package = get_or_create_package(name=p_name, - epoch=p_epoch, - version=p_version, - release=p_release, - arch=p_arch, - p_type='R') + package = get_or_create_package( + name=p_name, + epoch=p_epoch, + version=p_version, + release=p_release, + arch=p_arch, + p_type=Package.RPM + ) try: repo = Repository.objects.get(repo_id=repo_id) except Repository.DoesNotExist: repo = None if repo: for mirror in repo.mirror_set.all(): - with transaction.atomic(): - MirrorPackage.objects.create(mirror=mirror, package=package) + MirrorPackage.objects.create(mirror=mirror, package=package) - installed_packages = host.packages.filter(name=package.name, - arch=package.arch, - packagetype='R') + installed_packages = host.packages.filter(name=package.name, arch=package.arch, packagetype=Package.RPM) if installed_packages: installed_package = installed_packages[0] - update = get_or_create_package_update(oldpackage=installed_package, - newpackage=package, - security=security) + update = get_or_create_package_update(oldpackage=installed_package, newpackage=package, security=security) return update @@ -249,8 +225,7 @@ def process_repo(repo, arch): if repo[1]: r_name = repo[1] - with transaction.atomic(): - r_arch, c = MachineArchitecture.objects.get_or_create(name=arch) + r_arch, c = MachineArchitecture.objects.get_or_create(name=arch) unknown = [] for r_url in repo[3:]: @@ -268,29 +243,21 @@ def process_repo(repo, arch): if r_id and repository.repo_id != r_id: repository.repo_id = r_id - with transaction.atomic(): - repository.save() if r_name and repository.name != r_name: repository.name = r_name - with transaction.atomic(): - repository.save() for url in unknown: Mirror.objects.create(repo=repository, url=url.rstrip('/')) for mirror in 
Mirror.objects.filter(repo=repository).values('url'): - if mirror['url'].find('cdn.redhat.com') != -1 or \ - mirror['url'].find('cdn-ubi.redhat.com') != -1 or \ - mirror['url'].find('nu.novell.com') != -1 or \ - mirror['url'].find('updates.suse.com') != -1: + mirror_url = mirror.get('url') + auth_urls = ['cdn.redhat.com', 'cdn-ubi.redhat.com', 'nu.novell.com', 'updates.suse.com'] + if any(auth_url in mirror_url for auth_url in auth_urls): repository.auth_required = True - with transaction.atomic(): - repository.save() - if mirror['url'].find('security') != -1: + if 'security' in mirror_url: repository.security = True - with transaction.atomic(): - repository.save() + repository.save() return repository, r_priority @@ -557,10 +524,9 @@ def get_host(report, arch, osvariant, domain): except herror: report.host = report.report_ip report.save() - - with transaction.atomic(): - try: - host, c = Host.objects.get_or_create( + try: + with transaction.atomic(): + host, created = Host.objects.get_or_create( hostname=report.host, defaults={ 'ipaddress': report.report_ip, @@ -568,8 +534,8 @@ def get_host(report, arch, osvariant, domain): 'osvariant': osvariant, 'domain': domain, 'lastreport': report.created, - }) - + } + ) host.ipaddress = report.report_ip host.kernel = report.kernel host.arch = arch @@ -582,10 +548,10 @@ def get_host(report, arch, osvariant, domain): else: host.reboot_required = False host.save() - except IntegrityError as e: - error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) + except IntegrityError as e: + error_message.send(sender=None, text=e) + except DatabaseError as e: + error_message.send(sender=None, text=e) if host: host.check_rdns() return host diff --git a/reports/views.py b/reports/views.py index 792123ea..ccef1bb2 100644 --- a/reports/views.py +++ b/reports/views.py @@ -23,7 +23,6 @@ from django.contrib.auth.decorators import login_required from django.core.paginator import Paginator, 
EmptyPage, PageNotAnInteger from django.urls import reverse -from django.db import transaction from django.db.models import Q from django.contrib import messages from django.db.utils import OperationalError @@ -44,8 +43,7 @@ def upload(request): data = request.POST.copy() meta = request.META.copy() - with transaction.atomic(): - report = Report.objects.create() + report = Report.objects.create() report.parse(data, meta) from reports.tasks import process_report diff --git a/repos/utils.py b/repos/utils.py index d392e78c..a4df3741 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -21,7 +21,7 @@ from defusedxml import ElementTree as ET from tenacity import RetryError -from django.db import IntegrityError, DatabaseError, transaction +from django.db import IntegrityError from django.db.models import Q from packages.models import Package @@ -37,17 +37,10 @@ def get_or_create_repo(r_name, r_arch, r_type, r_id=None): from repos.models import Repository repositories = Repository.objects.all() try: - with transaction.atomic(): - repository, c = repositories.get_or_create(name=r_name, - arch=r_arch, - repotype=r_type) + repository, c = repositories.get_or_create(name=r_name, arch=r_arch, repotype=r_type) except IntegrityError as e: error_message.send(sender=None, text=e) - repository = repositories.get(name=r_name, - arch=r_arch, - repotype=r_type) - except DatabaseError as e: - error_message.send(sender=None, text=e) + repository = repositories.get(name=r_name, arch=r_arch, repotype=r_type) if repository: if r_id: repository.repo_id = r_id @@ -85,8 +78,7 @@ def update_mirror_packages(mirror, packages): pbar_update.send(sender=None, index=i + 1) try: package = convert_packagestring_to_package(strpackage) - with transaction.atomic(): - mirror_package, c = MirrorPackage.objects.get_or_create(mirror=mirror, package=package) + mirror_package, c = MirrorPackage.objects.get_or_create(mirror=mirror, package=package) except Package.MultipleObjectsReturned: 
error_message.send(sender=None, text=f'Duplicate Package found in {mirror}: {strpackage}') mirror.save() From be73fa2c613da18dcba1ec866d3b5a91f2546b16 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Mon, 3 Mar 2025 21:53:30 -0500 Subject: [PATCH 122/199] show url for authenticated mirrors --- repos/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/models.py b/repos/models.py index b84f7b19..181a103d 100644 --- a/repos/models.py +++ b/repos/models.py @@ -177,7 +177,7 @@ def fail(self): Default is 28 """ if self.repo.auth_required: - text = 'Mirror requires authentication, not updating' + text = f'Mirror requires authentication, not updating - {self.url}' warning_message.send(sender=None, text=text) return text = f'No usable mirror found at {self.url}' From 846fff568bd29ffb32b73fe031b9a0ae47f54eff Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 4 Mar 2025 16:06:02 -0500 Subject: [PATCH 123/199] add option to specify errata type on command line --- errata/tasks.py | 21 +++++++++++++++------ sbin/patchman | 7 +++++-- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/errata/tasks.py b/errata/tasks.py index 4421c860..9e1d1790 100644 --- a/errata/tasks.py +++ b/errata/tasks.py @@ -22,6 +22,7 @@ from errata.sources.distros.centos import update_centos_errata from errata.sources.distros.rocky import update_rocky_errata from errata.sources.distros.ubuntu import update_ubuntu_errata +from patchman.signals import error_message from repos.models import Repository from security.tasks import update_cves, update_cwes from util import get_setting_of_type @@ -36,14 +37,22 @@ def update_yum_repo_errata(): @shared_task -def update_errata(): +def update_errata(erratum_type=None): """ Update all distros errata """ - errata_os_updates = get_setting_of_type( - setting_name='ERRATA_OS_UPDATES', - setting_type=list, - default=['yum', 'rocky', 'alma', 'arch', 'ubuntu', 'debian'], - ) + errata_os_updates = [] + erratum_type_defaults = 
['yum', 'rocky', 'alma', 'arch', 'ubuntu', 'debian'] + if erratum_type: + if erratum_type not in erratum_type_defaults: + error_message.send(sender=None, text=f'Erratum type must be one of {erratum_type_defaults}') + else: + errata_os_updates = erratum_type + else: + errata_os_updates = get_setting_of_type( + setting_name='ERRATA_OS_UPDATES', + setting_type=list, + default=erratum_type_defaults, + ) if 'yum' in errata_os_updates: update_yum_repo_errata() if 'arch' in errata_os_updates: diff --git a/sbin/patchman b/sbin/patchman index 78cbad3b..6ad7a602 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -433,6 +433,9 @@ def collect_args(): parser.add_argument( '-e', '--update-errata', action='store_true', help='Update Errata') + parser.add_argument( + '-E', '--erratum-type', + help='Only update the specified Erratum type (e.g. `yum`, `ubuntu`, `arch`)') parser.add_argument( '-v', '--update-cves', action='store_true', help='Update CVEs from https://cve.org') @@ -454,7 +457,7 @@ def process_args(args): args.clean_updates = True args.dbcheck = True args.dns_checks = True - args.errata = False + args.errata = True if args.list_repos: list_repos(args.repo) return False @@ -511,7 +514,7 @@ def process_args(args): dns_checks(args.host) showhelp = False if args.update_errata: - update_errata() + update_errata(args.erratum_type) mark_errata_security_updates() showhelp = False if args.update_cves: From 070beaf292e73fedbb031badcdebe1258c0b9f67 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 4 Mar 2025 17:09:16 -0500 Subject: [PATCH 124/199] standardize model options format --- hosts/migrations/0008_alter_host_options.py | 17 +++++++++++++ hosts/models.py | 4 +-- .../migrations/0004_alter_module_options.py | 17 +++++++++++++ modules/models.py | 4 +-- ...release_options_alter_osvariant_options.py | 21 ++++++++++++++++ operatingsystems/models.py | 6 ++--- ..._alter_packagecategory_options_and_more.py | 25 +++++++++++++++++++ packages/models.py | 18 ++++++------- 
.../migrations/0005_alter_report_options.py | 17 +++++++++++++ reports/models.py | 2 +- security/migrations/0004_alter_cve_options.py | 17 +++++++++++++ security/models.py | 2 +- 12 files changed, 130 insertions(+), 20 deletions(-) create mode 100644 hosts/migrations/0008_alter_host_options.py create mode 100644 modules/migrations/0004_alter_module_options.py create mode 100644 operatingsystems/migrations/0008_alter_osrelease_options_alter_osvariant_options.py create mode 100644 packages/migrations/0004_alter_package_options_alter_packagecategory_options_and_more.py create mode 100644 reports/migrations/0005_alter_report_options.py create mode 100644 security/migrations/0004_alter_cve_options.py diff --git a/hosts/migrations/0008_alter_host_options.py b/hosts/migrations/0008_alter_host_options.py new file mode 100644 index 00000000..0952cf19 --- /dev/null +++ b/hosts/migrations/0008_alter_host_options.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-03-04 22:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('hosts', '0007_alter_host_tags'), + ] + + operations = [ + migrations.AlterModelOptions( + name='host', + options={'ordering': ['hostname'], 'verbose_name': 'Host', 'verbose_name_plural': 'Hosts'}, + ), + ] diff --git a/hosts/models.py b/hosts/models.py index abca6ab8..452c9055 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -64,7 +64,7 @@ class Host(models.Model): class Meta: verbose_name = 'Host' verbose_name_plural = 'Hosts' - ordering = ('hostname',) + ordering = ['hostname'] def __str__(self): return self.hostname @@ -339,7 +339,7 @@ class HostRepo(models.Model): priority = models.IntegerField(default=0) class Meta: - unique_together = ('host', 'repo') + unique_together = ['host', 'repo'] def __str__(self): return f'{self.host}-{self.repo}' diff --git a/modules/migrations/0004_alter_module_options.py b/modules/migrations/0004_alter_module_options.py new file mode 100644 index 
00000000..e999ec36 --- /dev/null +++ b/modules/migrations/0004_alter_module_options.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-03-04 22:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('modules', '0003_alter_module_options'), + ] + + operations = [ + migrations.AlterModelOptions( + name='module', + options={'ordering': ['name', 'stream'], 'verbose_name': 'Module', 'verbose_name_plural': 'Modules'}, + ), + ] diff --git a/modules/models.py b/modules/models.py index 1eb4d236..931a41c3 100644 --- a/modules/models.py +++ b/modules/models.py @@ -35,8 +35,8 @@ class Module(models.Model): class Meta: verbose_name = 'Module' verbose_name_plural = 'Modules' - unique_together = ('name', 'stream', 'version', 'context', 'arch',) - ordering = ('name', 'stream',) + unique_together = ['name', 'stream', 'version', 'context', 'arch'] + ordering = ['name', 'stream'] def __str__(self): return f'{self.name}-{self.stream}-{self.version}-{self.version}-{self.context}' diff --git a/operatingsystems/migrations/0008_alter_osrelease_options_alter_osvariant_options.py b/operatingsystems/migrations/0008_alter_osrelease_options_alter_osvariant_options.py new file mode 100644 index 00000000..bcce94e4 --- /dev/null +++ b/operatingsystems/migrations/0008_alter_osrelease_options_alter_osvariant_options.py @@ -0,0 +1,21 @@ +# Generated by Django 4.2.19 on 2025-03-04 22:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('operatingsystems', '0007_alter_osrelease_unique_together'), + ] + + operations = [ + migrations.AlterModelOptions( + name='osrelease', + options={'ordering': ['name'], 'verbose_name': 'Operating System Release', 'verbose_name_plural': 'Operating System Releases'}, + ), + migrations.AlterModelOptions( + name='osvariant', + options={'ordering': ['name'], 'verbose_name': 'Operating System Variant', 'verbose_name_plural': 'Operating System Variants'}, + ), + ] 
diff --git a/operatingsystems/models.py b/operatingsystems/models.py index bed134af..234b8ab3 100644 --- a/operatingsystems/models.py +++ b/operatingsystems/models.py @@ -35,8 +35,8 @@ class OSRelease(models.Model): class Meta: verbose_name = 'Operating System Release' verbose_name_plural = 'Operating System Releases' - unique_together = ('name', 'codename', 'cpe_name') - ordering = ('name',) + unique_together = ['name', 'codename', 'cpe_name'] + ordering = ['name'] def __str__(self): if self.codename: @@ -61,7 +61,7 @@ class OSVariant(models.Model): class Meta: verbose_name = 'Operating System Variant' verbose_name_plural = 'Operating System Variants' - ordering = ('name',) + ordering = ['name'] def __str__(self): osvariant_name = f'{self.name} {self.arch}' diff --git a/packages/migrations/0004_alter_package_options_alter_packagecategory_options_and_more.py b/packages/migrations/0004_alter_package_options_alter_packagecategory_options_and_more.py new file mode 100644 index 00000000..7ed04ab6 --- /dev/null +++ b/packages/migrations/0004_alter_package_options_alter_packagecategory_options_and_more.py @@ -0,0 +1,25 @@ +# Generated by Django 4.2.19 on 2025-03-04 22:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('packages', '0003_auto_20250207_1746'), + ] + + operations = [ + migrations.AlterModelOptions( + name='package', + options={'ordering': ['name', 'epoch', 'version', 'release', 'arch']}, + ), + migrations.AlterModelOptions( + name='packagecategory', + options={'ordering': ['name'], 'verbose_name': 'Package Category', 'verbose_name_plural': 'Package Categories'}, + ), + migrations.AlterModelOptions( + name='packagename', + options={'ordering': ['name'], 'verbose_name': 'Package', 'verbose_name_plural': 'Packages'}, + ), + ] diff --git a/packages/models.py b/packages/models.py index 30cded29..0f2cf5c2 100644 --- a/packages/models.py +++ b/packages/models.py @@ -35,7 +35,7 @@ class 
PackageName(models.Model): class Meta: verbose_name = 'Package' verbose_name_plural = 'Packages' - ordering = ('name',) + ordering = ['name'] def __str__(self): return self.name @@ -51,7 +51,7 @@ class PackageCategory(models.Model): class Meta: verbose_name = 'Package Category' verbose_name_plural = 'Package Categories' - ordering = ('name',) + ordering = ['name'] def __str__(self): return self.name @@ -86,8 +86,8 @@ class Package(models.Model): objects = PackageManager() class Meta: - ordering = ('name', 'epoch', 'version', 'release', 'arch') - unique_together = ('name', 'epoch', 'version', 'release', 'arch', 'packagetype', 'category') + ordering = ['name', 'epoch', 'version', 'release', 'arch'] + unique_together = ['name', 'epoch', 'version', 'release', 'arch', 'packagetype', 'category'] def __str__(self): if self.epoch: @@ -213,16 +213,12 @@ def __hash__(self): class PackageUpdate(models.Model): - oldpackage = models.ForeignKey(Package, - on_delete=models.CASCADE, - related_name='oldpackage') - newpackage = models.ForeignKey(Package, - on_delete=models.CASCADE, - related_name='newpackage') + oldpackage = models.ForeignKey(Package, on_delete=models.CASCADE, related_name='oldpackage') + newpackage = models.ForeignKey(Package, on_delete=models.CASCADE, related_name='newpackage') security = models.BooleanField(default=False) class Meta: - unique_together = (('oldpackage', 'newpackage', 'security')) + unique_together = ['oldpackage', 'newpackage', 'security'] def __str__(self): if self.security: diff --git a/reports/migrations/0005_alter_report_options.py b/reports/migrations/0005_alter_report_options.py new file mode 100644 index 00000000..e2626984 --- /dev/null +++ b/reports/migrations/0005_alter_report_options.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-03-04 22:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('reports', '0004_migrate_to_tz_aware'), + ] + + operations = [ + 
migrations.AlterModelOptions( + name='report', + options={'ordering': ['-created'], 'verbose_name_plural': 'Reports'}, + ), + ] diff --git a/reports/models.py b/reports/models.py index bd005664..ab5564ba 100644 --- a/reports/models.py +++ b/reports/models.py @@ -44,7 +44,7 @@ class Report(models.Model): class Meta: verbose_name_plural = 'Report' verbose_name_plural = 'Reports' - ordering = ('-created',) + ordering = ['-created'] def __str__(self): return f"{self.host} {self.created.strftime('%c')}" diff --git a/security/migrations/0004_alter_cve_options.py b/security/migrations/0004_alter_cve_options.py new file mode 100644 index 00000000..8650801f --- /dev/null +++ b/security/migrations/0004_alter_cve_options.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-03-04 22:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0003_alter_cve_description_alter_cwe_description'), + ] + + operations = [ + migrations.AlterModelOptions( + name='cve', + options={'ordering': ['cve_id']}, + ), + ] diff --git a/security/models.py b/security/models.py index a4a414e4..94ecf507 100644 --- a/security/models.py +++ b/security/models.py @@ -87,7 +87,7 @@ class CVE(models.Model): objects = CVEManager() class Meta: - ordering = ('cve_id',) + ordering = ['cve_id'] def __str__(self): return self.cve_id From c1a713861dbf28a3f6a6aaa51fabf0ab9b976a86 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 4 Mar 2025 20:05:02 -0500 Subject: [PATCH 125/199] remove unneeded saves --- errata/sources/distros/alma.py | 4 +--- errata/sources/distros/rocky.py | 5 ++--- errata/sources/distros/ubuntu.py | 2 -- errata/sources/repos/yum.py | 1 - hosts/models.py | 2 +- hosts/tasks.py | 2 +- hosts/utils.py | 9 +-------- packages/utils.py | 1 - repos/utils.py | 8 +++----- sbin/patchman | 2 +- 10 files changed, 10 insertions(+), 26 deletions(-) diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index 
889a3226..ddd2af95 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -109,7 +109,6 @@ def add_alma_erratum_osreleases(e, release): from operatingsystems.models import OSRelease osrelease, created = OSRelease.objects.get_or_create(name=f'Alma Linux {release}') e.osreleases.add(osrelease) - e.save() def add_alma_erratum_references(e, advisory): @@ -139,7 +138,6 @@ def add_alma_erratum_packages(e, advisory): p_type = Package.RPM pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) e.packages.add(pkg) - e.save() def add_alma_erratum_modules(e, advisory): @@ -156,5 +154,5 @@ def add_alma_erratum_modules(e, advisory): matching_modules = get_matching_modules(name, stream, version, context, arch) for match in matching_modules: for package in match.packages.all(): + match.packages.add(package) e.packages.add(package) - e.save() diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py index b405b755..587cb736 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -218,7 +218,6 @@ def add_rocky_erratum_oses(e, advisory): osrelease_name = f'{variant} {major_version}' osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) e.osreleases.add(osrelease) - e.save() def add_rocky_erratum_packages(e, advisory): @@ -243,7 +242,7 @@ def add_rocky_erratum_packages(e, advisory): module_stream, module_version, module_context, - arch) + arch, + ) for match in matching_modules: match.packages.add(pkg) - e.save() diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py index f0a518cc..0f00d722 100644 --- a/errata/sources/distros/ubuntu.py +++ b/errata/sources/distros/ubuntu.py @@ -132,7 +132,6 @@ def add_ubuntu_erratum_osreleases(e, affected_releases, accepted_releases): if release in accepted_releases: osrelease = OSRelease.objects.get(codename=release) e.osreleases.add(osrelease) - e.save() def release_is_affected(affected_releases, 
accepted_releases): @@ -194,7 +193,6 @@ def add_ubuntu_erratum_packages(e, advisory): ) for package in matching_packages: e.packages.add(package) - e.save() def get_accepted_ubuntu_codenames(): diff --git a/errata/sources/repos/yum.py b/errata/sources/repos/yum.py index cd46c698..127e213f 100644 --- a/errata/sources/repos/yum.py +++ b/errata/sources/repos/yum.py @@ -156,4 +156,3 @@ def add_updateinfo_packages(e, update): ) packages.add(package) e.add_packages(packages) - e.save() diff --git a/hosts/models.py b/hosts/models.py index 452c9055..186d6b91 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -301,6 +301,7 @@ def check_if_reboot_required(self, host_highest): self.reboot_required = True else: self.reboot_required = False + self.save() def find_kernel_updates(self, kernel_packages, repo_packages): @@ -328,7 +329,6 @@ def find_kernel_updates(self, kernel_packages, repo_packages): update_ids.append(uid) self.check_if_reboot_required(host_highest) - self.save() return update_ids diff --git a/hosts/tasks.py b/hosts/tasks.py index 4d53d5ab..2fdce96f 100755 --- a/hosts/tasks.py +++ b/hosts/tasks.py @@ -47,8 +47,8 @@ def find_all_host_updates_homogenous(): ts = get_datetime_now() for host in Host.objects.all(): if host not in updated_hosts: - host.updated_at = ts host.find_updates() + host.updated_at = ts host.save() # only include hosts with the exact same number of packages diff --git a/hosts/utils.py b/hosts/utils.py index 4198408e..ee45b3df 100644 --- a/hosts/utils.py +++ b/hosts/utils.py @@ -17,10 +17,6 @@ from socket import gethostbyaddr, gaierror, herror -from django.db import DatabaseError - -from patchman.signals import error_message - def update_rdns(host): """ Update the reverse DNS for a host @@ -31,7 +27,4 @@ def update_rdns(host): reversedns = 'None' host.reversedns = reversedns.lower() - try: - host.save() - except DatabaseError as e: - error_message.send(sender=None, text=e) + host.save() diff --git a/packages/utils.py b/packages/utils.py index 
9c4f9191..314f066a 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -268,7 +268,6 @@ def clean_packageupdates(): for host in duplicate.host_set.all(): host.updates.remove(duplicate) host.updates.add(update) - host.save() duplicate.delete() diff --git a/repos/utils.py b/repos/utils.py index a4df3741..44c865f3 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -81,7 +81,6 @@ def update_mirror_packages(mirror, packages): mirror_package, c = MirrorPackage.objects.get_or_create(mirror=mirror, package=package) except Package.MultipleObjectsReturned: error_message.send(sender=None, text=f'Duplicate Package found in {mirror}: {strpackage}') - mirror.save() def find_mirror_url(stored_mirror_url, formats): @@ -229,10 +228,11 @@ def fetch_mirror_data(mirror, url, text, checksum=None, checksum_type=None, meta mirror.fail() return - mirror.last_access_ok = response_is_valid(res) - if not mirror.last_access_ok: + if not response_is_valid(res): mirror.fail() return + mirror.last_access_ok = True + mirror.save() data = download_url(res, text) if not data: @@ -243,8 +243,6 @@ def fetch_mirror_data(mirror, url, text, checksum=None, checksum_type=None, meta if not mirror_checksum_is_valid(computed_checksum, checksum, mirror, metadata_type): mirror.fail() return - - mirror.save() return data diff --git a/sbin/patchman b/sbin/patchman index 6ad7a602..9589b127 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -161,9 +161,9 @@ def host_updates_alt(host=None): for host in hosts: info_message.send(sender=None, text=str(host)) if host not in updated_hosts: - host.updated_at = ts host.find_updates() info_message.send(sender=None, text='') + host.updated_at = ts host.save() # only include hosts with the same number of packages From 2c337c53555c8e752277858f3470b49a312f3325 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 4 Mar 2025 20:06:02 -0500 Subject: [PATCH 126/199] remove unneeded DatabaseError exception catching --- packages/utils.py | 23 ++++++----------------- 
reports/utils.py | 2 -- repos/utils.py | 8 +++----- 3 files changed, 9 insertions(+), 24 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index 314f066a..9501ed4f 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -187,7 +187,6 @@ def get_or_create_package_update(oldpackage, newpackage, security): """ Get or create a PackageUpdate object. Returns the object. Returns None if it cannot be created """ - updates = PackageUpdate.objects.all() # see if any version of this update exists # if it's already marked as a security update, leave it that way # if not, mark it as a security update if security==True @@ -196,23 +195,16 @@ def get_or_create_package_update(oldpackage, newpackage, security): # very likely to happen. if it does, we err on the side of caution # and mark it as the security update try: - update = updates.get( - oldpackage=oldpackage, - newpackage=newpackage - ) + update = PackageUpdate.objects.get(oldpackage=oldpackage, newpackage=newpackage) except PackageUpdate.DoesNotExist: update = None except MultipleObjectsReturned: e = 'Error: MultipleObjectsReturned when attempting to add package \n' e += f'update with oldpackage={oldpackage} | newpackage={newpackage}:' error_message.send(sender=None, text=e) - updates = updates.filter( - oldpackage=oldpackage, - newpackage=newpackage - ) + updates = PackageUpdate.objects.filter(oldpackage=oldpackage, newpackage=newpackage) for update in updates: - e = str(update) - error_message.send(sender=None, text=e) + error_message.send(sender=None, text=str(update)) return try: if update: @@ -220,16 +212,13 @@ def get_or_create_package_update(oldpackage, newpackage, security): update.security = True update.save() else: - update, c = updates.get_or_create( + update, c = PackageUpdate.objects.get_or_create( oldpackage=oldpackage, newpackage=newpackage, security=security, ) - except IntegrityError as e: - error_message.send(sender=None, text=e) - update = updates.get(oldpackage=oldpackage, - 
newpackage=newpackage, - security=security) + except IntegrityError: + update = PackageUpdate.objects.get(oldpackage=oldpackage, newpackage=newpackage, security=security) return update diff --git a/reports/utils.py b/reports/utils.py index 065e5e65..b3b8b879 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -550,8 +550,6 @@ def get_host(report, arch, osvariant, domain): host.save() except IntegrityError as e: error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) if host: host.check_rdns() return host diff --git a/repos/utils.py b/repos/utils.py index 44c865f3..934d3996 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -35,12 +35,10 @@ def get_or_create_repo(r_name, r_arch, r_type, r_id=None): Returns None if it cannot get or create the object. """ from repos.models import Repository - repositories = Repository.objects.all() try: - repository, c = repositories.get_or_create(name=r_name, arch=r_arch, repotype=r_type) - except IntegrityError as e: - error_message.send(sender=None, text=e) - repository = repositories.get(name=r_name, arch=r_arch, repotype=r_type) + repository, c = Repository.objects.get_or_create(name=r_name, arch=r_arch, repotype=r_type) + except IntegrityError: + repository = Repository.objects.get(name=r_name, arch=r_arch, repotype=r_type) if repository: if r_id: repository.repo_id = r_id From 5faaeeb36d6f20a32e430406f842ce4eb14b8804 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 4 Mar 2025 20:07:51 -0500 Subject: [PATCH 127/199] use existing function for module creation --- reports/utils.py | 24 +++--------------------- 1 file changed, 3 insertions(+), 21 deletions(-) diff --git a/reports/utils.py b/reports/utils.py index b3b8b879..3a614ec4 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -18,12 +18,12 @@ import re from socket import gethostbyaddr, herror -from django.db import IntegrityError, DatabaseError, transaction +from django.db import IntegrityError, 
transaction from arch.models import MachineArchitecture, PackageArchitecture from domains.models import Domain from hosts.models import Host, HostRepo -from modules.models import Module +from modules.utils import get_or_create_module from operatingsystems.models import OSVariant, OSRelease from packages.models import Package, PackageCategory from packages.utils import find_evr, get_or_create_package, get_or_create_package_update, parse_package_string @@ -297,25 +297,7 @@ def process_module(module_str): package = get_or_create_package(p_name, p_epoch, p_ver, p_rel, p_arch, p_type) packages.add(package) - modules = Module.objects.all() - try: - module, c = modules.get_or_create(name=m_name, - stream=m_stream, - version=m_version, - context=m_context, - arch=arch, - repo=repo) - except IntegrityError as e: - error_message.send(sender=None, text=e) - module = modules.get(name=m_name, - stream=m_stream, - version=m_version, - context=m_context, - arch=arch, - repo=repo) - except DatabaseError as e: - error_message.send(sender=None, text=e) - + module = get_or_create_module(m_name, m_stream, m_version, m_context, arch, repo) for package in packages: module.packages.add(package) return module From 8f293b3f18148eaaf870135396ae3ae54f2e58bf Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 4 Mar 2025 20:08:35 -0500 Subject: [PATCH 128/199] add a default gentoo repo for all gentoo hosts --- reports/utils.py | 40 +++++++++++++++++++++++++--------------- 1 file changed, 25 insertions(+), 15 deletions(-) diff --git a/reports/utils.py b/reports/utils.py index 3a614ec4..69263e30 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -35,6 +35,10 @@ def process_repos(report, host): """ Processes the quoted repos string sent with a report """ + if host.osvariant.name.startswith('Gentoo'): + gentoo_repo = Repository.objects.get(repo_id='gentoo') + host_repos = HostRepo.objects.filter(host=host) + hostrepo, c = host_repos.get_or_create(host=host, repo=gentoo_repo) if 
report.repos: repo_ids = [] host_repos = HostRepo.objects.filter(host=host) @@ -346,23 +350,29 @@ def process_package(pkg, protocol): package = get_or_create_package(name, epoch, ver, rel, arch, p_type) if p_type == Package.GENTOO: - category, created = PackageCategory.objects.get_or_create(name=p_category) - package.category = category + process_gentoo_package(package, name, p_category, p_repo) + return package - repo_arch, created = MachineArchitecture.objects.get_or_create(name='any') - repo_name = 'Gentoo Linux' - repo = get_or_create_repo(repo_name, repo_arch, Repository.GENTOO, p_repo) - if p_repo == 'gentoo': - url = 'https://api.gentoo.org/mirrors/distfiles.xml' - else: - # this may not be correct. the urls are hardcoded anyway in repos/utils.py - # need to figure out a better way to determine which repo/repo url to use - url = 'https://api.gentoo.org/overlays/repositories.xml' - mirror, c = Mirror.objects.get_or_create(repo=repo, url=url, mirrorlist=True) - MirrorPackage.objects.create(mirror=mirror, package=package) - package.save() - return package +def process_gentoo_package(package, name, category, repo): + """ Processes a single gentoo package + """ + category, created = PackageCategory.objects.get_or_create(name=category) + package.category = category + package.save() + + repo_arch, created = MachineArchitecture.objects.get_or_create(name='any') + repo_name = 'Gentoo Linux' + gentoo_repo = get_or_create_repo(repo_name, repo_arch, Repository.GENTOO, repo) + + if repo == 'gentoo': + url = 'https://api.gentoo.org/mirrors/distfiles.xml' + else: + # this may not be correct. 
the urls are hardcoded anyway in repos/utils.py + # need to figure out a better way to determine which repo/repo url to use + url = 'https://api.gentoo.org/overlays/repositories.xml' + mirror, c = Mirror.objects.get_or_create(repo=gentoo_repo, url=url, mirrorlist=True) + MirrorPackage.objects.create(mirror=mirror, package=package) def get_arch(arch): From 5e00709a2d4fb9f5d66e9ce6a90f4503e29204e1 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 4 Mar 2025 20:10:15 -0500 Subject: [PATCH 129/199] function import at beginning of function --- errata/sources/distros/rocky.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py index 587cb736..c6a60afc 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -210,8 +210,8 @@ def add_rocky_erratum_references(e, advisory): def add_rocky_erratum_oses(e, advisory): """ Update OS Variant, OS Release and MachineArch for Rocky Linux errata """ - affected_oses = advisory.get('affected_products') from operatingsystems.models import OSRelease + affected_oses = advisory.get('affected_products') for affected_os in affected_oses: variant = affected_os.get('variant') major_version = affected_os.get('major_version') From c9605abd056b6dfe8c31846dfbb3fce5388cc547 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 4 Mar 2025 20:42:25 -0500 Subject: [PATCH 130/199] don't allow duplicate erratum references --- ...02_alter_erratumreference_unique_together.py | 17 +++++++++++++++++ errata/models.py | 3 +++ 2 files changed, 20 insertions(+) create mode 100644 errata/migrations/0002_alter_erratumreference_unique_together.py diff --git a/errata/migrations/0002_alter_erratumreference_unique_together.py b/errata/migrations/0002_alter_erratumreference_unique_together.py new file mode 100644 index 00000000..f88ff86d --- /dev/null +++ b/errata/migrations/0002_alter_erratumreference_unique_together.py @@ -0,0 +1,17 @@ +# Generated 
by Django 4.2.19 on 2025-03-05 01:41 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('errata', '0001_initial'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='erratumreference', + unique_together={('er_type', 'url')}, + ), + ] diff --git a/errata/models.py b/errata/models.py index c08d3493..ecefdc99 100644 --- a/errata/models.py +++ b/errata/models.py @@ -30,6 +30,9 @@ class ErratumReference(models.Model): er_type = models.CharField(max_length=255) url = models.URLField(max_length=2000) + class Meta: + unique_together = ['er_type', 'url'] + def __str__(self): return self.url From 3db85b31944a469f8d5c669a7405a373a72f34a6 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 4 Mar 2025 22:36:58 -0500 Subject: [PATCH 131/199] move function to create host from report --- hosts/utils.py | 45 +++++++++++++++++++++++++++++++++++++++++++++ reports/models.py | 5 +++-- reports/utils.py | 46 +++------------------------------------------- 3 files changed, 51 insertions(+), 45 deletions(-) diff --git a/hosts/utils.py b/hosts/utils.py index ee45b3df..b328129f 100644 --- a/hosts/utils.py +++ b/hosts/utils.py @@ -17,6 +17,10 @@ from socket import gethostbyaddr, gaierror, herror +from django.db import transaction, IntegrityError + +from patchman.signals import error_message + def update_rdns(host): """ Update the reverse DNS for a host @@ -28,3 +32,44 @@ def update_rdns(host): host.reversedns = reversedns.lower() host.save() + + +def get_or_create_host(report, arch, osvariant, domain): + """ Get or create a host from from a report + """ + from hosts.models import Host + if not report.host: + try: + report.host = str(gethostbyaddr(report.report_ip)[0]) + except herror: + report.host = report.report_ip + report.save() + try: + with transaction.atomic(): + host, created = Host.objects.get_or_create( + hostname=report.host, + defaults={ + 'ipaddress': report.report_ip, + 'arch': arch, + 'osvariant': osvariant, 
+ 'domain': domain, + 'lastreport': report.created, + } + ) + host.ipaddress = report.report_ip + host.kernel = report.kernel + host.arch = arch + host.osvariant = osvariant + host.domain = domain + host.lastreport = report.created + host.tags = report.tags + if report.reboot == 'True': + host.reboot_required = True + else: + host.reboot_required = False + host.save() + except IntegrityError as e: + error_message.send(sender=None, text=e) + if host: + host.check_rdns() + return host diff --git a/reports/models.py b/reports/models.py index ab5564ba..6818ea23 100644 --- a/reports/models.py +++ b/reports/models.py @@ -18,6 +18,7 @@ from django.db import models from django.urls import reverse +from hosts.utils import get_or_create_host from patchman.signals import error_message, info_message @@ -103,11 +104,11 @@ def process(self, find_updates=True, verbose=False): info_message.send(sender=None, text=f'Report {self.id} has already been processed') return - from reports.utils import get_arch, get_os, get_domain, get_host + from reports.utils import get_arch, get_os, get_domain arch = get_arch(self.arch) osvariant = get_os(self.os, arch) domain = get_domain(self.domain) - host = get_host(self, arch, osvariant, domain) + host = get_or_create_host(self, arch, osvariant, domain) if verbose: info_message.send(sender=None, text=f'Processing report {self.id} - {self.host}') diff --git a/reports/utils.py b/reports/utils.py index 69263e30..bfc5976f 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -16,18 +16,17 @@ # along with Patchman. 
If not, see import re -from socket import gethostbyaddr, herror -from django.db import IntegrityError, transaction +from django.db import IntegrityError from arch.models import MachineArchitecture, PackageArchitecture from domains.models import Domain -from hosts.models import Host, HostRepo +from hosts.models import HostRepo from modules.utils import get_or_create_module from operatingsystems.models import OSVariant, OSRelease from packages.models import Package, PackageCategory from packages.utils import find_evr, get_or_create_package, get_or_create_package_update, parse_package_string -from patchman.signals import pbar_start, pbar_update, error_message, info_message +from patchman.signals import pbar_start, pbar_update, info_message from repos.models import Repository, Mirror, MirrorPackage from repos.utils import get_or_create_repo @@ -506,42 +505,3 @@ def get_domain(report_domain): report_domain = 'unknown' domain, c = Domain.objects.get_or_create(name=report_domain) return domain - - -def get_host(report, arch, osvariant, domain): - host = None - if not report.host: - try: - report.host = str(gethostbyaddr(report.report_ip)[0]) - except herror: - report.host = report.report_ip - report.save() - try: - with transaction.atomic(): - host, created = Host.objects.get_or_create( - hostname=report.host, - defaults={ - 'ipaddress': report.report_ip, - 'arch': arch, - 'osvariant': osvariant, - 'domain': domain, - 'lastreport': report.created, - } - ) - host.ipaddress = report.report_ip - host.kernel = report.kernel - host.arch = arch - host.osvariant = osvariant - host.domain = domain - host.lastreport = report.created - host.tags = report.tags - if report.reboot == 'True': - host.reboot_required = True - else: - host.reboot_required = False - host.save() - except IntegrityError as e: - error_message.send(sender=None, text=e) - if host: - host.check_rdns() - return host From 380e5f80876a1c22ae163bcf444c5a97fc0db8da Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: 
Tue, 4 Mar 2025 22:35:49 -0500 Subject: [PATCH 132/199] add helper functions to create osrelease and osvariants --- errata/sources/distros/alma.py | 4 +- errata/sources/distros/arch.py | 15 +++--- errata/sources/distros/centos.py | 4 +- errata/sources/distros/debian.py | 14 ++---- errata/sources/distros/rocky.py | 6 +-- errata/sources/distros/ubuntu.py | 3 +- errata/sources/repos/yum.py | 4 +- operatingsystems/utils.py | 79 ++++++++++++++++++++++++++++++++ reports/utils.py | 58 ++++------------------- 9 files changed, 107 insertions(+), 80 deletions(-) create mode 100644 operatingsystems/utils.py diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index ddd2af95..cc35c3f4 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -17,6 +17,7 @@ import concurrent.futures import json +from operatingsystems.utils import get_or_create_osrelease from packages.models import Package from packages.utils import get_or_create_package, parse_package_string from util import get_url, download_url, get_setting_of_type @@ -106,8 +107,7 @@ def process_alma_erratum(release, advisory): def add_alma_erratum_osreleases(e, release): """ Update OS Release for Alma Linux errata """ - from operatingsystems.models import OSRelease - osrelease, created = OSRelease.objects.get_or_create(name=f'Alma Linux {release}') + osrelease = get_or_create_osrelease(name=f'Alma Linux {release}') e.osreleases.add(osrelease) diff --git a/errata/sources/distros/arch.py b/errata/sources/distros/arch.py index 791e1705..a03f3860 100644 --- a/errata/sources/distros/arch.py +++ b/errata/sources/distros/arch.py @@ -17,11 +17,11 @@ import concurrent.futures import json -from operatingsystems.models import OSRelease, OSVariant +from operatingsystems.utils import get_or_create_osrelease, get_or_create_osvariant +from patchman.signals import error_message, pbar_start, pbar_update from packages.models import Package from packages.utils import find_evr, 
get_matching_packages from util import get_url, download_url -from patchman.signals import error_message, pbar_start, pbar_update def update_arch_errata(concurrent_processing=False): @@ -54,7 +54,7 @@ def parse_arch_errata(advisories, concurrent_processing): def parse_arch_errata_serially(advisories): """ Parse Arch Linux Errata Advisories serially """ - osrelease = OSRelease.objects.get(name='Arch Linux') + osrelease = get_or_create_osrelease(name='Arch Linux') elen = len(advisories) pbar_start.send(sender=None, ptext=f'Processing {elen} Arch Advisories', plen=elen) for i, advisory in enumerate(advisories): @@ -65,7 +65,7 @@ def parse_arch_errata_serially(advisories): def parse_arch_errata_concurrently(advisories): """ Parse Arch Linux Errata Advisories concurrently """ - osrelease = OSRelease.objects.get(name='Arch Linux') + osrelease = get_or_create_osrelease(name='Arch Linux') elen = len(advisories) pbar_start.send(sender=None, ptext=f'Processing {elen} Arch Advisories', plen=elen) i = 0 @@ -102,11 +102,8 @@ def process_arch_erratum(advisory, osrelease): def add_arch_linux_osrelease(): """ Add Arch Linux OSRelease and link existing OSVariants """ - osrelease, created = OSRelease.objects.get_or_create(name='Arch Linux') - osvariants = OSVariant.objects.filter(name__startswith='Arch Linux') - for osvariant in osvariants: - osvariant.osrelease = osrelease - osvariant.save() + osrelease = get_or_create_osrelease(name='Arch Linux') + get_or_create_osvariant(name='Arch Linux', osrelease=osrelease) def add_arch_erratum_references(e, advisory): diff --git a/errata/sources/distros/centos.py b/errata/sources/distros/centos.py index a2fd1639..33905337 100644 --- a/errata/sources/distros/centos.py +++ b/errata/sources/distros/centos.py @@ -17,6 +17,7 @@ import re from lxml import etree +from operatingsystems.utils import get_or_create_osrelease from packages.models import Package from packages.utils import parse_package_string, get_or_create_package from patchman.signals 
import error_message, pbar_start, pbar_update @@ -114,14 +115,13 @@ def add_centos_erratum_references(e, references): def parse_centos_errata_children(e, children): """ Parse errata children to obtain architecture, release and packages """ - from operatingsystems.models import OSRelease for c in children: if c.tag == 'os_arch': pass elif c.tag == 'os_release': if accepted_centos_release([c.text]): osrelease_name = f'CentOS {c.text}' - osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) + osrelease = get_or_create_osrelease(name=osrelease_name) e.osreleases.add(osrelease) elif c.tag == 'packages': name, epoch, ver, rel, dist, arch = parse_package_string(c.text) diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index e19af829..78ae9ad4 100644 --- a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -22,13 +22,12 @@ from io import StringIO from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential -from django.db.utils import IntegrityError - from operatingsystems.models import OSRelease +from operatingsystems.utils import get_or_create_osrelease from packages.models import Package from packages.utils import get_or_create_package, find_evr -from util import get_url, download_url, get_setting_of_type from patchman.signals import error_message, pbar_start, pbar_update +from util import get_url, download_url, get_setting_of_type def update_debian_errata(concurrent_processing=True): @@ -247,14 +246,7 @@ def create_debian_os_releases(codename_to_version): for codename, version in codename_to_version.items(): if codename in accepted_codenames: osrelease_name = f'Debian {version}' - try: - osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name, codename=codename) - except IntegrityError: - osreleases = OSRelease.objects.filter(name=osrelease_name) - if osreleases.count() == 1: - osrelease = osreleases[0] - osrelease.codename = codename - 
osrelease.save() + get_or_create_osrelease(name=osrelease_name, codename=codename) def process_debian_erratum_affected_packages(e, package_data): diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py index c6a60afc..b5714530 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -20,10 +20,11 @@ from django.db.utils import OperationalError +from operatingsystems.utils import get_or_create_osrelease from packages.models import Package from packages.utils import parse_package_string, get_or_create_package -from util import get_url, download_url, info_message, error_message from patchman.signals import pbar_start, pbar_update +from util import get_url, download_url, info_message, error_message def update_rocky_errata(concurrent_processing=True): @@ -210,13 +211,12 @@ def add_rocky_erratum_references(e, advisory): def add_rocky_erratum_oses(e, advisory): """ Update OS Variant, OS Release and MachineArch for Rocky Linux errata """ - from operatingsystems.models import OSRelease affected_oses = advisory.get('affected_products') for affected_os in affected_oses: variant = affected_os.get('variant') major_version = affected_os.get('major_version') osrelease_name = f'{variant} {major_version}' - osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) + osrelease = get_or_create_osrelease(name=osrelease_name) e.osreleases.add(osrelease) diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py index 0f00d722..ed95c2b6 100644 --- a/errata/sources/distros/ubuntu.py +++ b/errata/sources/distros/ubuntu.py @@ -22,6 +22,7 @@ from urllib.parse import urlparse from operatingsystems.models import OSRelease, OSVariant +from operatingsystems.utils import get_or_create_osrelease from packages.models import Package, PackageName from packages.utils import get_or_create_package, parse_package_string, find_evr from util import get_url, download_url, get_sha256, bunzip2, get_setting_of_type @@ 
-230,7 +231,7 @@ def create_ubuntu_os_releases(codename_to_version): for codename, version in codename_to_version.items(): if codename in accepted_codenames: osrelease_name = f'Ubuntu {version}' - osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name, codename=codename) + osrelease = get_or_create_osrelease(name=osrelease_name, codename=codename) for osvariant in OSVariant.objects.filter(name__startswith=osrelease_name.replace(' LTS', '')): osvariant.osrelease = osrelease osvariant.save() diff --git a/errata/sources/repos/yum.py b/errata/sources/repos/yum.py index 127e213f..36e5d1f8 100644 --- a/errata/sources/repos/yum.py +++ b/errata/sources/repos/yum.py @@ -18,6 +18,7 @@ from io import BytesIO import defusedxml.ElementTree as ET +from operatingsystems.utils import get_or_create_osrelease from packages.models import Package from packages.utils import get_or_create_package from patchman.signals import pbar_start, pbar_update, error_message @@ -115,7 +116,6 @@ def add_updateinfo_osreleases(e, collection, osrelease_names): """ Adds OSRelease objects to an Erratum rocky and alma need some renaming """ - from operatingsystems.models import OSRelease if not osrelease_names: collection_name = collection.find('name') if collection_name is not None: @@ -128,7 +128,7 @@ def add_updateinfo_osreleases(e, collection, osrelease_names): elif osrelease_name.startswith('rocky-linux'): version = osrelease_name.split('-')[2] osrelease_name = 'Rocky Linux ' + version - osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) + osrelease = get_or_create_osrelease(name=osrelease_name) e.osreleases.add(osrelease) diff --git a/operatingsystems/utils.py b/operatingsystems/utils.py new file mode 100644 index 00000000..c66182be --- /dev/null +++ b/operatingsystems/utils.py @@ -0,0 +1,79 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. 
+# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.db import IntegrityError + + +def get_or_create_osrelease(name, cpe_name=None, codename=None): + """ Get or create OSRelease from OS details + """ + from operatingsystems.models import OSRelease + osrelease = None + updated = False + if cpe_name: + try: + osrelease, created = OSRelease.objects.get_or_create(name=name, cpe_name=cpe_name) + except IntegrityError: + osreleases = OSRelease.objects.filter(cpe_name=cpe_name) + if osreleases.count() == 1: + osrelease = osreleases.first() + osrelease.name = name + if not osrelease and codename: + try: + osrelease, created = OSRelease.objects.get_or_create(name=name, codename=codename) + except IntegrityError: + osreleases = OSRelease.objects.filter(codename=codename) + if osreleases.count() == 1: + osrelease = osreleases.first() + osrelease.name = name + osrelease.save() + if not osrelease: + osrelease, created = OSRelease.objects.get_or_create(name=name) + if cpe_name and osrelease.cpe_name != cpe_name: + osrelease.cpe_name = cpe_name + updated = True + if codename and osrelease.codename != codename: + osrelease.codename = codename + updated = True + if updated: + osrelease.save() + return osrelease + + +def get_or_create_osvariant(name, osrelease, codename=None, arch=None): + """ Get or create OSVariant from OSRelease and os details + """ + from operatingsystems.models import OSVariant + osvariant = None + updated = False + try: + osvariant, created = 
OSVariant.objects.get_or_create(name=name, arch=arch) + except IntegrityError: + osvariants = OSVariant.objects.filter(name=name) + if osvariants.count() == 1: + osvariant = osvariants.first() + if osvariant.osrelease != osrelease: + osvariant.osrelease = osrelease + updated = True + if arch and osvariant.arch != arch: + osvariant.arch = arch + updated = True + if codename and osvariant.codename != codename: + osvariant.codename = codename + updated = True + if updated: + osvariant.save() + return osvariant diff --git a/reports/utils.py b/reports/utils.py index bfc5976f..eb5b7481 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -23,7 +23,7 @@ from domains.models import Domain from hosts.models import HostRepo from modules.utils import get_or_create_module -from operatingsystems.models import OSVariant, OSRelease +from operatingsystems.utils import get_or_create_osrelease, get_or_create_osvariant from packages.models import Package, PackageCategory from packages.utils import find_evr, get_or_create_package, get_or_create_package_update, parse_package_string from patchman.signals import pbar_start, pbar_update, info_message @@ -448,55 +448,13 @@ def get_os(os, arch): osvariant_name = os.replace(' Server', '') osrelease_name = osvariant_name.split('.')[0] - osrelease = get_osrelease(osrelease_name, osrelease_codename, cpe_name) - osvariant = get_osvariant(osrelease, osvariant_name, osvariant_codename, arch) - return osvariant - - -def get_osrelease(osrelease_name, osrelease_codename, cpe_name): - """ Get or create OSRelease from os details - """ - osrelease = None - if cpe_name: - try: - osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name, cpe_name=cpe_name) - except IntegrityError: - osreleases = OSRelease.objects.filter(cpe_name=cpe_name) - if osreleases.count() == 1: - osrelease = osreleases[0] - osrelease.name = osrelease_name - if not osrelease and osrelease_codename: - osreleases = OSRelease.objects.filter(codename=osrelease_codename) 
- if osreleases.count() == 1: - osrelease = osreleases[0] - if not osrelease and osrelease_name: - osrelease, created = OSRelease.objects.get_or_create(name=osrelease_name) - if osrelease and cpe_name: - osrelease.cpe_name = cpe_name - if osrelease and osrelease_codename: - osrelease.codename = osrelease_codename - osrelease.save() - return osrelease - - -def get_osvariant(osrelease, osvariant_name, osvariant_codename, arch): - """ Get or create OSVariant from OSRelease and os details - """ - if not osrelease: - return - - try: - osvariant, created = OSVariant.objects.get_or_create(name=osvariant_name, arch=arch) - except IntegrityError: - osvariants = OSVariant.objects.filter(name=osvariant_name) - if osvariants.count() == 1: - osvariant = osvariants[0] - if osvariant.arch is None: - osvariant.arch = arch - if osvariant and osvariant_codename: - osvariant.codename = osvariant_codename - osvariant.osrelease = osrelease - osvariant.save() + osrelease = get_or_create_osrelease(name=osrelease_name, codename=osrelease_codename, cpe_name=cpe_name) + osvariant = get_or_create_osvariant( + name=osvariant_name, + osrelease=osrelease, + codename=osvariant_codename, + arch=arch, + ) return osvariant From c25ab18b3130dc76639a8238da141b82138bf783 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 4 Mar 2025 23:27:15 -0500 Subject: [PATCH 133/199] add cli option to remove package duplicates --- sbin/patchman | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/sbin/patchman b/sbin/patchman index 9589b127..67278fcb 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -351,11 +351,11 @@ def process_reports(host=None, force=False): report.process(find_updates=False) -def dbcheck(): +def dbcheck(remove_duplicates=False): """ Runs all clean_* functions to check database consistency """ clean_packageupdates() - clean_packages() + clean_packages(remove_duplicates) clean_packagenames() clean_architectures() clean_repos() @@ -421,6 +421,9 @@ def 
collect_args(): parser.add_argument( '-d', '--dbcheck', action='store_true', help='Perform some sanity checks and clean unused db entries') + parser.add_argument( + '-rd', '--remove-duplicates', action='store_true', + help='Remove duplicates during dbcheck - this may take some time') parser.add_argument( '-n', '--dns-checks', action='store_true', help='Perform reverse DNS checks if enabled for that Host') @@ -494,7 +497,7 @@ def process_args(args): process_reports(args.host, args.force) showhelp = False if args.dbcheck: - dbcheck() + dbcheck(args.remove_duplicates) showhelp = False if args.refresh_repos: refresh_repos(args.repo, args.force) @@ -509,7 +512,7 @@ def process_args(args): showhelp = False recheck = True if args.dbcheck and recheck: - dbcheck() + dbcheck(args.remove_duplicates) if args.dns_checks: dns_checks(args.host) showhelp = False From 679150c794157121c35e0cdb24c970494c6b6a14 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 5 Mar 2025 02:14:05 -0500 Subject: [PATCH 134/199] add more info to cve template --- security/templates/security/cve_detail.html | 30 ++++++++++++--------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/security/templates/security/cve_detail.html b/security/templates/security/cve_detail.html index 52c622a9..0dda0ccf 100644 --- a/security/templates/security/cve_detail.html +++ b/security/templates/security/cve_detail.html @@ -47,23 +47,29 @@ OSes Affected - {% for osrelease in osreleases %} - {{ osrelease }}
    - {% endfor %} + + {% for osrelease in osreleases %} + + + {% endfor %} +
    {{ osrelease }} +
    - URLs + Links - - - - {% for reference in cve.erratum_set.references.all %} - - - - + + + + {% for erratum in cve.erratum_set.all %} + {% for reference in erratum.references.all %} + + + + + {% endfor %} {% endfor %}
    NIST {% bootstrap_icon "link" %}
    MITRE {% bootstrap_icon "link" %}
    osv.dev {% bootstrap_icon "link" %}
    {{ reference.er_type }}{{reference.url}}
    NISThttps://nvd.nist.gov/vuln/detail/{{ cve.cve_id }} {% bootstrap_icon "link" %}
    MITREhttps://www.cve.org/CVERecord?id={{ cve.cve_id }} {% bootstrap_icon "link" %}
    osv.devhttps://osv.dev/vulnerability/{{ cve.cve_id }} {% bootstrap_icon "link" %}
    {{ reference.er_type }}{{reference.url}} {% bootstrap_icon "link" %}
    From a956456a849ddf18a99e64bdcd21ab298b50d81c Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 5 Mar 2025 02:14:37 -0500 Subject: [PATCH 135/199] add transaction.atomic for package creation --- packages/utils.py | 38 ++++++++++++++++++++------------------ 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index 9501ed4f..9a8fa20c 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -18,7 +18,7 @@ import re from django.core.exceptions import MultipleObjectsReturned -from django.db import IntegrityError +from django.db import IntegrityError, transaction from arch.models import PackageArchitecture from packages.models import PackageName, Package, PackageUpdate, PackageCategory, PackageString @@ -61,15 +61,16 @@ def convert_packagestring_to_package(strpackage): else: category = None - package, created = Package.objects.get_or_create( - name=name, - epoch=epoch, - version=version, - release=release, - arch=arch, - packagetype=packagetype, - category=category, - ) + with transaction.atomic(): + package, created = Package.objects.get_or_create( + name=name, + epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype=packagetype, + category=category, + ) return package @@ -172,14 +173,15 @@ def get_or_create_package(name, epoch, version, release, arch, p_type): package_name, c = PackageName.objects.get_or_create(name=name) package_arch, c = PackageArchitecture.objects.get_or_create(name=arch) - package, c = Package.objects.get_or_create( - name=package_name, - arch=package_arch, - epoch=epoch, - version=version, - release=release, - packagetype=p_type, - ) + with transaction.atomic(): + package, c = Package.objects.get_or_create( + name=package_name, + arch=package_arch, + epoch=epoch, + version=version, + release=release, + packagetype=p_type, + ) return package From 7ca8e8d772c7b1ceaba44b751a3fd8cd65d6d789 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 5 Mar 2025 18:08:03 
-0500 Subject: [PATCH 136/199] move reference objects under security --- errata/admin.py | 3 +- ...ratumreference_alter_erratum_references.py | 26 +++++ errata/models.py | 30 ++---- errata/serializers.py | 8 +- errata/sources/distros/alma.py | 10 +- errata/templates/errata/erratum_detail.html | 2 +- errata/templates/errata/erratum_table.html | 2 +- .../errata/erratumreference_list.html | 7 -- errata/urls.py | 1 - errata/utils.py | 59 ------------ errata/views.py | 53 +--------- patchman/urls.py | 2 +- security/admin.py | 3 +- .../0005_reference_cve_references.py | 29 ++++++ security/models.py | 13 +++ security/serializers.py | 8 +- security/templates/security/cve_detail.html | 12 +-- .../templates/security/reference_list.html | 7 ++ .../templates/security/reference_table.html | 2 +- .../templates/security/security_landing.html | 1 + security/urls.py | 1 + security/utils.py | 96 +++++++++++++++++-- security/views.py | 59 +++++++++++- 23 files changed, 255 insertions(+), 179 deletions(-) create mode 100644 errata/migrations/0003_delete_erratumreference_alter_erratum_references.py delete mode 100644 errata/templates/errata/erratumreference_list.html create mode 100644 security/migrations/0005_reference_cve_references.py create mode 100644 security/templates/security/reference_list.html rename errata/templates/errata/erratumreference_table.html => security/templates/security/reference_table.html (93%) diff --git a/errata/admin.py b/errata/admin.py index 20e7066f..7bffc909 100644 --- a/errata/admin.py +++ b/errata/admin.py @@ -15,7 +15,7 @@ # along with Patchman. 
If not, see from django.contrib import admin -from errata.models import Erratum, ErratumReference +from errata.models import Erratum class ErratumAdmin(admin.ModelAdmin): @@ -23,4 +23,3 @@ class ErratumAdmin(admin.ModelAdmin): admin.site.register(Erratum, ErratumAdmin) -admin.site.register(ErratumReference) diff --git a/errata/migrations/0003_delete_erratumreference_alter_erratum_references.py b/errata/migrations/0003_delete_erratumreference_alter_erratum_references.py new file mode 100644 index 00000000..aebfd7ed --- /dev/null +++ b/errata/migrations/0003_delete_erratumreference_alter_erratum_references.py @@ -0,0 +1,26 @@ +# Generated by Django 4.2.19 on 2025-03-05 19:57 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0005_reference_cve_references'), + ('errata', '0002_alter_erratumreference_unique_together'), + ] + + operations = [ + migrations.RemoveField( + model_name='erratum', + name='references', + ), + migrations.DeleteModel( + name='ErratumReference', + ), + migrations.AddField( + model_name='erratum', + name='references', + field=models.ManyToManyField(blank=True, to='security.Reference'), + ), + ] diff --git a/errata/models.py b/errata/models.py index ecefdc99..8490c5a9 100644 --- a/errata/models.py +++ b/errata/models.py @@ -20,23 +20,11 @@ from packages.models import Package, PackageUpdate from errata.managers import ErratumManager -from security.models import CVE -from security.utils import get_or_create_cve +from security.models import CVE, Reference +from security.utils import get_or_create_cve, get_or_create_reference from patchman.signals import error_message -class ErratumReference(models.Model): - - er_type = models.CharField(max_length=255) - url = models.URLField(max_length=2000) - - class Meta: - unique_together = ['er_type', 'url'] - - def __str__(self): - return self.url - - class Erratum(models.Model): name = models.CharField(max_length=255, unique=True) @@ -47,7 
+35,7 @@ class Erratum(models.Model): from operatingsystems.models import OSRelease osreleases = models.ManyToManyField(OSRelease, blank=True) cves = models.ManyToManyField(CVE, blank=True) - references = models.ManyToManyField(ErratumReference, blank=True) + references = models.ManyToManyField(Reference, blank=True) objects = ErratumManager() @@ -90,14 +78,8 @@ def add_cve(self, cve_id): """ self.cves.add(get_or_create_cve(cve_id)) - def add_reference(self, e_type, url): + def add_reference(self, ref_type, url): """ Add a reference to an Erratum object """ - from errata.utils import fixup_erratum_reference - reference = fixup_erratum_reference({'er_type': e_type, 'url': url}) - if reference: - er, created = ErratumReference.objects.get_or_create( - er_type=reference.get('er_type'), - url=reference.get('url'), - ) - self.references.add(er) + reference = get_or_create_reference(ref_type=ref_type, url=url) + self.references.add(reference) diff --git a/errata/serializers.py b/errata/serializers.py index 274326cc..c559a422 100644 --- a/errata/serializers.py +++ b/errata/serializers.py @@ -16,16 +16,10 @@ from rest_framework import serializers -from errata.models import Erratum, ErratumReference +from errata.models import Erratum class ErratumSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Erratum fields = ('id', 'name', 'e_type', 'issue_date', 'synopsis', 'cves', 'releases', 'references') - - -class ErratumReferenceSerializer(serializers.HyperlinkedModelSerializer): - class Meta: - model = ErratumReference - fields = ('id', 'er_type', 'url') diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index cc35c3f4..a37f917c 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -117,14 +117,14 @@ def add_alma_erratum_references(e, advisory): references = advisory.get('references') for reference in references: ref_id = reference.get('id') - er_type = reference.get('type') + ref_type = 
reference.get('type') er_url = reference.get('href') - if er_type == 'cve': + if ref_type == 'cve': e.add_cve(ref_id) continue - if er_type == 'self': - er_type = ref_id.split('-')[0].upper() - e.add_reference(er_type, er_url) + if ref_type == 'self': + ref_type = ref_id.split('-')[0].upper() + e.add_reference(ref_type, er_url) def add_alma_erratum_packages(e, advisory): diff --git a/errata/templates/errata/erratum_detail.html b/errata/templates/errata/erratum_detail.html index dd640a47..71e5d2e1 100644 --- a/errata/templates/errata/erratum_detail.html +++ b/errata/templates/errata/erratum_detail.html @@ -46,7 +46,7 @@ {% for reference in erratum.references.all %} - + {% endfor %} diff --git a/errata/templates/errata/erratum_table.html b/errata/templates/errata/erratum_table.html index e52fe8a3..2ea388b9 100644 --- a/errata/templates/errata/erratum_table.html +++ b/errata/templates/errata/erratum_table.html @@ -22,7 +22,7 @@ - + {% endfor %} diff --git a/errata/templates/errata/erratumreference_list.html b/errata/templates/errata/erratumreference_list.html deleted file mode 100644 index 36ac6357..00000000 --- a/errata/templates/errata/erratumreference_list.html +++ /dev/null @@ -1,7 +0,0 @@ -{% extends "objectlist.html" %} - -{% block page_title %}Erratum References{% endblock %} - -{% block breadcrumbs %} {{ block.super }}
  • Errata
  • Erratum References
  • {% endblock %} - -{% block content_title %} Erratum References {% endblock %} diff --git a/errata/urls.py b/errata/urls.py index 33624459..6ec1cac0 100644 --- a/errata/urls.py +++ b/errata/urls.py @@ -23,5 +23,4 @@ urlpatterns = [ path('', views.erratum_list, name='erratum_list'), path('errata//', views.erratum_detail, name='erratum_detail'), - path('references/', views.erratumreference_list, name='erratumreference_list'), ] diff --git a/errata/utils.py b/errata/utils.py index eab3f79e..a1859b5f 100644 --- a/errata/utils.py +++ b/errata/utils.py @@ -58,65 +58,6 @@ def get_or_create_erratum(name, e_type, issue_date, synopsis): return e, created -def fixup_erratum_reference(eref): - """ Fix up an ErratumReference object to normalize the URL and type - """ - url = urlparse(eref.get('url')) - er_type = eref.get('er_type') - if 'lists' in url.hostname or 'lists' in url.path: - er_type = 'Mailing List' - if er_type == 'bugzilla' or 'bug' in url.hostname or 'bugs' in url.path: - er_type = 'Bug Tracker' - if ('ubuntu.com' in url.hostname and 'usn/' in url.path) or url.hostname == 'usn.ubuntu.com': - netloc = url.netloc.replace('usn.', '').replace('www.', '') - path = url.path.replace('usn/', 'security/notices/').replace('usn', 'USN').rstrip('/') - usn_id = path.split('/')[-1] - if 'USN' not in usn_id: - path = '/'.join(path.split('/')[:-1]) + '/USN-' + usn_id - url = url._replace(netloc=netloc, path=path) - if url.hostname == 'ubuntu.com' and url.path.startswith('/security/notices/USN'): - er_type = 'USN' - if 'launchpad.net' in url.hostname: - er_type = 'Bug Tracker' - netloc = url.netloc.replace('bugs.', '') - bug = url.path.split('/')[-1] - path = f'/bugs/{bug}' - url = url._replace(netloc=netloc, path=path) - if url.hostname in ['bugzilla.redhat.com', 'bugzilla.opensuse.org', 'bugs.suse.com'] and \ - url.path == '/show_bug.cgi': - bug = url.query.split('=')[1] - path = f'/{bug}' - url = url._replace(path=path, query='') - if url.hostname == 'rhn.redhat.com': - 
netloc = url.netloc.replace('rhn', 'access') - path = url.path.replace('.html', '') - url = url._replace(netloc=netloc, path=path) - if url.hostname == 'access.redhat.com': - if 'l1d-cache-eviction-and-vector-register-sampling' in url.path or \ - 'security/vulnerabilities/speculativeexecution' in url.path or \ - 'security/vulnerabilities/stackguard' in url.path: - er_type = 'Link' - elif 'security/cve' in url.path: - return - else: - old_ref = url.path.split('/')[-1] - refs = old_ref.split('-') - if ':' not in url.path: - try: - new_ref = f'{refs[0]}-{refs[1]}:{refs[2]}' - path = url.path.replace(old_ref, new_ref) - url = url._replace(path=path) - except IndexError: - pass - er_type = refs[0].upper() - final_url = url.geturl() - if final_url in ['https://launchpad.net/bugs/', 'https://launchpad.net/bugs/XXXXXX']: - return - eref['er_type'] = er_type - eref['url'] = final_url - return eref - - def mark_errata_security_updates(): """ For each set of erratum packages, modify any PackageUpdate that should be marked as a security update. 
diff --git a/errata/views.py b/errata/views.py index 7e76d832..0564e1dd 100644 --- a/errata/views.py +++ b/errata/views.py @@ -22,8 +22,8 @@ from rest_framework import viewsets from operatingsystems.models import OSRelease -from errata.models import Erratum, ErratumReference -from errata.serializers import ErratumSerializer, ErratumReferenceSerializer +from errata.models import Erratum +from errata.serializers import ErratumSerializer from util.filterspecs import Filter, FilterBar @@ -80,48 +80,6 @@ def erratum_list(request): 'terms': terms}) -@login_required -def erratumreference_list(request): - erefs = ErratumReference.objects.select_related().order_by('er_type') - - if 'er_type' in request.GET: - erefs = erefs.filter(er_type=request.GET['er_type']).distinct() - - if 'erratum_id' in request.GET: - erefs = erefs.filter(erratum__id=request.GET['erratum_id']) - - if 'search' in request.GET: - terms = request.GET['search'].lower() - query = Q() - for term in terms.split(' '): - q = Q(url__icontains=term) - query = query & q - erefs = erefs.filter(query) - else: - terms = '' - - page_no = request.GET.get('page') - paginator = Paginator(erefs, 50) - - try: - page = paginator.page(page_no) - except PageNotAnInteger: - page = paginator.page(1) - except EmptyPage: - page = paginator.page(paginator.num_pages) - - filter_list = [] - filter_list.append(Filter(request, 'Reference Type', 'er_type', - ErratumReference.objects.values_list('er_type', flat=True).distinct())) - filter_bar = FilterBar(request, filter_list) - - return render(request, - 'errata/erratumreference_list.html', - {'page': page, - 'filter_bar': filter_bar, - 'terms': terms}) - - @login_required def erratum_detail(request, erratum_name): erratum = get_object_or_404(Erratum, name=erratum_name) @@ -135,10 +93,3 @@ class ErratumViewSet(viewsets.ModelViewSet): """ queryset = Erratum.objects.all() serializer_class = ErratumSerializer - - -class ErratumReferenceViewSet(viewsets.ModelViewSet): - """ API endpoint 
that allows erratum references to be viewed or edited. - """ - queryset = ErratumReference.objects.all() - serializer_class = ErratumReferenceSerializer diff --git a/patchman/urls.py b/patchman/urls.py index e8576c7a..337d6b63 100644 --- a/patchman/urls.py +++ b/patchman/urls.py @@ -44,8 +44,8 @@ router.register(r'package', package_views.PackageViewSet) router.register(r'package-update', package_views.PackageUpdateViewSet) router.register(r'cve', security_views.CVEViewSet) +router.register(r'reference', security_views.ReferenceViewSet), router.register(r'erratum', errata_views.ErratumViewSet) -router.register(r'erratum-reference', errata_views.ErratumReferenceViewSet) router.register(r'repo', repo_views.RepositoryViewSet) router.register(r'mirror', repo_views.MirrorViewSet) router.register(r'mirror-package', repo_views.MirrorPackageViewSet) diff --git a/security/admin.py b/security/admin.py index 9c90a8fe..196a9468 100644 --- a/security/admin.py +++ b/security/admin.py @@ -15,9 +15,10 @@ # along with Patchman. 
If not, see from django.contrib import admin -from security.models import CWE, CVSS, CVE +from security.models import CWE, CVSS, CVE, Reference admin.site.register(CWE) admin.site.register(CVSS) admin.site.register(CVE) +admin.site.register(Reference) diff --git a/security/migrations/0005_reference_cve_references.py b/security/migrations/0005_reference_cve_references.py new file mode 100644 index 00000000..97251add --- /dev/null +++ b/security/migrations/0005_reference_cve_references.py @@ -0,0 +1,29 @@ +# Generated by Django 4.2.19 on 2025-03-05 19:57 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0004_alter_cve_options'), + ] + + operations = [ + migrations.CreateModel( + name='Reference', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('ref_type', models.CharField(max_length=255)), + ('url', models.URLField(max_length=2000)), + ], + options={ + 'unique_together': {('ref_type', 'url')}, + }, + ), + migrations.AddField( + model_name='cve', + name='references', + field=models.ManyToManyField(blank=True, to='security.reference'), + ), + ] diff --git a/security/models.py b/security/models.py index 94ecf507..afaac1a0 100644 --- a/security/models.py +++ b/security/models.py @@ -24,6 +24,18 @@ from util import get_url, download_url, tz_aware_datetime, error_message +class Reference(models.Model): + + ref_type = models.CharField(max_length=255) + url = models.URLField(max_length=2000) + + class Meta: + unique_together = ['ref_type', 'url'] + + def __str__(self): + return self.url + + class CWE(models.Model): cwe_id = models.CharField(max_length=255, unique=True) @@ -83,6 +95,7 @@ class CVE(models.Model): updated_date = models.DateTimeField(blank=True, null=True) cwes = models.ManyToManyField(CWE, blank=True) cvss_scores = models.ManyToManyField(CVSS, blank=True) + references = models.ManyToManyField(Reference, blank=True) 
objects = CVEManager() diff --git a/security/serializers.py b/security/serializers.py index c66a72ad..31730c53 100644 --- a/security/serializers.py +++ b/security/serializers.py @@ -16,7 +16,7 @@ from rest_framework import serializers -from security.models import CVE, CWE +from security.models import CVE, CWE, Reference class CWESerializer(serializers.HyperlinkedModelSerializer): @@ -30,3 +30,9 @@ class Meta: model = CVE fields = ('cve_id', 'title', 'description', 'cvss_score', 'cwe', 'registered_date', 'published_date', 'updated_date') + + +class ReferenceSerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = Reference + fields = ('id', 'ref_type', 'url') diff --git a/security/templates/security/cve_detail.html b/security/templates/security/cve_detail.html index 0dda0ccf..bdf77377 100644 --- a/security/templates/security/cve_detail.html +++ b/security/templates/security/cve_detail.html @@ -63,13 +63,11 @@ - {% for erratum in cve.erratum_set.all %} - {% for reference in erratum.references.all %} - - - - - {% endfor %} + {% for reference in references %} + + + + {% endfor %}
    {{ reference.er_type }}{{ reference.ref_type }} {{reference.url}}
    {{ erratum.packages.count }} {{ erratum.osreleases.count }} {{ erratum.cves.count }}{{ erratum.references.count }}{{ erratum.references.count }}
    NISThttps://nvd.nist.gov/vuln/detail/{{ cve.cve_id }} {% bootstrap_icon "link" %}
    MITREhttps://www.cve.org/CVERecord?id={{ cve.cve_id }} {% bootstrap_icon "link" %}
    osv.devhttps://osv.dev/vulnerability/{{ cve.cve_id }} {% bootstrap_icon "link" %}
    {{ reference.er_type }}{{reference.url}} {% bootstrap_icon "link" %}
    {{ reference.ref_type }}{{reference.url}} {% bootstrap_icon "link" %}
    diff --git a/security/templates/security/reference_list.html b/security/templates/security/reference_list.html new file mode 100644 index 00000000..2ae1dab3 --- /dev/null +++ b/security/templates/security/reference_list.html @@ -0,0 +1,7 @@ +{% extends "objectlist.html" %} + +{% block page_title %}References{% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Security
  • References
  • {% endblock %} + +{% block content_title %} References {% endblock %} diff --git a/errata/templates/errata/erratumreference_table.html b/security/templates/security/reference_table.html similarity index 93% rename from errata/templates/errata/erratumreference_table.html rename to security/templates/security/reference_table.html index 5a96ca5c..a28ff719 100644 --- a/errata/templates/errata/erratumreference_table.html +++ b/security/templates/security/reference_table.html @@ -10,7 +10,7 @@ {% for eref in object_list %} - {{ eref.er_type }} + {{ eref.ref_type }} {{ eref.url }} {{ eref.erratum_set.count }} diff --git a/security/templates/security/security_landing.html b/security/templates/security/security_landing.html index ad0f175a..4a79c139 100644 --- a/security/templates/security/security_landing.html +++ b/security/templates/security/security_landing.html @@ -13,6 +13,7 @@ CVEs CWEs Security Errata + Security References
    diff --git a/security/urls.py b/security/urls.py index b3bf3506..c87b9a67 100644 --- a/security/urls.py +++ b/security/urls.py @@ -26,4 +26,5 @@ path('cves/', views.cve_detail, name='cve_detail'), path('cwes', views.cwe_list, name='cwe_list'), path('cwes/', views.cwe_detail, name='cwe_detail'), + path('references/', views.reference_list, name='reference_list'), ] diff --git a/security/utils.py b/security/utils.py index c224f5d3..d106e9f2 100644 --- a/security/utils.py +++ b/security/utils.py @@ -14,14 +14,16 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see -from security.models import CVE, CWE +from urllib.parse import urlparse + +from security.models import CVE, CWE, Reference def get_cve_reference(cve_id): """ Given a CVE ID, return a dictionary with the URL to the CVE record. """ url = f'https://www.cve.org/CVERecord?id={cve_id}' - return {'er_type': 'CVE', 'url': url} + return {'ref_type': 'CVE', 'url': url} def get_or_create_cve(cve_id): @@ -31,18 +33,98 @@ def get_or_create_cve(cve_id): return cve -def update_cves(): +def update_cves(cve_id=None, download_nist_data=False): """ Download the latest CVE data from the CVE API. e.g. https://cveawg.mitre.org/api/cve/CVE-2024-1234 """ - for cve in CVE.objects.all(): - cve.download_cve_data() + if cve_id: + cve = CVE.objects.get(cve_id=cve_id) + cve.download_cve_data(download_nist_data, sleep_secs=0) + else: + for cve in CVE.objects.all(): + cve.download_cve_data(download_nist_data) -def update_cwes(): +def update_cwes(cve_id=None): """ Download the latest CWEs from the CWE API. e.g. 
https://cwe-api.mitre.org/api/v1/cwe/74,79 https://cwe-api.mitre.org/api/v1/cwe/weakness/79 """ - for cwe in CWE.objects.all(): + if cve_id: + cve = CVE.objects.get(cve_id=cve_id) + cwes = cve.cwes.all() + else: + cwes = CWE.objects.all() + for cwe in cwes: cwe.download_cwe_data() + + +def fixup_reference(ref): + """ Fix up a Security Reference object to normalize the URL and type + """ + url = urlparse(ref.get('url')) + ref_type = ref.get('ref_type') + if 'lists' in url.hostname or 'lists' in url.path: + ref_type = 'Mailing List' + if ref_type == 'bugzilla' or 'bug' in url.hostname or 'bugs' in url.path: + ref_type = 'Bug Tracker' + if ('ubuntu.com' in url.hostname and 'usn/' in url.path) or url.hostname == 'usn.ubuntu.com': + netloc = url.netloc.replace('usn.', '').replace('www.', '') + path = url.path.replace('usn/', 'security/notices/').replace('usn', 'USN').rstrip('/') + usn_id = path.split('/')[-1] + if 'USN' not in usn_id: + path = '/'.join(path.split('/')[:-1]) + '/USN-' + usn_id + url = url._replace(netloc=netloc, path=path) + if url.hostname == 'ubuntu.com' and url.path.startswith('/security/notices/USN'): + ref_type = 'USN' + if 'launchpad.net' in url.hostname: + ref_type = 'Bug Tracker' + netloc = url.netloc.replace('bugs.', '') + bug = url.path.split('/')[-1] + path = f'/bugs/{bug}' + url = url._replace(netloc=netloc, path=path) + if url.hostname in ['bugzilla.redhat.com', 'bugzilla.opensuse.org', 'bugs.suse.com'] and \ + url.path == '/show_bug.cgi': + bug = url.query.split('=')[1] + path = f'/{bug}' + url = url._replace(path=path, query='') + if url.hostname == 'rhn.redhat.com': + netloc = url.netloc.replace('rhn', 'access') + path = url.path.replace('.html', '') + url = url._replace(netloc=netloc, path=path) + if url.hostname == 'access.redhat.com': + if 'l1d-cache-eviction-and-vector-register-sampling' in url.path or \ + 'security/vulnerabilities/speculativeexecution' in url.path or \ + 'security/vulnerabilities/stackguard' in url.path: + ref_type = 
'Link' + elif 'security/cve' in url.path: + return + else: + old_ref = url.path.split('/')[-1] + refs = old_ref.split('-') + if ':' not in url.path: + try: + new_ref = f'{refs[0]}-{refs[1]}:{refs[2]}' + path = url.path.replace(old_ref, new_ref) + url = url._replace(path=path) + except IndexError: + pass + ref_type = refs[0].upper() + final_url = url.geturl() + if final_url in ['https://launchpad.net/bugs/', 'https://launchpad.net/bugs/XXXXXX']: + return + ref['ref_type'] = ref_type + ref['url'] = final_url + return ref + + +def get_or_create_reference(ref_type, url): + """ Get or create a Reference object. + """ + reference = fixup_reference({'ref_type': ref_type, 'url': url}) + if reference: + ref, created = Reference.objects.get_or_create( + ref_type=reference.get('ref_type'), + url=reference.get('url'), + ) + return ref diff --git a/security/views.py b/security/views.py index eec2bfa9..8a885e76 100644 --- a/security/views.py +++ b/security/views.py @@ -23,8 +23,9 @@ from packages.models import Package from operatingsystems.models import OSRelease -from security.models import CVE, CWE -from security.serializers import CVESerializer, CWESerializer +from security.models import CVE, CWE, Reference +from security.serializers import CVESerializer, CWESerializer, ReferenceSerializer +from util.filterspecs import Filter, FilterBar @login_required @@ -114,11 +115,56 @@ def cve_detail(request, cve_id): cve = get_object_or_404(CVE, cve_id=cve_id) packages = Package.objects.filter(erratum__in=cve.erratum_set.all()).distinct() osreleases = OSRelease.objects.filter(erratum__in=cve.erratum_set.all()).distinct() + references = Reference.objects.filter(Q(erratum__in=cve.erratum_set.all()) | Q(cve=cve)).distinct() return render(request, 'security/cve_detail.html', {'cve': cve, 'packages': packages, - 'osreleases': osreleases}) + 'osreleases': osreleases, + 'references': references, + }) + + +@login_required +def reference_list(request): + refs = 
Reference.objects.select_related().order_by('ref_type') + + if 'ref_type' in request.GET: + refs = refs.filter(ref_type=request.GET['ref_type']).distinct() + + if 'erratum_id' in request.GET: + refs = refs.filter(erratum__id=request.GET['erratum_id']) + + if 'search' in request.GET: + terms = request.GET['search'].lower() + query = Q() + for term in terms.split(' '): + q = Q(url__icontains=term) + query = query & q + refs = refs.filter(query) + else: + terms = '' + + page_no = request.GET.get('page') + paginator = Paginator(refs, 50) + + try: + page = paginator.page(page_no) + except PageNotAnInteger: + page = paginator.page(1) + except EmptyPage: + page = paginator.page(paginator.num_pages) + + filter_list = [] + filter_list.append(Filter(request, 'Reference Type', 'ref_type', + Reference.objects.values_list('ref_type', flat=True).distinct())) + filter_bar = FilterBar(request, filter_list) + + return render(request, + 'security/reference_list.html', + {'page': page, + 'filter_bar': filter_bar, + 'terms': terms}) @login_required @@ -138,3 +184,10 @@ class CVEViewSet(viewsets.ModelViewSet): """ queryset = CVE.objects.all() serializer_class = CVESerializer + + +class ReferenceViewSet(viewsets.ModelViewSet): + """ API endpoint that allows security references to be viewed or edited. 
+ """ + queryset = Reference.objects.all() + serializer_class = ReferenceSerializer From a1a639e0e07c177e2fb6d21f9f5b8122ac3a29d2 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 5 Mar 2025 18:08:59 -0500 Subject: [PATCH 137/199] default to 25 ProcessPoolWorkers --- errata/sources/distros/alma.py | 2 +- errata/sources/distros/arch.py | 2 +- errata/sources/distros/debian.py | 2 +- errata/sources/distros/rocky.py | 2 +- errata/sources/distros/ubuntu.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index a37f917c..24ff14a9 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -77,7 +77,7 @@ def process_alma_errata_concurrently(release, advisories): elen = len(advisories) pbar_start.send(sender=None, ptext=f'Processing {elen} Alma {release} Errata', plen=elen) i = 0 - with concurrent.futures.ProcessPoolExecutor(max_workers=10) as executor: + with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor: futures = [executor.submit(process_alma_erratum, release, advisory) for advisory in advisories] for future in concurrent.futures.as_completed(futures): i += 1 diff --git a/errata/sources/distros/arch.py b/errata/sources/distros/arch.py index a03f3860..db9c9013 100644 --- a/errata/sources/distros/arch.py +++ b/errata/sources/distros/arch.py @@ -69,7 +69,7 @@ def parse_arch_errata_concurrently(advisories): elen = len(advisories) pbar_start.send(sender=None, ptext=f'Processing {elen} Arch Advisories', plen=elen) i = 0 - with concurrent.futures.ProcessPoolExecutor(max_workers=3) as executor: + with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor: futures = [executor.submit(process_arch_erratum, advisory, osrelease) for advisory in advisories] for future in concurrent.futures.as_completed(futures): i += 1 diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index 78ae9ad4..00c7e618 100644 --- 
a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -142,7 +142,7 @@ def create_debian_errata_concurrently(errata, accepted_codenames): elen = len(errata) pbar_start.send(sender=None, ptext=f'Processing {elen} Debian Errata', plen=elen) i = 0 - with concurrent.futures.ProcessPoolExecutor(max_workers=200) as executor: + with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor: futures = [executor.submit(process_debian_erratum, erratum, accepted_codenames) for erratum in errata] for future in concurrent.futures.as_completed(futures): i += 1 diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py index b5714530..4a79e348 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -161,7 +161,7 @@ def process_rocky_errata_concurrently(advisories): elen = len(advisories) pbar_start.send(sender=None, ptext=f'Processing {elen} Rocky Errata', plen=elen) i = 0 - with concurrent.futures.ProcessPoolExecutor(max_workers=100) as executor: + with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor: futures = [executor.submit(process_rocky_erratum, advisory) for advisory in advisories] for future in concurrent.futures.as_completed(futures): i += 1 diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py index ed95c2b6..1ff626cf 100644 --- a/errata/sources/distros/ubuntu.py +++ b/errata/sources/distros/ubuntu.py @@ -90,7 +90,7 @@ def parse_usn_data_concurrently(advisories, accepted_releases): elen = len(advisories) pbar_start.send(sender=None, ptext=f'Processing {elen} Ubuntu Errata', plen=elen) i = 0 - with concurrent.futures.ProcessPoolExecutor(max_workers=20) as executor: + with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor: futures = [executor.submit(process_usn, usn_id, advisory, accepted_releases) for usn_id, advisory in advisories.items()] for future in concurrent.futures.as_completed(futures): From 
89949adea43bc15e50e10ffd2f86894c9f5583d9 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 5 Mar 2025 18:09:48 -0500 Subject: [PATCH 138/199] add incorrect erratum type to error output --- errata/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/errata/tasks.py b/errata/tasks.py index 9e1d1790..fd1f93fd 100644 --- a/errata/tasks.py +++ b/errata/tasks.py @@ -44,7 +44,7 @@ def update_errata(erratum_type=None): erratum_type_defaults = ['yum', 'rocky', 'alma', 'arch', 'ubuntu', 'debian'] if erratum_type: if erratum_type not in erratum_type_defaults: - error_message.send(sender=None, text=f'Erratum type must be one of {erratum_type_defaults}') + error_message.send(sender=None, text=f'Erratum type must be one of {erratum_type_defaults} - {erratum_type}') else: errata_os_updates = erratum_type else: From 0c99dd342711cfd6944e315e45808fbac20d8e58 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 5 Mar 2025 18:10:12 -0500 Subject: [PATCH 139/199] fix incorrect variable --- .../operatingsystems/operatingsystemrelease_table.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html b/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html index b08d05ff..6a7eae13 100644 --- a/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html +++ b/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html @@ -15,7 +15,7 @@ {% for osrelease in object_list %} {{ osrelease.name }} - {% if osrelease.codename %}{{ osrelease.cpe_name }}{% endif %} + {% if osrelease.cpe_name %}{{ osrelease.cpe_name }}{% endif %} {% if osrelease.codename %}{{ osrelease.codename }}{% endif %} {{ osrelease.repos.count }} {{ osrelease.osvariant_set.count }} From 6701f2e4b2bae1b3717d487283e375e12f3226cb Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 5 Mar 2025 18:14:24 -0500 Subject: [PATCH 140/199] update first.org 
link --- security/templates/security/cve_detail.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/security/templates/security/cve_detail.html b/security/templates/security/cve_detail.html index bdf77377..63f71385 100644 --- a/security/templates/security/cve_detail.html +++ b/security/templates/security/cve_detail.html @@ -30,7 +30,7 @@ CVSS Scores {% for score in cve.cvss_scores.all %} - {{ score.score }} - {{ score.severity }} (CVSS {{ score.version }})
    + {{ score.score }} - {{ score.severity }} (CVSS {{ score.version }}) {% bootstrap_icon "link" %}
    {% endfor %} From 6be5069a9a458aff31856627adca74012faf591b Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 5 Mar 2025 18:14:03 -0500 Subject: [PATCH 141/199] add NIST CVE data download for CVSS scores and references --- sbin/patchman | 10 ++++++-- security/models.py | 63 +++++++++++++++++++++++++++++++++++++++------- 2 files changed, 62 insertions(+), 11 deletions(-) diff --git a/sbin/patchman b/sbin/patchman index 67278fcb..897a58b5 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -442,6 +442,12 @@ def collect_args(): parser.add_argument( '-v', '--update-cves', action='store_true', help='Update CVEs from https://cve.org') + parser.add_argument( + '--cve', help="Only update the specified CVE (e.g. CVE-2024-1234)") + parser.add_argument( + '--download-nist-data', '-nd', action='store_true', + help='Download NIST CVE data in addition to MITRE data (rate-limited to 1 API call every 6 seconds)' + ) return parser @@ -521,8 +527,8 @@ def process_args(args): mark_errata_security_updates() showhelp = False if args.update_cves: - update_cves() - update_cwes() + update_cves(args.cve, args.download_nist_data) + update_cwes(args.cve) showhelp = False return showhelp diff --git a/security/models.py b/security/models.py index afaac1a0..f265a5dd 100644 --- a/security/models.py +++ b/security/models.py @@ -16,6 +16,7 @@ import json import re +from time import sleep from django.db import models from django.urls import reverse @@ -54,8 +55,8 @@ def int_id(self): def download_cwe_data(self): int_id = self.int_id - cwe_url = f'https://cwe-api.mitre.org/api/v1/cwe/{int_id}' - res = get_url(cwe_url) + mitre_cwe_url = f'https://cwe-api.mitre.org/api/v1/cwe/{int_id}' + res = get_url(mitre_cwe_url) data = download_url(res, f'Downloading {self.cwe_id} data') cwe_json = json.loads(data) if cwe_json == 'at least one CWE not found': @@ -108,17 +109,61 @@ def __str__(self): def get_absolute_url(self): return reverse('security:cve_detail', args=[self.cve_id]) - def 
download_cve_data(self): - cve_url = f'https://cveawg.mitre.org/api/cve/{self.cve_id}' - res = get_url(cve_url) + def download_cve_data(self, download_nist_data=False, sleep_secs=6): + self.download_mitre_cve_data() + if download_nist_data: + self.download_nist_cve_data() + sleep(sleep_secs) # rate limited, see https://nvd.nist.gov/developers/start-here + + def download_mitre_cve_data(self): + mitre_cve_url = f'https://cveawg.mitre.org/api/cve/{self.cve_id}' + res = get_url(mitre_cve_url) if res.status_code == 404: - error_message.send(sender=None, text=f'404 - Skipping {self.cve_id}') + error_message.send(sender=None, text=f'404 - Skipping {self.cve_id} - {mitre_cve_url}') return - data = download_url(res, f'Downloading {self.cve_id} data') + data = download_url(res, f'Downloading {self.cve_id} MITRE data') cve_json = json.loads(data) - self.parse_cve_data(cve_json) + self.parse_mitre_cve_data(cve_json) - def parse_cve_data(self, cve_json): + def download_nist_cve_data(self): + nist_cve_url = f'https://services.nvd.nist.gov/rest/json/cves/2.0?cveId={self.cve_id}' + res = get_url(nist_cve_url) + data = download_url(res, f'Downloading {self.cve_id} NIST data') + if res.status_code == 404: + error_message.send(sender=None, text=f'404 - Skipping {self.cve_id} - {nist_cve_url}') + cve_json = json.loads(data) + self.parse_nist_cve_data(cve_json) + + def parse_nist_cve_data(self, cve_json): + from security.utils import get_or_create_reference + vulnerabilites = cve_json.get('vulnerabilities') + for vulnerability in vulnerabilites: + cve = vulnerability.get('cve') + cve_id = cve.get('id') + if cve_id != self.cve_id: + error_message.send(sender=None, text=f'CVE ID mismatch - {self.cve_id} - {cve_id}') + return + metrics = cve.get('metrics') + for metric, score_data in metrics.items(): + if metric.startswith('cvss'): + for scores in score_data: + for key, value in scores.items(): + if key.startswith('cvssData'): + cvss_score, created = CVSS.objects.get_or_create( + 
score=value.get('baseScore'), + severity=value.get('baseSeverity'), + version=value.get('version'), + vector_string=value.get('vectorString'), + ) + self.cvss_scores.add(cvss_score) + references = cve.get('references') + for reference in references: + ref_type = 'Link' + url = reference.get('url') + ref = get_or_create_reference(ref_type=ref_type, url=url) + self.references.add(ref) + + def parse_mitre_cve_data(self, cve_json): cve_metadata = cve_json.get('cveMetadata') reserved_date = cve_metadata.get('dateReserved') if reserved_date: From 3c662d21bd32af5ece2e8030c8cc0ac2edced125 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 5 Mar 2025 20:04:35 -0500 Subject: [PATCH 142/199] flake8 --- errata/tasks.py | 2 +- errata/utils.py | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/errata/tasks.py b/errata/tasks.py index fd1f93fd..c0a644b2 100644 --- a/errata/tasks.py +++ b/errata/tasks.py @@ -44,7 +44,7 @@ def update_errata(erratum_type=None): erratum_type_defaults = ['yum', 'rocky', 'alma', 'arch', 'ubuntu', 'debian'] if erratum_type: if erratum_type not in erratum_type_defaults: - error_message.send(sender=None, text=f'Erratum type must be one of {erratum_type_defaults} - {erratum_type}') + error_message.send(sender=None, text=f'Erratum type `{erratum_type}` not in {erratum_type_defaults}') else: errata_os_updates = erratum_type else: diff --git a/errata/utils.py b/errata/utils.py index a1859b5f..4c915535 100644 --- a/errata/utils.py +++ b/errata/utils.py @@ -14,8 +14,6 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see -from urllib.parse import urlparse - from util import tz_aware_datetime from errata.models import Erratum from patchman.signals import pbar_start, pbar_update, warning_message From 58af5fccc556fcbb4fdd3a627d6a77d329126096 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 5 Mar 2025 20:15:05 -0500 Subject: [PATCH 143/199] prefer fetch over download --- .gitignore | 1 + errata/sources/distros/alma.py | 10 +++++----- errata/sources/distros/arch.py | 10 +++++----- errata/sources/distros/centos.py | 18 +++++++++--------- errata/sources/distros/debian.py | 26 +++++++++++++------------- errata/sources/distros/rocky.py | 30 +++++++++++++++--------------- errata/sources/distros/ubuntu.py | 20 ++++++++++---------- etc/patchman/local_settings.py | 4 ++-- repos/repo_types/arch.py | 2 +- repos/repo_types/deb.py | 4 ++-- repos/repo_types/gentoo.py | 6 +++--- repos/repo_types/rpm.py | 2 +- repos/repo_types/yast.py | 2 +- repos/repo_types/yum.py | 6 +++--- repos/utils.py | 8 ++++---- sbin/patchman | 6 +++--- security/models.py | 24 ++++++++++++------------ security/tasks.py | 4 ++-- security/utils.py | 12 ++++++------ util/__init__.py | 4 ++-- 20 files changed, 100 insertions(+), 99 deletions(-) diff --git a/.gitignore b/.gitignore index 1699ba8e..3a1397f5 100644 --- a/.gitignore +++ b/.gitignore @@ -15,3 +15,4 @@ run pyvenv.cfg .vscode .venv +*.xml diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index 24ff14a9..eaed150c 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -20,7 +20,7 @@ from operatingsystems.utils import get_or_create_osrelease from packages.models import Package from packages.utils import get_or_create_package, parse_package_string -from util import get_url, download_url, get_setting_of_type +from util import get_url, fetch_content, get_setting_of_type from patchman.signals import pbar_start, pbar_update @@ -37,17 +37,17 @@ def update_alma_errata(concurrent_processing=True): 
default=default_alma_releases, ) for release in alma_releases: - advisories = download_alma_advisories(release) + advisories = fetch_alma_advisories(release) process_alma_errata(release, advisories, concurrent_processing) -def download_alma_advisories(release): - """ Download Alma Linux advisories +def fetch_alma_advisories(release): + """ Fetch Alma Linux advisories """ alma_errata_url = f'https://errata.almalinux.org/{release}/errata.full.json' headers = {'Accept': 'application/json', 'Cache-Control': 'no-cache, no-tranform'} res = get_url(alma_errata_url, headers=headers) - data = download_url(res, f'Downloading Alma {release} Errata') + data = fetch_content(res, f'Fetching Alma {release} Errata') advisories = json.loads(data).get('data') return advisories diff --git a/errata/sources/distros/arch.py b/errata/sources/distros/arch.py index db9c9013..8ef7440a 100644 --- a/errata/sources/distros/arch.py +++ b/errata/sources/distros/arch.py @@ -21,7 +21,7 @@ from patchman.signals import error_message, pbar_start, pbar_update from packages.models import Package from packages.utils import find_evr, get_matching_packages -from util import get_url, download_url +from util import get_url, fetch_content def update_arch_errata(concurrent_processing=False): @@ -29,16 +29,16 @@ def update_arch_errata(concurrent_processing=False): https://security.archlinux.org/advisories.json """ add_arch_linux_osrelease() - advisories = download_arch_errata() + advisories = fetch_arch_errata() parse_arch_errata(advisories, concurrent_processing) -def download_arch_errata(): - """ Download Arch Linux Errata Advisories +def fetch_arch_errata(): + """ Fetch Arch Linux Errata Advisories https://security.archlinux.org/advisories.json """ res = get_url('https://security.archlinux.org/advisories.json') - advisories = download_url(res, 'Downloading Arch Advisories') + advisories = fetch_content(res, 'Fetching Arch Advisories') return json.loads(advisories) diff --git 
a/errata/sources/distros/centos.py b/errata/sources/distros/centos.py index 33905337..9b0017a9 100644 --- a/errata/sources/distros/centos.py +++ b/errata/sources/distros/centos.py @@ -21,15 +21,15 @@ from packages.models import Package from packages.utils import parse_package_string, get_or_create_package from patchman.signals import error_message, pbar_start, pbar_update -from util import bunzip2, get_url, download_url, get_sha1, get_setting_of_type +from util import bunzip2, get_url, fetch_content, get_sha1, get_setting_of_type def update_centos_errata(): """ Update CentOS errata from https://cefs.steve-meier.de/ """ - data = download_centos_errata_checksum() + data = fetch_centos_errata_checksum() expected_checksum = parse_centos_errata_checksum(data) - data = download_centos_errata() + data = fetch_centos_errata() actual_checksum = get_sha1(data) if actual_checksum != expected_checksum: e = 'CEFS checksum mismatch, skipping CentOS errata parsing\n' @@ -40,18 +40,18 @@ def update_centos_errata(): parse_centos_errata(bunzip2(data)) -def download_centos_errata_checksum(): - """ Download CentOS errata checksum from https://cefs.steve-meier.de/ +def fetch_centos_errata_checksum(): + """ Fetch CentOS errata checksum from https://cefs.steve-meier.de/ """ res = get_url('https://cefs.steve-meier.de/errata.latest.sha1') - return download_url(res, 'Downloading CentOS Errata Checksum:') + return fetch_content(res, 'Fetching CentOS Errata Checksum') -def download_centos_errata(): - """ Download CentOS errata from https://cefs.steve-meier.de/ +def fetch_centos_errata(): + """ Fetch CentOS errata from https://cefs.steve-meier.de/ """ res = get_url('https://cefs.steve-meier.de/errata.latest.xml.bz2') - return download_url(res, 'Downloading CentOS Errata:') + return fetch_content(res, 'Fetching CentOS Errata') def parse_centos_errata_checksum(data): diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index 00c7e618..3a6753b5 100644 --- 
a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -27,7 +27,7 @@ from packages.models import Package from packages.utils import get_or_create_package, find_evr from patchman.signals import error_message, pbar_start, pbar_update -from util import get_url, download_url, get_setting_of_type +from util import get_url, fetch_content, get_setting_of_type def update_debian_errata(concurrent_processing=True): @@ -37,29 +37,29 @@ """ codenames = retrieve_debian_codenames() create_debian_os_releases(codenames) - dsas = download_debian_dsa_advisories() - dlas = download_debian_dla_advisories() + dsas = fetch_debian_dsa_advisories() + dlas = fetch_debian_dla_advisories() advisories = dsas + dlas accepted_codenames = get_accepted_debian_codenames() errata = parse_debian_errata(advisories, accepted_codenames) create_debian_errata(errata, accepted_codenames, concurrent_processing) -def download_debian_dsa_advisories(): - """ Download the current Debian DLA file +def fetch_debian_dsa_advisories(): + """ Fetch the current Debian DSA file """ debian_dsa_url = 'https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DSA/list' res = get_url(debian_dsa_url) - data = download_url(res, 'Downloading Debian DSAs') + data = fetch_content(res, 'Fetching Debian DSAs') return data.decode() -def download_debian_dla_advisories(): - """ Download the current Debian DSA file +def fetch_debian_dla_advisories(): + """ Fetch the current Debian DLA file """ debian_dsa_url = 'https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DLA/list' res = get_url(debian_dsa_url) - data = download_url(res, 'Downloading Debian DLAs') + data = fetch_content(res, 'Fetching Debian DLAs') return data.decode() @@ -194,8 +194,8 @@ stop=stop_after_attempt(10), wait=wait_exponential(multiplier=1, min=2, max=15), ) -def 
download_debian_package_dsc(codename, package, version): - """ Download a DSC file for the given source package +def fetch_debian_package_dsc(codename, package, version): + """ Fetch a DSC file for the given source package From this we can determine which packages are built from a given source package """ @@ -229,7 +229,7 @@ def retrieve_debian_codenames(): """ distro_info_url = 'https://debian.pages.debian.net/distro-info-data/debian.csv' res = get_url(distro_info_url) - debian_csv = download_url(res, 'Downloading Debian distro data') + debian_csv = fetch_content(res, 'Fetching Debian distro data') reader = csv.DictReader(StringIO(debian_csv.decode())) codename_to_version = {} for row in reader: @@ -253,7 +253,7 @@ def process_debian_erratum_affected_packages(e, package_data): """ Process packages affected by Debian errata """ codename, source_package, source_version = package_data - dsc = download_debian_package_dsc(codename, source_package, source_version) + dsc = fetch_debian_package_dsc(codename, source_package, source_version) if not dsc: return epoch, ver, rel = find_evr(str(dsc.get_version())) diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py index 4a79e348..0a4ce41d 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -24,7 +24,7 @@ from packages.models import Package from packages.utils import parse_package_string, get_or_create_package from patchman.signals import pbar_start, pbar_update -from util import get_url, download_url, info_message, error_message +from util import get_url, fetch_content, info_message, error_message def update_rocky_errata(concurrent_processing=True): @@ -33,7 +33,7 @@ def update_rocky_errata(concurrent_processing=True): rocky_errata_api_host = 'https://apollo.build.resf.org' rocky_errata_api_url = '/api/v3/' if check_rocky_errata_endpoint_health(rocky_errata_api_host): - advisories = download_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url, 
concurrent_processing) + advisories = fetch_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url, concurrent_processing) process_rocky_errata(advisories, concurrent_processing) @@ -44,7 +44,7 @@ def check_rocky_errata_endpoint_health(rocky_errata_api_host): rocky_errata_healthcheck_url = rocky_errata_api_host + rocky_errata_healthcheck_path headers = {'Accept': 'application/json'} res = get_url(rocky_errata_healthcheck_url, headers=headers) - data = download_url(res, 'Rocky Linux Errata API healthcheck') + data = fetch_content(res, 'Rocky Linux Errata API healthcheck') try: health = json.loads(data) if health.get('status') == 'ok': @@ -62,17 +62,17 @@ def check_rocky_errata_endpoint_health(rocky_errata_api_host): return False -def download_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url, concurrent_processing): - """ Download Rocky Linux advisories and return the list +def fetch_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url, concurrent_processing): + """ Fetch Rocky Linux advisories and return the list """ if concurrent_processing: - return download_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_api_url) + return fetch_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_api_url) else: - return download_rocky_advisories_serially(rocky_errata_api_host, rocky_errata_api_url) + return fetch_rocky_advisories_serially(rocky_errata_api_host, rocky_errata_api_url) -def download_rocky_advisories_serially(rocky_errata_api_host, rocky_errata_api_url): - """ Download Rocky Linux advisories serially and return the list +def fetch_rocky_advisories_serially(rocky_errata_api_host, rocky_errata_api_url): + """ Fetch Rocky Linux advisories serially and return the list """ rocky_errata_advisories_url = rocky_errata_api_host + rocky_errata_api_url + 'advisories/' headers = {'Accept': 'application/json'} @@ -82,7 +82,7 @@ def download_rocky_advisories_serially(rocky_errata_api_host, rocky_errata_api_u params = 
{'page': 1, 'size': 100} while True: res = get_url(rocky_errata_advisories_url, headers=headers, params=params) - data = download_url(res, f'Rocky Advisories {page}{"/"+pages if pages else ""}') + data = fetch_content(res, f'Rocky Advisories {page}{"/"+pages if pages else ""}') advisories_dict = json.loads(data) advisories += advisories_dict.get('advisories') links = advisories_dict.get('links') @@ -99,20 +99,20 @@ def download_rocky_advisories_serially(rocky_errata_api_host, rocky_errata_api_u return advisories -def download_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_api_url): - """ Download Rocky Linux advisories concurrently and return the list +def fetch_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_api_url): + """ Fetch Rocky Linux advisories concurrently and return the list """ rocky_errata_advisories_url = rocky_errata_api_host + rocky_errata_api_url + 'advisories/' headers = {'Accept': 'application/json'} advisories = [] params = {'page': 1, 'size': 100} res = get_url(rocky_errata_advisories_url, headers=headers, params=params) - data = download_url(res, 'Rocky Advisories Page 1') + data = fetch_content(res, 'Rocky Advisories Page 1') advisories_dict = json.loads(data) links = advisories_dict.get('links') last_link = links.get('last') pages = int(last_link.split('=')[-1]) - ptext = 'Downloading Rocky Advisories' + ptext = 'Fetching Rocky Advisories' pbar_start.send(sender=None, ptext=ptext, plen=pages) i = 0 with concurrent.futures.ThreadPoolExecutor(max_workers=100) as executor: @@ -126,7 +126,7 @@ def download_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_a def get_rocky_advisory(rocky_errata_advisories_url, page): - """ Download a single Rocky Linux advisory + """ Fetch a single Rocky Linux advisory """ headers = {'Accept': 'application/json'} params = {'page': page, 'size': 100} diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py index 1ff626cf..5490909e 100644 
--- a/errata/sources/distros/ubuntu.py +++ b/errata/sources/distros/ubuntu.py @@ -25,7 +25,7 @@ from operatingsystems.utils import get_or_create_osrelease from packages.models import Package, PackageName from packages.utils import get_or_create_package, parse_package_string, find_evr -from util import get_url, download_url, get_sha256, bunzip2, get_setting_of_type +from util import get_url, fetch_content, get_sha256, bunzip2, get_setting_of_type from patchman.signals import error_message, pbar_start, pbar_update @@ -34,9 +34,9 @@ def update_ubuntu_errata(concurrent_processing=False): """ codenames = retrieve_ubuntu_codenames() create_ubuntu_os_releases(codenames) - data = download_ubuntu_usn_db() + data = fetch_ubuntu_usn_db() if data: - expected_checksum = download_ubuntu_usn_db_checksum() + expected_checksum = fetch_ubuntu_usn_db_checksum() actual_checksum = get_sha256(data) if actual_checksum == expected_checksum: parse_usn_data(data, concurrent_processing) @@ -46,20 +46,20 @@ def update_ubuntu_errata(concurrent_processing=False): error_message.send(sender=None, text=e) -def download_ubuntu_usn_db(): - """ Download the Ubuntu USN database +def fetch_ubuntu_usn_db(): + """ Fetch the Ubuntu USN database """ ubuntu_usn_db_json_url = 'https://usn.ubuntu.com/usn-db/database.json.bz2' res = get_url(ubuntu_usn_db_json_url) - return download_url(res, 'Downloading Ubuntu Errata') + return fetch_content(res, 'Fetching Ubuntu Errata') -def download_ubuntu_usn_db_checksum(): - """ Download the Ubuntu USN database checksum +def fetch_ubuntu_usn_db_checksum(): + """ Fetch the Ubuntu USN database checksum """ ubuntu_usn_db_checksum_url = 'https://usn.ubuntu.com/usn-db/database.json.bz2.sha256' res = get_url(ubuntu_usn_db_checksum_url) - return download_url(res, 'Downloading Ubuntu Errata Checksum').decode().split()[0] + return fetch_content(res, 'Fetching Ubuntu Errata Checksum').decode().split()[0] def parse_usn_data(data, concurrent_processing): @@ -214,7 +214,7 @@ def 
retrieve_ubuntu_codenames(): """ distro_info_url = 'https://debian.pages.debian.net/distro-info-data/ubuntu.csv' res = get_url(distro_info_url) - ubuntu_csv = download_url(res, 'Downloading Ubuntu distro data') + ubuntu_csv = fetch_content(res, 'Fetching Ubuntu distro data') reader = csv.DictReader(StringIO(ubuntu_csv.decode())) codename_to_version = {} for row in reader: diff --git a/etc/patchman/local_settings.py b/etc/patchman/local_settings.py index f71d42fd..a8269f5d 100644 --- a/etc/patchman/local_settings.py +++ b/etc/patchman/local_settings.py @@ -33,13 +33,13 @@ ALLOWED_HOSTS = ['127.0.0.1', '*'] # Maximum number of mirrors to add or refresh per repo -MAX_MIRRORS = 3 +MAX_MIRRORS = 2 # Number of days to wait before raising that a host has not reported DAYS_WITHOUT_REPORT = 14 # Whether to run patchman under the gunicorn web server -RUN_GUNICORN = False +RUN_GUNICORN = True CACHES = { 'default': { diff --git a/repos/repo_types/arch.py b/repos/repo_types/arch.py index ed539a3b..6e85b153 100644 --- a/repos/repo_types/arch.py +++ b/repos/repo_types/arch.py @@ -47,7 +47,7 @@ def refresh_arch_repo(repo): package_data = fetch_mirror_data( mirror=mirror, url=mirror_url, - text='Downloading Repo data') + text='Fetching Repo data') if not package_data: continue diff --git a/repos/repo_types/deb.py b/repos/repo_types/deb.py index ce5ea6c5..25d8eba7 100644 --- a/repos/repo_types/deb.py +++ b/repos/repo_types/deb.py @@ -68,7 +68,7 @@ def extract_deb_packages(data, url): def refresh_deb_repo(repo): """ Refresh a debian repo. Checks for the Packages* files to determine what the mirror urls - are and then downloads and extracts packages from those files. + are and then fetches and extracts packages from those files. 
""" formats = ['Packages.xz', 'Packages.bz2', 'Packages.gz', 'Packages'] @@ -86,7 +86,7 @@ def refresh_deb_repo(repo): package_data = fetch_mirror_data( mirror=mirror, url=mirror_url, - text='Downloading Repo data') + text='Fetching Repo data') if not package_data: continue diff --git a/repos/repo_types/gentoo.py b/repos/repo_types/gentoo.py index 7aed5390..fb27ff74 100644 --- a/repos/repo_types/gentoo.py +++ b/repos/repo_types/gentoo.py @@ -29,7 +29,7 @@ from packages.utils import find_evr from patchman.signals import info_message, warning_message, error_message, pbar_start, pbar_update from repos.utils import add_mirrors_from_urls, mirror_checksum_is_valid, update_mirror_packages -from util import extract, get_url, get_datetime_now, get_checksum, Checksum, download_url, response_is_valid +from util import extract, get_url, get_datetime_now, get_checksum, Checksum, fetch_content, response_is_valid def refresh_gentoo_main_repo(repo): @@ -243,7 +243,7 @@ def refresh_gentoo_repo(repo): ts = get_datetime_now() for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True): res = get_url(mirror.url + '.md5sum') - data = download_url(res, 'Downloading Repo checksum') + data = fetch_content(res, 'Fetching Repo checksum') if data is None: mirror.fail() continue @@ -258,7 +258,7 @@ def refresh_gentoo_repo(repo): res = get_url(mirror.url) mirror.last_access_ok = response_is_valid(res) if mirror.last_access_ok: - data = download_url(res, 'Downloading Repo data') + data = fetch_content(res, 'Fetching Repo data') if data is None: mirror.fail() continue diff --git a/repos/repo_types/rpm.py b/repos/repo_types/rpm.py index 27aef833..aa3354c7 100644 --- a/repos/repo_types/rpm.py +++ b/repos/repo_types/rpm.py @@ -79,7 +79,7 @@ def refresh_rpm_repo_mirrors(repo, errata_only=False): repo_data = fetch_mirror_data( mirror=mirror, url=mirror_url, - text='Downloading Repo data') + text='Fetching Repo data') if not repo_data: continue diff --git 
a/repos/repo_types/yast.py b/repos/repo_types/yast.py index a14e69af..0ef54358 100644 --- a/repos/repo_types/yast.py +++ b/repos/repo_types/yast.py @@ -32,7 +32,7 @@ def refresh_yast_repo(mirror, data): package_data = fetch_mirror_data( mirror=mirror, url=package_url, - text='Downloading yast Repo data') + text='Fetching yast Repo data') if not package_data: return diff --git a/repos/repo_types/yum.py b/repos/repo_types/yum.py index e69434f0..e8a5791f 100644 --- a/repos/repo_types/yum.py +++ b/repos/repo_types/yum.py @@ -169,7 +169,7 @@ def refresh_repomd_updateinfo(mirror, data, mirror_url): url=url, checksum=checksum, checksum_type=checksum_type, - text='Downloading Errata data', + text='Fetching Errata data', metadata_type='updateinfo') if not mirror.last_access_ok: @@ -198,7 +198,7 @@ def refresh_repomd_modules(mirror, data, mirror_url): url=url, checksum=checksum, checksum_type=checksum_type, - text='Downloading Module data', + text='Fetching Module data', metadata_type='module') if not mirror.last_access_ok: @@ -226,7 +226,7 @@ def refresh_repomd_primary(mirror, data, mirror_url): url=url, checksum=checksum, checksum_type=checksum_type, - text='Downloading Package data', + text='Fetching Package data', metadata_type='package') if not mirror.last_access_ok: diff --git a/repos/utils.py b/repos/utils.py index 934d3996..003b612e 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -26,7 +26,7 @@ from packages.models import Package from packages.utils import convert_package_to_packagestring, convert_packagestring_to_package -from util import get_url, download_url, response_is_valid, extract, get_checksum, Checksum, get_setting_of_type +from util import get_url, fetch_content, response_is_valid, extract, get_checksum, Checksum, get_setting_of_type from patchman.signals import info_message, warning_message, error_message, debug_message, pbar_start, pbar_update @@ -117,7 +117,7 @@ def get_metalink_urls(url): if not res.headers.get('content-type') == 
'application/metalink+xml': return metalink_urls = [] - data = download_url(res, 'Downloading metalink data') + data = fetch_content(res, 'Fetching metalink data') extracted = extract(data, url) ns = 'http://www.metalinker.org/' try: @@ -148,7 +148,7 @@ def get_mirrorlist_urls(url): return if response_is_valid(res): try: - data = download_url(res, 'Downloading Repo data') + data = fetch_content(res, 'Fetching Repo data') if data is None: return mirror_urls = re.findall(r'^http[s]*://.*$|^ftp://.*$', data.decode('utf-8'), re.MULTILINE) @@ -232,7 +232,7 @@ def fetch_mirror_data(mirror, url, text, checksum=None, checksum_type=None, meta mirror.last_access_ok = True mirror.save() - data = download_url(res, text) + data = fetch_content(res, text) if not data: return diff --git a/sbin/patchman b/sbin/patchman index 897a58b5..72b2f012 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -445,8 +445,8 @@ def collect_args(): parser.add_argument( '--cve', help="Only update the specified CVE (e.g. CVE-2024-1234)") parser.add_argument( - '--download-nist-data', '-nd', action='store_true', - help='Download NIST CVE data in addition to MITRE data (rate-limited to 1 API call every 6 seconds)' + '--fetch-nist-data', '-nd', action='store_true', + help='Fetch NIST CVE data in addition to MITRE data (rate-limited to 1 API call every 6 seconds)' ) return parser @@ -527,7 +527,7 @@ def process_args(args): mark_errata_security_updates() showhelp = False if args.update_cves: - update_cves(args.cve, args.download_nist_data) + update_cves(args.cve, args.fetch_nist_data) update_cwes(args.cve) showhelp = False return showhelp diff --git a/security/models.py b/security/models.py index f265a5dd..4929bbf2 100644 --- a/security/models.py +++ b/security/models.py @@ -22,7 +22,7 @@ from django.urls import reverse from security.managers import CVEManager -from util import get_url, download_url, tz_aware_datetime, error_message +from util import get_url, fetch_content, tz_aware_datetime, error_message 
class Reference(models.Model): @@ -53,11 +53,11 @@ def get_absolute_url(self): def int_id(self): return int(self.cwe_id.split('-')[1]) - def download_cwe_data(self): + def fetch_cwe_data(self): int_id = self.int_id mitre_cwe_url = f'https://cwe-api.mitre.org/api/v1/cwe/{int_id}' res = get_url(mitre_cwe_url) - data = download_url(res, f'Downloading {self.cwe_id} data') + data = fetch_content(res, f'Fetching {self.cwe_id} data') cwe_json = json.loads(data) if cwe_json == 'at least one CWE not found': return @@ -65,7 +65,7 @@ def download_cwe_data(self): if cwe.get('Type').endswith('weakness'): weakness_url = f'https://cwe-api.mitre.org/api/v1/cwe/weakness/{int_id}' res = get_url(weakness_url) - data = download_url(res, f'Downloading {self.cwe_id} weakness data') + data = fetch_content(res, f'Fetching {self.cwe_id} weakness data') weakness_json = json.loads(data) for weakness in weakness_json.get('Weaknesses'): if int(weakness.get('ID')) == int_id: @@ -109,26 +109,26 @@ def __str__(self): def get_absolute_url(self): return reverse('security:cve_detail', args=[self.cve_id]) - def download_cve_data(self, download_nist_data=False, sleep_secs=6): - self.download_mitre_cve_data() - if download_nist_data: - self.download_nist_cve_data() + def fetch_cve_data(self, fetch_nist_data=False, sleep_secs=6): + self.fetch_mitre_cve_data() + if fetch_nist_data: + self.fetch_nist_cve_data() sleep(sleep_secs) # rate limited, see https://nvd.nist.gov/developers/start-here - def download_mitre_cve_data(self): + def fetch_mitre_cve_data(self): mitre_cve_url = f'https://cveawg.mitre.org/api/cve/{self.cve_id}' res = get_url(mitre_cve_url) if res.status_code == 404: error_message.send(sender=None, text=f'404 - Skipping {self.cve_id} - {mitre_cve_url}') return - data = download_url(res, f'Downloading {self.cve_id} MITRE data') + data = fetch_content(res, f'Fetching {self.cve_id} MITRE data') cve_json = json.loads(data) self.parse_mitre_cve_data(cve_json) - def download_nist_cve_data(self): + 
def fetch_nist_cve_data(self): nist_cve_url = f'https://services.nvd.nist.gov/rest/json/cves/2.0?cveId={self.cve_id}' res = get_url(nist_cve_url) - data = download_url(res, f'Downloading {self.cve_id} NIST data') + data = fetch_content(res, f'Fetching {self.cve_id} NIST data') if res.status_code == 404: error_message.send(sender=None, text=f'404 - Skipping {self.cve_id} - {nist_cve_url}') cve_json = json.loads(data) diff --git a/security/tasks.py b/security/tasks.py index 9250a4ae..a04bb1c8 100644 --- a/security/tasks.py +++ b/security/tasks.py @@ -24,7 +24,7 @@ def update_cve(cve_id): """ Task to update a CVE """ cve = CVE.objects.get(id=cve_id) - cve.download_cve_data() + cve.fetch_cve_data() @shared_task @@ -40,7 +40,7 @@ def update_cwe(cwe_id): """ Task to update a CWE """ cwe = CWE.objects.get(id=cwe_id) - cwe.download_cwe_data() + cwe.fetch_cwe_data() @shared_task diff --git a/security/utils.py b/security/utils.py index d106e9f2..ca67af32 100644 --- a/security/utils.py +++ b/security/utils.py @@ -33,20 +33,20 @@ def get_or_create_cve(cve_id): return cve -def update_cves(cve_id=None, download_nist_data=False): - """ Download the latest CVE data from the CVE API. +def update_cves(cve_id=None, fetch_nist_data=False): + """ Fetch the latest CVE data from the CVE API. e.g. https://cveawg.mitre.org/api/cve/CVE-2024-1234 """ if cve_id: cve = CVE.objects.get(cve_id=cve_id) - cve.download_cve_data(download_nist_data, sleep_secs=0) + cve.fetch_cve_data(fetch_nist_data, sleep_secs=0) else: for cve in CVE.objects.all(): - cve.download_cve_data(download_nist_data) + cve.fetch_cve_data(fetch_nist_data) def update_cwes(cve_id=None): - """ Download the latest CWEs from the CWE API. + """ Fetch the latest CWEs from the CWE API. e.g. 
https://cwe-api.mitre.org/api/v1/cwe/74,79 https://cwe-api.mitre.org/api/v1/cwe/weakness/79 """ @@ -56,7 +56,7 @@ def update_cwes(cve_id=None): else: cwes = CWE.objects.all() for cwe in cwes: - cwe.download_cwe_data() + cwe.fetch_cwe_data() def fixup_reference(ref): diff --git a/util/__init__.py b/util/__init__.py index 742239da..6744e1e8 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -75,8 +75,8 @@ def update_pbar(index, **kwargs): pbar = None -def download_url(response, text='', ljust=35): - """ Display a progress bar to download the request content if verbose is +def fetch_content(response, text='', ljust=35): + """ Display a progress bar to fetch the request content if verbose is True. Otherwise, just return the request content """ global verbose From 2c512d4e5513b5e4d6fcf91ffd8c93153113798d Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 5 Mar 2025 21:12:41 -0500 Subject: [PATCH 144/199] don't create osvariant with no arch --- errata/sources/distros/arch.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/errata/sources/distros/arch.py b/errata/sources/distros/arch.py index 8ef7440a..4821ca13 100644 --- a/errata/sources/distros/arch.py +++ b/errata/sources/distros/arch.py @@ -17,7 +17,7 @@ import concurrent.futures import json -from operatingsystems.utils import get_or_create_osrelease, get_or_create_osvariant +from operatingsystems.utils import get_or_create_osrelease from patchman.signals import error_message, pbar_start, pbar_update from packages.models import Package from packages.utils import find_evr, get_matching_packages @@ -102,8 +102,7 @@ def process_arch_erratum(advisory, osrelease): def add_arch_linux_osrelease(): """ Add Arch Linux OSRelease and link existing OSVariants """ - osrelease = get_or_create_osrelease(name='Arch Linux') - get_or_create_osvariant(name='Arch Linux', osrelease=osrelease) + get_or_create_osrelease(name='Arch Linux') def add_arch_erratum_references(e, advisory): From 
6ccf5afe9bd736006413133e481613fa1918638c Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 5 Mar 2025 21:41:10 -0500 Subject: [PATCH 145/199] handle cvss v2 scores better --- security/templates/security/cve_detail.html | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/security/templates/security/cve_detail.html b/security/templates/security/cve_detail.html index 63f71385..28ef831c 100644 --- a/security/templates/security/cve_detail.html +++ b/security/templates/security/cve_detail.html @@ -30,7 +30,11 @@ CVSS Scores {% for score in cve.cvss_scores.all %} - {{ score.score }} - {{ score.severity }} (CVSS {{ score.version }}) {% bootstrap_icon "link" %}
    + {% if score.version|stringformat:"d" == "2" %} + {{ score.score }} (CVSS {{ score.version }}) {% bootstrap_icon "link" %}
    + {% else %} + {{ score.score }} - {{ score.severity }} (CVSS {{ score.version }}) {% bootstrap_icon "link" %}
    + {% endif %} {% endfor %} From d2d778211ca7e19a35a07a0ffc8bc79c067f35c4 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 5 Mar 2025 23:01:10 -0500 Subject: [PATCH 146/199] use upstream nvd cpe names --- reports/utils.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/reports/utils.py b/reports/utils.py index eb5b7481..e0075226 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -410,17 +410,13 @@ def get_os(os, arch): if os.startswith('Gentoo'): osrelease_name = 'Gentoo Linux' - # presumptive, can be changed once a real cpe is assigned/used - cpe_name = 'cpe:2.3:o:gentoo:gentoo_linux:::' + cpe_name = 'cpe:2.3:o:gentoo:linux:-:*:*:*:*:*:*:*' elif os.startswith('Arch'): - # presumptive, can be changed once a real cpe is assigned/used - cpe_name = 'cpe:2.3:o:archlinux:arch_linux:::' + cpe_name = 'cpe:2.3:o:archlinux:arch_linux:-:*:*:*:*:*:*:*' elif os.startswith('Debian'): major, minor = os.split(' ')[1].split('.') - debian_version = f'{major}.{minor}' osrelease_name = f'Debian {major}' - # presumptive, can be changed once a real cpe is assigned/used - cpe_name = f'cpe:2.3:o:debian:debian_linux:{debian_version}::' + cpe_name = f'cpe:2.3:o:debian:debian_linux:{major}.0:*:*:*:*:*:*:*' elif os.startswith('Ubuntu'): lts = '' if 'LTS' in os: @@ -428,7 +424,7 @@ def get_os(os, arch): major, minor, patch = os.split(' ')[1].split('.') ubuntu_version = f'{major}_{minor}' osrelease_name = f'Ubuntu {major}.{minor}{lts}' - cpe_name = f'cpe:2.3:o:canonical:ubuntu_linux:{ubuntu_version}::' + cpe_name = f"cpe:2.3:o:canonical:ubuntu_linux:{ubuntu_version}:*:*:*:{'lts' if lts else '*'}:*:*:*" elif os.startswith('AlmaLinux'): osvariant_name = os.replace('AlmaLinux', 'Alma Linux') osrelease_name = osvariant_name.split('.')[0] From 57f8d666dab502e3c8bedab1e7b8d03e606b0a7b Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 13:01:39 -0400 Subject: [PATCH 147/199] fix metalink detection --- client/patchman-client | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/client/patchman-client b/client/patchman-client index 7188a675..44215456 100755 --- a/client/patchman-client +++ b/client/patchman-client @@ -467,7 +467,7 @@ get_repos() { fi # replace this with a dedicated awk or simple python script? yum_repolist=$(yum repolist enabled --verbose 2>/dev/null | sed -e "s/:\? *([0-9]\+ more)$//g" -e "s/ ([0-9]\+$//g" -e "s/:\? more)$//g" -e "s/'//g" -e "s/%/%%/g") - for i in $(echo "${yum_repolist}" | awk '{ if ($1=="Repo-id") {printf "'"'"'"; for (i=3; i Date: Sun, 9 Mar 2025 13:02:49 -0400 Subject: [PATCH 148/199] remove square brackets from urls --- client/patchman-client | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/patchman-client b/client/patchman-client index 44215456..1addc6ae 100755 --- a/client/patchman-client +++ b/client/patchman-client @@ -488,7 +488,7 @@ get_repos() { if [ ! -z ${CPE_NAME} ] ; then id="${CPE_NAME}-${id}" fi - j=$(echo ${i} | sed -e "s#'${full_id}' '${name}'#'${name}' '${id}' '${priority}'#") + j=$(echo ${i} | sed -e "s#'${full_id}' '${name}'#'${name}' '${id}' '${priority}'#" | sed -e "s/'\[/'/g" -e "s/\]'/'/g") echo "'rpm' ${j}" >> "${tmpfile_rep}" unset priority done From c4fb3befa7dad4e728d8d2e6fa4fb95ac7e034c0 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 13:04:14 -0400 Subject: [PATCH 149/199] use python etree for centos --- errata/sources/distros/centos.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/errata/sources/distros/centos.py b/errata/sources/distros/centos.py index 9b0017a9..cbfd0a2b 100644 --- a/errata/sources/distros/centos.py +++ b/errata/sources/distros/centos.py @@ -15,7 +15,7 @@ # along with Patchman. 
If not, see import re -from lxml import etree +from defusedxml import ElementTree as ET from operatingsystems.utils import get_or_create_osrelease from packages.models import Package @@ -65,7 +65,7 @@ def parse_centos_errata_checksum(data): def parse_centos_errata(data): """ Parse CentOS errata from https://cefs.steve-meier.de/ """ - result = etree.XML(data) + result = ET.XML(data) errata_xml = result.findall('*') elen = len(errata_xml) pbar_start.send(sender=None, ptext=f'Processing {elen} CentOS Errata', plen=elen) @@ -76,7 +76,7 @@ def parse_centos_errata(data): continue e = parse_centos_errata_tag(child.tag, child.attrib) if e is not None: - parse_centos_errata_children(e, child.getchildren()) + parse_centos_errata_children(e, child.iter()) def parse_centos_errata_tag(name, attribs): From 22c42e32098409eb583e331a0b4068f71c222c60 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 13:05:39 -0400 Subject: [PATCH 150/199] don't create all-arch packages --- errata/sources/distros/debian.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index 3a6753b5..bd0aef6f 100644 --- a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -288,11 +288,12 @@ def process_debian_dsc_arches(arches): 'ppc64el', 'riscv64', 's390x', - 'all', # architecture-independent packages ] for arch in arches.split(','): if arch == 'any': return official_ports + elif arch == 'all': + return ['all'] # architecture-independent packages elif arch in official_ports: accepted_arches.append(arch) continue From 20a8e562c499a11548735dda0f33503260fc619f Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 13:08:35 -0400 Subject: [PATCH 151/199] add support for Amazon Linux 1 --- errata/sources/repos/yum.py | 2 ++ reports/utils.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/errata/sources/repos/yum.py b/errata/sources/repos/yum.py index 36e5d1f8..45fe08ad 
100644 --- a/errata/sources/repos/yum.py +++ b/errata/sources/repos/yum.py @@ -128,6 +128,8 @@ def add_updateinfo_osreleases(e, collection, osrelease_names): elif osrelease_name.startswith('rocky-linux'): version = osrelease_name.split('-')[2] osrelease_name = 'Rocky Linux ' + version + elif osrelease_name in ['Amazon Linux', 'Amazon Linux AMI']: + osrelease_name = 'Amazon Linux 1' osrelease = get_or_create_osrelease(name=osrelease_name) e.osreleases.add(osrelease) diff --git a/reports/utils.py b/reports/utils.py index e0075226..54b2fa60 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -443,6 +443,8 @@ def get_os(os, arch): elif os.startswith('Oracle'): osvariant_name = os.replace(' Server', '') osrelease_name = osvariant_name.split('.')[0] + elif os.startswith('Amazon Linux AMI 2018.03'): + osrelease_name = osvariant_name = 'Amazon Linux 1' osrelease = get_or_create_osrelease(name=osrelease_name, codename=osrelease_codename, cpe_name=cpe_name) osvariant = get_or_create_osvariant( From 385ac0d37ac8454715555ddf7ba827e42d2f3524 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 13:09:34 -0400 Subject: [PATCH 152/199] move smaller fixup url functions --- security/utils.py | 50 ++++++++++++++++++++++++++++++++--------------- 1 file changed, 34 insertions(+), 16 deletions(-) diff --git a/security/utils.py b/security/utils.py index ca67af32..37e2311f 100644 --- a/security/utils.py +++ b/security/utils.py @@ -59,6 +59,37 @@ def update_cwes(cve_id=None): cwe.fetch_cwe_data() +def fixup_bugzilla_url(url): + bugzilla_hosts = [ + 'bugzilla.redhat.com', 'bugzilla.opensuse.org', 'bugzilla.suse.com', + 'bugs.debian.org', 'bugs.kde.org', 'bugzilla.mozilla.org', 'bugzilla.gnome.org', + ] + if url.hostname in bugzilla_hosts and url.path == '/show_bug.cgi': + bug = url.query.split('=')[1] + path = f'/{bug}' + url = url._replace(path=path, query='') + return url + + +def fixup_rhn_url(url): + if url.hostname == 'rhn.redhat.com': + netloc = 
url.netloc.replace('rhn', 'access') + path = url.path.replace('.html', '') + url = url._replace(netloc=netloc, path=path) + return url + + +def fixup_ubuntu_usn_url(url): + if ('ubuntu.com' in url.hostname and 'usn/' in url.path) or url.hostname == 'usn.ubuntu.com': + netloc = url.netloc.replace('usn.', '').replace('www.', '') + path = url.path.replace('usn/', 'security/notices/').replace('usn', 'USN').rstrip('/') + usn_id = path.split('/')[-1] + if 'USN' not in usn_id: + path = '/'.join(path.split('/')[:-1]) + '/USN-' + usn_id + url = url._replace(netloc=netloc, path=path) + return url + + def fixup_reference(ref): """ Fix up a Security Reference object to normalize the URL and type """ @@ -68,13 +99,7 @@ def fixup_reference(ref): ref_type = 'Mailing List' if ref_type == 'bugzilla' or 'bug' in url.hostname or 'bugs' in url.path: ref_type = 'Bug Tracker' - if ('ubuntu.com' in url.hostname and 'usn/' in url.path) or url.hostname == 'usn.ubuntu.com': - netloc = url.netloc.replace('usn.', '').replace('www.', '') - path = url.path.replace('usn/', 'security/notices/').replace('usn', 'USN').rstrip('/') - usn_id = path.split('/')[-1] - if 'USN' not in usn_id: - path = '/'.join(path.split('/')[:-1]) + '/USN-' + usn_id - url = url._replace(netloc=netloc, path=path) + url = fixup_ubuntu_usn_url(url) if url.hostname == 'ubuntu.com' and url.path.startswith('/security/notices/USN'): ref_type = 'USN' if 'launchpad.net' in url.hostname: @@ -83,15 +108,8 @@ def fixup_reference(ref): bug = url.path.split('/')[-1] path = f'/bugs/{bug}' url = url._replace(netloc=netloc, path=path) - if url.hostname in ['bugzilla.redhat.com', 'bugzilla.opensuse.org', 'bugs.suse.com'] and \ - url.path == '/show_bug.cgi': - bug = url.query.split('=')[1] - path = f'/{bug}' - url = url._replace(path=path, query='') - if url.hostname == 'rhn.redhat.com': - netloc = url.netloc.replace('rhn', 'access') - path = url.path.replace('.html', '') - url = url._replace(netloc=netloc, path=path) + url = 
fixup_bugzilla_url(url) + url = fixup_rhn_url(url) if url.hostname == 'access.redhat.com': if 'l1d-cache-eviction-and-vector-register-sampling' in url.path or \ 'security/vulnerabilities/speculativeexecution' in url.path or \ From fdb97f941dd9e9f0f267c65c2b689687e8e6e438 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 13:11:02 -0400 Subject: [PATCH 153/199] use reverse-date ordering for cves and errata --- errata/models.py | 1 + security/models.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/errata/models.py b/errata/models.py index 8490c5a9..cc4b58da 100644 --- a/errata/models.py +++ b/errata/models.py @@ -42,6 +42,7 @@ class Erratum(models.Model): class Meta: verbose_name = 'Erratum' verbose_name_plural = 'Errata' + ordering = ['-issue_date', 'name'] def __str__(self): text = f'{self.name} ({self.e_type}), {self.cves.count()} related CVEs, ' diff --git a/security/models.py b/security/models.py index 4929bbf2..5bd42c27 100644 --- a/security/models.py +++ b/security/models.py @@ -101,7 +101,7 @@ class CVE(models.Model): objects = CVEManager() class Meta: - ordering = ['cve_id'] + ordering = ['-cve_id'] def __str__(self): return self.cve_id From 9378543fb34cf19696084d175ce2ff4026854ad3 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 13:11:23 -0400 Subject: [PATCH 154/199] better cve mismatch message --- security/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/security/models.py b/security/models.py index 5bd42c27..997fc26b 100644 --- a/security/models.py +++ b/security/models.py @@ -141,7 +141,7 @@ def parse_nist_cve_data(self, cve_json): cve = vulnerability.get('cve') cve_id = cve.get('id') if cve_id != self.cve_id: - error_message.send(sender=None, text=f'CVE ID mismatch - {self.cve_id} - {cve_id}') + error_message.send(sender=None, text=f'CVE ID mismatch - {self.cve_id} != {cve_id}') return metrics = cve.get('metrics') for metric, score_data in metrics.items(): From 
6538fc7fd51caeead70e82522e4a5ab183b7c646 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 13:17:44 -0400 Subject: [PATCH 155/199] handle irregularly-named CVEs --- errata/models.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/errata/models.py b/errata/models.py index cc4b58da..c8614df7 100644 --- a/errata/models.py +++ b/errata/models.py @@ -57,7 +57,7 @@ def scan_for_security_updates(self): for package in self.packages.all(): affected_updates = PackageUpdate.objects.filter( newpackage=package, - security=False + security=False, ) for affected_update in affected_updates: if not affected_update.security: @@ -77,6 +77,9 @@ def add_packages(self, packages): def add_cve(self, cve_id): """ Add a CVE to an Erratum object """ + if not cve_id.startswith('CVE') or not cve_id.split('-')[1].isdigit(): + error_message.send(sender=None, text=f'Not a CVE ID: {cve_id}') + return self.cves.add(get_or_create_cve(cve_id)) def add_reference(self, ref_type, url): From 6d66c4090c65135ceed0023ffe9c81ec53ba8d68 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 14:06:42 -0400 Subject: [PATCH 156/199] get debian DSCs from package file maps --- errata/sources/distros/debian.py | 111 ++++++++++++++++++++++--------- 1 file changed, 79 insertions(+), 32 deletions(-) diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index bd0aef6f..16bd906c 100644 --- a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -20,14 +20,15 @@ from datetime import datetime from debian.deb822 import Dsc from io import StringIO -from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential from operatingsystems.models import OSRelease from operatingsystems.utils import get_or_create_osrelease from packages.models import Package from packages.utils import get_or_create_package, find_evr -from patchman.signals import error_message, pbar_start, pbar_update -from util import 
get_url, fetch_content, get_setting_of_type +from patchman.signals import error_message, pbar_start, pbar_update, warning_message +from util import get_url, fetch_content, get_setting_of_type, extract + +DSCs = {} def update_debian_errata(concurrent_processing=True): @@ -40,6 +41,7 @@ def update_debian_errata(concurrent_processing=True): dsas = fetch_debian_dsa_advisories() dlas = fetch_debian_dla_advisories() advisories = dsas + dlas + fetch_dscs_from_debian_package_file_maps() accepted_codenames = get_accepted_debian_codenames() errata = parse_debian_errata(advisories, accepted_codenames) create_debian_errata(errata, accepted_codenames, concurrent_processing) @@ -63,6 +65,47 @@ def fetch_debian_dla_advisories(): return data.decode() +def fetch_dscs_from_debian_package_file_maps(): + """ Fetch the current Debian package file maps + """ + repos = ['debian', 'debian-security'] + for repo in repos: + file_map_url = f'https://deb.debian.org/{repo}/indices/package-file.map.bz2' + res = get_url(file_map_url) + data = fetch_content(res, f'Fetching `{repo}` package file map') + file_map_data = extract(data, file_map_url).decode() + parse_debian_package_file_map(file_map_data, repo) + + +def parse_debian_package_file_map(data, repo): + """ Parse the a Debian package file map + Format: + Path: ./pool/updates/main/3/389-ds-base/389-ds-base_1.4.0.21-1+deb10u1.dsc + Source: 389-ds-base + Source-Version: 1.4.0.21-1+deb10u1 + """ + global DSCs + parsing_dsc = False + for line in data.splitlines(): + if line.startswith('Path:'): + if line.endswith('.dsc'): + parsing_dsc = True + path = line.split(' ')[1].lstrip('./') + url = f'https://deb.debian.org/{repo}/{path}' + else: + parsing_dsc = False + elif line.startswith('Source:') and parsing_dsc: + source = line.split(' ')[1] + elif line.startswith('Source-Version:') and parsing_dsc: + version = line.split(' ')[1] + if not DSCs.get(source): + DSCs[source] = {} + if not DSCs[source].get(version): + DSCs[source][version] = {} + 
DSCs[source][version] = {'url': url} + parsing_dsc = False + + def parse_debian_errata(advisories, accepted_codenames): """ Parse Debian DSA/DLA files for security advisories """ @@ -87,7 +130,7 @@ def parse_debian_errata(advisories, accepted_codenames): e['releases'].append(release) if not e.get('packages').get(release): e['packages'][release] = [] - e['packages'][release].append(parse_debian_erratum_packages(line, accepted_codenames)) + e['packages'][release].append(parse_debian_erratum_package(line, accepted_codenames)) # add the last one errata = add_errata_by_codename(errata, e, accepted_codenames) return errata @@ -175,9 +218,9 @@ def process_debian_erratum(erratum, accepted_codenames): error_message.send(sender=None, text=exc) -def parse_debian_erratum_packages(line, accepted_codenames): - """ Parse the codename and source packages from a DSA/DLA file - Return the DSC object +def parse_debian_erratum_package(line, accepted_codenames): + """ Parse the codename and source package from a DSA/DLA file + Returns the source package and source version """ distro_package_pattern = re.compile(r'^\t\[(.+?)\] - (.+?) 
(.*)') match = re.match(distro_package_pattern, line) @@ -186,29 +229,34 @@ def parse_debian_erratum_packages(line, accepted_codenames): if codename in accepted_codenames: source_package = match.group(2) source_version = match.group(3) - return [codename, source_package, source_version] - - -@retry( - retry=retry_if_exception_type(ConnectionError), - stop=stop_after_attempt(10), - wait=wait_exponential(multiplier=1, min=2, max=15), -) -def fetch_debian_package_dsc(codename, package, version): - """ Fetch a DSC file for the given source package - From this we can determine which packages are built from - a given source package + fetch_debian_dsc_package_list(source_package, source_version) + return source_package, source_version + + +def get_debian_dsc_package_list(package, version): + """ Get the package list from a DSC file for a given source package/version """ - dsc_pattern = re.compile(r'.*"(http.*dsc)"') - source_url = f'https://packages.debian.org/source/{codename}/{package}' + global DSCs + if not DSCs.get(package) or not DSCs[package].get(version): + return + package_list = DSCs[package][version].get('package_list') + if package_list: + return package_list + + +def fetch_debian_dsc_package_list(package, version): + """ Fetch the package list from a DSC file for a given source package/version + """ + global DSCs + if not DSCs.get(package) or not DSCs[package].get(version): + warning_message.send(sender=None, text=f'No DSC found for {package} {version}') + return + source_url = DSCs[package][version]['url'] res = get_url(source_url) data = res.content - dscs = re.findall(dsc_pattern, data.decode()) - if dscs: - dsc_url = dscs[0] - res = get_url(dsc_url) - data = res.content - return Dsc(data.decode()) + dsc = Dsc(data.decode()) + package_list = dsc.get('package-list') + DSCs[package][version]['package_list'] = package_list def get_accepted_debian_codenames(): @@ -252,12 +300,11 @@ def create_debian_os_releases(codename_to_version): def 
process_debian_erratum_affected_packages(e, package_data): """ Process packages affected by Debian errata """ - codename, source_package, source_version = package_data - dsc = fetch_debian_package_dsc(codename, source_package, source_version) - if not dsc: + source_package, source_version = package_data + epoch, ver, rel = find_evr(source_version) + package_list = get_debian_dsc_package_list(source_package, source_version) + if not package_list: return - epoch, ver, rel = find_evr(str(dsc.get_version())) - package_list = dsc.get('package-list') for line in package_list.splitlines(): if not line: continue From 7489754635aff7d917f0baded097de378ffd0366 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 14:10:05 -0400 Subject: [PATCH 157/199] improve get_matching_packages --- packages/utils.py | 27 ++++++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index 9a8fa20c..f14884a9 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -224,22 +224,39 @@ def get_or_create_package_update(oldpackage, newpackage, security): return update -def get_matching_packages(name, epoch, version, release, p_type): - """ Get packages matching certain criteria - Returns the matching packages or None +def get_matching_packages(name, epoch, version, release, p_type, arch=None): + """ Get packages matching the name, epoch, version, release, and package_type + Arch can be omitted if unknown + Returns the matching packages or an empty list """ try: package_name = PackageName.objects.get(name=name) except PackageName.DoesNotExist: - return - if package_name: + return [] + if arch: + if not isinstance(arch, PackageArchitecture): + try: + arch = PackageArchitecture.objects.get_or_create(name=arch) + except PackageArchitecture.DoesNotExist: + return [] packages = Package.objects.filter( + epoch=epoch, name=package_name, version=version, release=release, + arch=arch, packagetype=p_type, ) return packages + 
else: + packages = Package.objects.filter( + epoch=epoch, + name=package_name, + version=version, + release=release, + packagetype=p_type, + ) + return packages def clean_packageupdates(): From 14485b103fd9d7bbcfec5d631964fa0432ce920b Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 14:12:03 -0400 Subject: [PATCH 158/199] use package-specific display options --- packages/models.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/packages/models.py b/packages/models.py index 0f2cf5c2..137694c0 100644 --- a/packages/models.py +++ b/packages/models.py @@ -68,8 +68,8 @@ class Package(models.Model): PACKAGE_TYPES = ( (RPM, 'rpm'), (DEB, 'deb'), - (ARCH, 'arch'), - (GENTOO, 'gentoo'), + (ARCH, 'pkgbuild'), + (GENTOO, 'ebuild'), (UNKNOWN, 'unknown'), ) @@ -98,10 +98,14 @@ def __str__(self): rel = f'-{self.release}' else: rel = '' - if self.packagetype == 'G': - return f'{self.category}/{self.name}-{epo}{self.version}{rel}-{self.arch}' + if self.packagetype == self.GENTOO: + return f'{self.category}/{self.name}_{epo}{self.version}{rel}_{self.arch}.{self.get_packagetype_display()}' + elif self.packagetype in [self.DEB, self.ARCH]: + return f'{self.name}_{epo}{self.version}{rel}_{self.arch}.{self.get_packagetype_display()}' + elif self.packagetype == self.RPM: + return f'{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' else: - return f'{self.name}-{epo}{self.version}{rel}-{self.arch}' + return f'{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' def get_absolute_url(self): return reverse('packages:package_detail', args=[self.id]) From 14129b146cd79223f5cc4e07f67b8e87e61f49a2 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 14:13:06 -0400 Subject: [PATCH 159/199] add concurrent processing for yum repo errata --- errata/sources/repos/yum.py | 37 ++++++++++++++++++++++++++++--------- 1 file changed, 28 insertions(+), 9 deletions(-) diff --git 
a/errata/sources/repos/yum.py b/errata/sources/repos/yum.py index 45fe08ad..6d98dcb4 100644 --- a/errata/sources/repos/yum.py +++ b/errata/sources/repos/yum.py @@ -22,10 +22,10 @@ from packages.models import Package from packages.utils import get_or_create_package from patchman.signals import pbar_start, pbar_update, error_message -from util import extract +from util import extract, get_url -def extract_updateinfo(data, url): +def extract_updateinfo(data, url, concurrent_processing=True): """ Parses updateinfo.xml and extracts package/errata information """ extracted = extract(data, url) @@ -33,15 +33,34 @@ def extract_updateinfo(data, url): tree = ET.parse(BytesIO(extracted)) root = tree.getroot() elen = root.__len__() - pbar_start.send(sender=None, ptext=f'Extracting {elen} updateinfo Errata', plen=elen) - i = 0 - with concurrent.futures.ProcessPoolExecutor(max_workers=100) as executor: - futures = [executor.submit(process_updateinfo_erratum, update) for update in root.findall('update')] - for future in concurrent.futures.as_completed(futures): - i += 1 - pbar_update.send(sender=None, index=i + 1) + updates = root.findall('update') except ET.ParseError as e: error_message.send(sender=None, text=f'Error parsing updateinfo file from {url} : {e}') + if concurrent_processing: + extract_updateinfo_concurrently(updates, elen) + else: + extract_updateinfo_serially(updates, elen) + + +def extract_updateinfo_serially(updates, elen): + """ Parses updateinfo.xml and extracts package/errata information serially + """ + pbar_start.send(sender=None, ptext=f'Extracting {elen} updateinfo Errata', plen=elen) + for i, update in enumerate(updates): + process_updateinfo_erratum(update) + pbar_update.send(sender=None, index=i + 1) + + +def extract_updateinfo_concurrently(updates, elen): + """ Parses updateinfo.xml and extracts package/errata information concurrently + """ + pbar_start.send(sender=None, ptext=f'Extracting {elen} updateinfo Errata', plen=elen) + i = 0 + with 
concurrent.futures.ProcessPoolExecutor(max_workers=100) as executor: + futures = [executor.submit(process_updateinfo_erratum, update) for update in updates] + for future in concurrent.futures.as_completed(futures): + i += 1 + pbar_update.send(sender=None, index=i + 1) def process_updateinfo_erratum(update): From ef36646723ced016b4af6c4660a71b25c4808a08 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 15:36:07 -0400 Subject: [PATCH 160/199] additional options for yum repo errata handling --- errata/tasks.py | 19 ++++++++++++------- sbin/patchman | 6 +++--- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/errata/tasks.py b/errata/tasks.py index c0a644b2..fe53b415 100644 --- a/errata/tasks.py +++ b/errata/tasks.py @@ -29,22 +29,27 @@ @shared_task -def update_yum_repo_errata(): +def update_yum_repo_errata(repo_id=None, force=False): """ Update all yum repos errata """ - for repo in Repository.objects.filter(repotype=Repository.RPM): - repo.refresh_errata() + if repo_id: + repo = Repository.objects.get(id=repo_id) + repo.refresh_errata(force) + else: + for repo in Repository.objects.filter(repotype=Repository.RPM): + repo.refresh_errata(force) @shared_task -def update_errata(erratum_type=None): +def update_errata(erratum_type=None, force=False, repo=None): """ Update all distros errata """ errata_os_updates = [] + erratum_types = ['yum', 'rocky', 'alma', 'arch', 'ubuntu', 'debian', 'centos'] erratum_type_defaults = ['yum', 'rocky', 'alma', 'arch', 'ubuntu', 'debian'] if erratum_type: - if erratum_type not in erratum_type_defaults: - error_message.send(sender=None, text=f'Erratum type `{erratum_type}` not in {erratum_type_defaults}') + if erratum_type not in erratum_types: + error_message.send(sender=None, text=f'Erratum type `{erratum_type}` not in {erratum_types}') else: errata_os_updates = erratum_type else: @@ -54,7 +59,7 @@ def update_errata(erratum_type=None): default=erratum_type_defaults, ) if 'yum' in errata_os_updates: - 
update_yum_repo_errata() + update_yum_repo_errata(repo_id=repo, force=force) if 'arch' in errata_os_updates: update_arch_errata() if 'alma' in errata_os_updates: diff --git a/sbin/patchman b/sbin/patchman index 72b2f012..36bf7c7e 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -517,19 +517,19 @@ def process_args(args): host_updates_alt(args.host) showhelp = False recheck = True - if args.dbcheck and recheck: - dbcheck(args.remove_duplicates) if args.dns_checks: dns_checks(args.host) showhelp = False if args.update_errata: - update_errata(args.erratum_type) + update_errata(args.erratum_type, args.force, args.repo) mark_errata_security_updates() showhelp = False if args.update_cves: update_cves(args.cve, args.fetch_nist_data) update_cwes(args.cve) showhelp = False + if args.dbcheck and recheck: + dbcheck(args.remove_duplicates) return showhelp From f77806e82ef5090fd459ffb0a2f515a45e01e22d Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 15:39:29 -0400 Subject: [PATCH 161/199] remove errata view ordering --- errata/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/errata/views.py b/errata/views.py index 0564e1dd..cc5eabcb 100644 --- a/errata/views.py +++ b/errata/views.py @@ -29,7 +29,7 @@ @login_required def erratum_list(request): - errata = Erratum.objects.select_related().order_by('name') + errata = Erratum.objects.select_related() if 'e_type' in request.GET: errata = errata.filter(e_type=request.GET['e_type']).distinct() From 423ca73ce6ab23a140146466f75cd91a7a944646 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 20:39:08 -0400 Subject: [PATCH 162/199] use arch_id for packages --- packages/views.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/views.py b/packages/views.py index fd66e7a6..42a2a345 100644 --- a/packages/views.py +++ b/packages/views.py @@ -113,8 +113,8 @@ def package_list(request): def package_name_list(request): packages = 
PackageName.objects.select_related() - if 'arch' in request.GET: - packages = packages.filter(package__arch=request.GET['arch']).distinct() + if 'arch_id' in request.GET: + packages = packages.filter(package__arch=request.GET['arch_id']).distinct() if 'packagetype' in request.GET: packages = packages.filter(package__packagetype=request.GET['packagetype']).distinct() @@ -141,7 +141,7 @@ def package_name_list(request): filter_list = [] filter_list.append(Filter(request, 'Package Type', 'packagetype', Package.PACKAGE_TYPES)) - filter_list.append(Filter(request, 'Architecture', 'arch', PackageArchitecture.objects.all())) + filter_list.append(Filter(request, 'Architecture', 'arch_id', PackageArchitecture.objects.all())) filter_bar = FilterBar(request, filter_list) return render(request, From 89b3f1aeedc74ff09d770ed01a3882c2dd4d04cd Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 22:26:16 -0400 Subject: [PATCH 163/199] add distro-specific references to errata --- errata/sources/distros/rocky.py | 2 ++ errata/sources/repos/yum.py | 46 +++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py index 0a4ce41d..5579b195 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -198,6 +198,8 @@ def process_rocky_erratum(advisory): def add_rocky_erratum_references(e, advisory): """ Add Rocky Linux errata references """ + e.add_reference('Rocky Advisory', 'https://apollo.build.resf.org/{e.name}') + e.add_reference('Rocky Advisory', 'https://errata.rockylinux.org/{e.name}') advisory_cves = advisory.get('cves') for a_cve in advisory_cves: cve_id = a_cve.get('cve') diff --git a/errata/sources/repos/yum.py b/errata/sources/repos/yum.py index 6d98dcb4..95d564ae 100644 --- a/errata/sources/repos/yum.py +++ b/errata/sources/repos/yum.py @@ -77,9 +77,55 @@ def process_updateinfo_erratum(update): update.clear() +def add_distro_references(e): + """ Adds 
distro-specific references to an Erratum + """ + urls = [] + name = e.name + e_type = e.erratum_type + if name.startswith('ALAS'): + ref_type = 'Amazon Advisory' + if name[4] == '-': + update_path = '' + elif name[4:6] == '2-': + update_path = 'AL2/' + name = name.replace('ALAS2', 'ALAS') + elif name[4:8] == '2023': + update_path = 'AL2023/' + name = name.replace('ALAS2023', 'ALAS') + urls.append(f'https://alas.aws.amazon.com/{update_path}{name}.html') + elif name.startswith('openSUSE-SLE'): + ref_type = 'SUSE Advisory' + update_type = e_type[0].upper() + 'U' + year = name.split('-')[-2] + number = name.split('-')[-1].zfill(4) + identifier = f'{year}:{number}' + prefix = f'SUSE-{update_type}' + name = f'{prefix}-{identifier}-1' + url_root = 'https://www.suse.com/support/update/announcement/' + url_path = f'{year}/{prefix}-{year}{number}-' + for i in range(1, 10): + url = f'{url_root}{url_path}{i}' + res = get_url(url) + if res.status_code != 200: + break + urls.append(f'{url_root}{url_path}{i}') + elif name.startswith('EL'): + ref_type = 'Oracle Advisory' + urls.append(f'https://linux.oracle.com/errata/{name}.html') + elif name.startswith('RL'): + ref_type = 'Rocky Advisory' + urls.append(f'https://errata.rockylinux.org/{name}') + urls.append(f'https://apollo.build.resf.org/{name}') + if urls: + for url in urls: + e.add_reference(ref_type, url) + + def add_updateinfo_erratum_references(e, update): """ Adds references to an Erratum """ + add_distro_references(e) references = update.find('references') for reference in references.findall('reference'): if reference.attrib.get('type') == 'cve': From 084840e504e7263553e3b0b489a4c69ef83e9c95 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 9 Mar 2025 22:39:05 -0400 Subject: [PATCH 164/199] don't use acronym, keep flake8 happy --- errata/sources/distros/centos.py | 4 ++-- errata/sources/repos/yum.py | 6 +++--- repos/repo_types/gentoo.py | 10 +++++----- repos/repo_types/yum.py | 10 +++++----- repos/utils.py | 7 
+++---- 5 files changed, 18 insertions(+), 19 deletions(-) diff --git a/errata/sources/distros/centos.py b/errata/sources/distros/centos.py index cbfd0a2b..b8e7e561 100644 --- a/errata/sources/distros/centos.py +++ b/errata/sources/distros/centos.py @@ -15,7 +15,7 @@ # along with Patchman. If not, see import re -from defusedxml import ElementTree as ET +from defusedxml import ElementTree from operatingsystems.utils import get_or_create_osrelease from packages.models import Package @@ -65,7 +65,7 @@ def parse_centos_errata_checksum(data): def parse_centos_errata(data): """ Parse CentOS errata from https://cefs.steve-meier.de/ """ - result = ET.XML(data) + result = ElementTree.XML(data) errata_xml = result.findall('*') elen = len(errata_xml) pbar_start.send(sender=None, ptext=f'Processing {elen} CentOS Errata', plen=elen) diff --git a/errata/sources/repos/yum.py b/errata/sources/repos/yum.py index 95d564ae..2f672832 100644 --- a/errata/sources/repos/yum.py +++ b/errata/sources/repos/yum.py @@ -16,7 +16,7 @@ import concurrent.futures from io import BytesIO -import defusedxml.ElementTree as ET +from defusedxml import ElementTree from operatingsystems.utils import get_or_create_osrelease from packages.models import Package @@ -30,11 +30,11 @@ def extract_updateinfo(data, url, concurrent_processing=True): """ extracted = extract(data, url) try: - tree = ET.parse(BytesIO(extracted)) + tree = ElementTree.parse(BytesIO(extracted)) root = tree.getroot() elen = root.__len__() updates = root.findall('update') - except ET.ParseError as e: + except ElementTree.ParseError as e: error_message.send(sender=None, text=f'Error parsing updateinfo file from {url} : {e}') if concurrent_processing: extract_updateinfo_concurrently(updates, elen) diff --git a/repos/repo_types/gentoo.py b/repos/repo_types/gentoo.py index fb27ff74..21a69fbe 100644 --- a/repos/repo_types/gentoo.py +++ b/repos/repo_types/gentoo.py @@ -20,7 +20,7 @@ import shutil import tarfile import tempfile -from defusedxml 
import ElementTree as ET +from defusedxml import ElementTree from fnmatch import fnmatch from io import BytesIO @@ -94,7 +94,7 @@ def get_gentoo_overlay_mirrors(repo_name): return mirrors = [] try: - tree = ET.parse(BytesIO(res.content)) + tree = ElementTree.parse(BytesIO(res.content)) root = tree.getroot() for child in root: if child.tag == 'repo': @@ -105,7 +105,7 @@ def get_gentoo_overlay_mirrors(repo_name): if found and element.tag == 'source': if element.text.startswith('http'): mirrors.append(element.text) - except ET.ParseError as e: + except ElementTree.ParseError as e: error_message.send(sender=None, text=f'Error parsing {gentoo_overlays_url}: {e}') return mirrors @@ -119,7 +119,7 @@ def get_gentoo_mirror_urls(): return mirrors = {} try: - tree = ET.parse(BytesIO(res.content)) + tree = ElementTree.parse(BytesIO(res.content)) root = tree.getroot() for child in root: if child.tag == 'mirrorgroup': @@ -139,7 +139,7 @@ def get_gentoo_mirror_urls(): elif element.tag == 'uri': if element.get('protocol') == 'http': mirrors[name]['urls'].append(element.text) - except ET.ParseError as e: + except ElementTree.ParseError as e: error_message.send(sender=None, text=f'Error parsing {gentoo_distfiles_url}: {e}') mirror_urls = [] # for now, ignore region data and choose MAX_MIRRORS mirrors at random diff --git a/repos/repo_types/yum.py b/repos/repo_types/yum.py index e8a5791f..d08c7393 100644 --- a/repos/repo_types/yum.py +++ b/repos/repo_types/yum.py @@ -16,7 +16,7 @@ import re import yaml -from defusedxml import ElementTree as ET +from defusedxml import ElementTree from io import BytesIO from errata.sources.repos.yum import extract_updateinfo @@ -39,7 +39,7 @@ def get_repomd_url(mirror_url, data, url_type='primary'): extracted = extract(data, mirror_url) location = None try: - tree = ET.parse(BytesIO(extracted)) + tree = ElementTree.parse(BytesIO(extracted)) root = tree.getroot() for child in root: if child.attrib.get('type') == url_type: @@ -49,7 +49,7 @@ def 
get_repomd_url(mirror_url, data, url_type='primary'): if grandchild.tag == f'{{{ns}}}checksum': checksum = grandchild.text checksum_type = grandchild.attrib.get('type') - except ET.ParseError as e: + except ElementTree.ParseError as e: error_message.send(sender=None, text=(f'Error parsing repomd from {mirror_url}: {e}')) if not location: return None, None, None @@ -110,7 +110,7 @@ def extract_yum_packages(data, url): ns = 'http://linux.duke.edu/metadata/common' packages = set() try: - context = ET.iterparse(BytesIO(extracted), events=('start', 'end')) + context = ElementTree.iterparse(BytesIO(extracted), events=('start', 'end')) for event, elem in context: if event == 'start': if elem.tag == f'{{{ns}}}metadata': @@ -152,7 +152,7 @@ def extract_yum_packages(data, url): text = f'Error parsing Package: {name} {epoch} {version} {release} {arch}' error_message.send(sender=None, text=text) elem.clear() - except ET.ParseError as e: + except ElementTree.ParseError as e: error_message.send(sender=None, text=f'Error parsing yum primary.xml from {url}: {e}') return packages diff --git a/repos/utils.py b/repos/utils.py index 003b612e..ea48f94f 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -17,8 +17,7 @@ import re from io import BytesIO - -from defusedxml import ElementTree as ET +from defusedxml import ElementTree from tenacity import RetryError from django.db import IntegrityError @@ -121,7 +120,7 @@ def get_metalink_urls(url): extracted = extract(data, url) ns = 'http://www.metalinker.org/' try: - tree = ET.parse(BytesIO(extracted)) + tree = ElementTree.parse(BytesIO(extracted)) root = tree.getroot() for child in root: if child.tag == f'{{{ns}}}files': @@ -133,7 +132,7 @@ def get_metalink_urls(url): if greatgreatgrandchild.tag == f'{{{ns}}}url': if greatgreatgrandchild.attrib.get('protocol') in ['https', 'http']: metalink_urls.append(greatgreatgrandchild.text) - except ET.ParseError as e: + except ElementTree.ParseError as e: error_message.send(sender=None, 
text=f'Error parsing metalink {url}: {e}') return metalink_urls From 3eb2d94a61eb612251f26ae0461a7368b97616b0 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Mar 2025 00:20:52 -0400 Subject: [PATCH 165/199] update gentoo repo handling --- client/patchman-client | 56 +++++++++---- reports/utils.py | 20 +---- repos/repo_types/gentoo.py | 162 ++++++++++++++++++++----------------- repos/utils.py | 2 +- 4 files changed, 136 insertions(+), 104 deletions(-) diff --git a/client/patchman-client b/client/patchman-client index 1addc6ae..bf4bc5c9 100755 --- a/client/patchman-client +++ b/client/patchman-client @@ -574,24 +574,52 @@ get_repos() { fi # Gentoo - if [ "${os}" == "Gentoo" ] ; then + if [[ "${os}" =~ "Gentoo" ]] ; then if [ ${verbose} == 1 ] ; then echo 'Finding portage repos...' fi - declare -A repos - repos[gentoo]='-1000' - repos_conf=$(awk '/\[/{prefix=$0; next} $1{print prefix $0}' /etc/portage/repos.conf/*.conf | grep '^\[') - for stanza in ${repos_conf} ; do - repo=$(echo ${stanza} | cut -d ']' -f 1 | sed -e 's/\[//') - rhs=$(echo ${stanza} | cut -d ']' -f 2 | grep -v '^#') - if [[ ${rhs} =~ "priority" ]] ; then - priority=$(echo ${rhs} | sed -e 's/^ *priority *= *//') - repos[${repo}]+=${priority} - unset priority + declare -A repo_info + repos_output=$(portageq repos_config /) + repo_name="" + priority="" + sync_uri="" + + while IFS= read -r line; do + # if the line starts with a section header (e.g., [gentoo], [guru]), it's the repo name + if [[ "${line}" =~ ^\[(.*)\] ]]; then + # if we already have a repo_name, save the previous entry + if [[ -n "${repo_name}" && -n "${sync_uri}" ]]; then + repo_info["${repo_name}"]="${priority},${sync_uri}" + fi + # else start new repo parsing, resetting vars + repo_name="${BASH_REMATCH[1]}" + priority="" + sync_uri="" fi - done - for r in "${!repos[@]}"; do - echo "'gentoo' 'Gentoo Linux ${r} ${host_arch}' '${r}' '${repos[${r}]}'" >> "${tmpfile_rep}" + + # if the line contains "priority", extract the value, 
0 if it doesnt exist + if [[ "${line}" =~ "priority" ]]; then + priority=$(echo "${line}" | cut -d'=' -f2 | xargs) + fi + + # if the line contains "sync-uri", extract the value + if [[ "${line}" =~ "sync-uri" ]]; then + sync_uri=$(echo "${line}" | cut -d'=' -f2 | xargs) + fi + done <<< "${repos_output}" + + # save the last repository entry if it's available + if [[ -n "${repo_name}" && -n "${sync_uri}" ]]; then + repo_info["${repo_name}"]="${priority},${sync_uri}" + fi + + for repo in "${!repo_info[@]}"; do + priority=$(echo ${repo_info[$repo]} | cut -d',' -f1) + sync_uri=$(echo ${repo_info[$repo]} | cut -d',' -f2) + if [ "${priority}" == "" ] ; then + priority=0 + fi + echo "'gentoo' 'Gentoo Linux ${repo} Repo ${host_arch}' '${repo}' '${priority}' '${sync_uri}'" >> "${tmpfile_rep}" done fi diff --git a/reports/utils.py b/reports/utils.py index 54b2fa60..641f90df 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -34,10 +34,6 @@ def process_repos(report, host): """ Processes the quoted repos string sent with a report """ - if host.osvariant.name.startswith('Gentoo'): - gentoo_repo = Repository.objects.get(repo_id='gentoo') - host_repos = HostRepo.objects.filter(host=host) - hostrepo, c = host_repos.get_or_create(host=host, repo=gentoo_repo) if report.repos: repo_ids = [] host_repos = HostRepo.objects.filter(host=host) @@ -224,6 +220,7 @@ def process_repo(repo, arch): r_type = Repository.GENTOO r_id = repo.pop(2) r_priority = repo[2] + arch = 'any' if repo[1]: r_name = repo[1] @@ -232,6 +229,8 @@ def process_repo(repo, arch): unknown = [] for r_url in repo[3:]: + if r_type == Repository.GENTOO and r_url.startswith('rsync'): + r_url = 'https://api.gentoo.org/mirrors/distfiles.xml' try: mirror = Mirror.objects.get(url=r_url.strip('/')) except Mirror.DoesNotExist: @@ -360,19 +359,6 @@ def process_gentoo_package(package, name, category, repo): package.category = category package.save() - repo_arch, created = MachineArchitecture.objects.get_or_create(name='any') - 
repo_name = 'Gentoo Linux' - gentoo_repo = get_or_create_repo(repo_name, repo_arch, Repository.GENTOO, repo) - - if repo == 'gentoo': - url = 'https://api.gentoo.org/mirrors/distfiles.xml' - else: - # this may not be correct. the urls are hardcoded anyway in repos/utils.py - # need to figure out a better way to determine which repo/repo url to use - url = 'https://api.gentoo.org/overlays/repositories.xml' - mirror, c = Mirror.objects.get_or_create(repo=gentoo_repo, url=url, mirrorlist=True) - MirrorPackage.objects.create(mirror=mirror, package=package) - def get_arch(arch): """ Get or create MachineArchitecture from arch diff --git a/repos/repo_types/gentoo.py b/repos/repo_types/gentoo.py index 21a69fbe..94df139a 100644 --- a/repos/repo_types/gentoo.py +++ b/repos/repo_types/gentoo.py @@ -16,15 +16,14 @@ import git import os -import re import shutil import tarfile import tempfile from defusedxml import ElementTree from fnmatch import fnmatch from io import BytesIO +from pathlib import Path -from arch.models import PackageArchitecture from packages.models import PackageString from packages.utils import find_evr from patchman.signals import info_message, warning_message, error_message, pbar_start, pbar_update @@ -37,6 +36,55 @@ def refresh_gentoo_main_repo(repo): """ mirrors = get_gentoo_mirror_urls() add_mirrors_from_urls(repo, mirrors) + ts = get_datetime_now() + for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True): + if mirror.url == 'https://api.gentoo.org/mirrors/distfiles.xml': + mirror.mirrorlist = True + mirror.save() + continue + + res = get_url(mirror.url + '.md5sum') + data = fetch_content(res, 'Fetching Repo checksum') + if data is None: + mirror.fail() + continue + + checksum = data.decode().split()[0] + if checksum is None: + mirror.fail() + continue + + if mirror.packages_checksum == checksum: + text = 'Mirror checksum has not changed, not refreshing Package metadata' + warning_message.send(sender=None, text=text) + 
continue + + res = get_url(mirror.url) + mirror.last_access_ok = response_is_valid(res) + if not mirror.last_access_ok: + mirror.fail() + continue + + data = fetch_content(res, 'Fetching Repo data') + if data is None: + mirror.fail() + continue + extracted = extract(data, mirror.url) + info_message.send(sender=None, text=f'Found Gentoo Repo - {mirror.url}') + + computed_checksum = get_checksum(data, Checksum.md5) + if not mirror_checksum_is_valid(computed_checksum, checksum, mirror, 'package'): + mirror.fail() + continue + else: + mirror.packages_checksum = checksum + + packages = extract_gentoo_packages(mirror, extracted) + if packages: + update_mirror_packages(mirror, packages) + + mirror.timestamp = ts + mirror.save() def refresh_gentoo_overlay_repo(repo): @@ -44,6 +92,14 @@ def refresh_gentoo_overlay_repo(repo): """ mirrors = get_gentoo_overlay_mirrors(repo.repo_id) add_mirrors_from_urls(repo, mirrors) + ts = get_datetime_now() + for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True): + # FIXME: need to check for failure + packages = extract_gentoo_overlay_packages(mirror) + if packages: + update_mirror_packages(mirror, packages) + mirror.timestamp = ts + mirror.save() def get_gentoo_ebuild_keywords(content): @@ -82,7 +138,10 @@ def get_gentoo_ebuild_keywords(content): continue keywords.add(keyword) break - return keywords + if keywords: + return keywords + else: + return default_keywords def get_gentoo_overlay_mirrors(repo_name): @@ -157,7 +216,25 @@ def extract_gentoo_ebuilds(data): for member in tar.getmembers(): if member.isfile() and member.name.endswith('ebuild') and not member.name.endswith('skel.ebuild'): file_content = tar.extractfile(member).read() - extracted_ebuilds[member.name] = file_content + full_path = Path(member.name) + ebuild_path = Path(*full_path.parts[1:]) + extracted_ebuilds[str(ebuild_path)] = file_content + return extracted_ebuilds + + +def extract_gentoo_overlay_ebuilds(t): + """ Extract ebuilds from a 
Gentoo overlay tarball + """ + extracted_ebuilds = {} + for root, dirs, files in os.walk(t): + for name in files: + if fnmatch(name, '*.ebuild'): + package_name = root.replace(t + '/', '') + if len(package_name.split('/')) > 2: + continue + with open(os.path.join(root, name), 'rb') as f: + content = f.read() + extracted_ebuilds[f'{package_name}/{name}'] = content return extracted_ebuilds @@ -175,14 +252,14 @@ def extract_gentoo_packages_from_ebuilds(extracted_ebuilds): return packages = set() - flen = len(extracted_ebuilds) - pbar_start.send(sender=None, ptext=f'Processing {flen} ebuilds', plen=flen) + elen = len(extracted_ebuilds) + pbar_start.send(sender=None, ptext=f'Processing {elen} ebuilds', plen=elen) for i, (path, content) in enumerate(extracted_ebuilds.items()): pbar_update.send(sender=None, index=i + 1) components = path.split(os.sep) - category = components[1] - name = components[2] - evr = components[3].replace(f'{name}-', '').replace('.ebuild', '') + category = components[0] + name = components[1] + evr = components[2].replace(f'{name}-', '').replace('.ebuild', '') epoch, version, release = find_evr(evr) arches = get_gentoo_ebuild_keywords(content) for arch in arches: @@ -205,29 +282,12 @@ def extract_gentoo_overlay_packages(mirror): """ Extract packages from gentoo overlay repo """ t = tempfile.mkdtemp() - git.Repo.clone_from(mirror.url, t, branch='master', depth=1) + info_message.send(sender=None, text=f'Extracting Gentoo packages from {mirror.url}') + git.Repo.clone_from(mirror.url, t, depth=1) packages = set() - arch, c = PackageArchitecture.objects.get_or_create(name='any') - for root, dirs, files in os.walk(t): - for name in files: - if fnmatch(name, '*.ebuild'): - full_name = root.replace(t + '/', '') - p_category, p_name = full_name.split('/') - m = re.match(fr'{p_name}-(.*)\.ebuild', name) - if m: - p_evr = m.group(1) - epoch, version, release = find_evr(p_evr) - package = PackageString( - name=p_name.lower(), - epoch=epoch, - version=version, 
- release=release, - arch=arch, - packagetype='G', - category=p_category, - ) - packages.add(package) + extracted_ebuilds = extract_gentoo_overlay_ebuilds(t) shutil.rmtree(t) + packages = extract_gentoo_packages_from_ebuilds(extracted_ebuilds) return packages @@ -235,48 +295,6 @@ def refresh_gentoo_repo(repo): """ Refresh a Gentoo repo """ if repo.repo_id == 'gentoo': - repo_type = 'main' refresh_gentoo_main_repo(repo) else: refresh_gentoo_overlay_repo(repo) - repo_type = 'overlay' - ts = get_datetime_now() - for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True): - res = get_url(mirror.url + '.md5sum') - data = fetch_content(res, 'Fetching Repo checksum') - if data is None: - mirror.fail() - continue - checksum = data.decode().split()[0] - if checksum is None: - mirror.fail() - continue - if mirror.packages_checksum == checksum: - text = 'Mirror checksum has not changed, not refreshing Package metadata' - warning_message.send(sender=None, text=text) - continue - res = get_url(mirror.url) - mirror.last_access_ok = response_is_valid(res) - if mirror.last_access_ok: - data = fetch_content(res, 'Fetching Repo data') - if data is None: - mirror.fail() - continue - extracted = extract(data, mirror.url) - text = f'Found Gentoo Repo - {mirror.url}' - info_message.send(sender=None, text=text) - computed_checksum = get_checksum(data, Checksum.md5) - if not mirror_checksum_is_valid(computed_checksum, checksum, mirror, 'package'): - continue - else: - mirror.packages_checksum = checksum - if repo_type == 'main': - packages = extract_gentoo_packages(mirror, extracted) - elif repo_type == 'overlay': - packages = extract_gentoo_overlay_packages(mirror) - mirror.timestamp = ts - if packages: - update_mirror_packages(mirror, packages) - else: - mirror.fail() - mirror.save() diff --git a/repos/utils.py b/repos/utils.py index ea48f94f..49b5d07f 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -177,7 +177,7 @@ def add_mirrors_from_urls(repo, 
mirror_urls): from repos.models import Mirror # FIXME: maybe we should store the mirrorlist url with full path to repomd.xml? # that is what metalink urls return now - m, c = Mirror.objects.get_or_create(repo=repo, url=mirror_url.rstrip('/').rstrip('repodata/repomd.xml')) + m, c = Mirror.objects.get_or_create(repo=repo, url=mirror_url.rstrip('/').replace('repodata/repomd.xml', '')) if c: text = f'Added Mirror - {mirror_url}' info_message.send(sender=None, text=text) From 6964bedd3784ed2bd3047e7f32829f5dd37de6a8 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Mar 2025 21:11:03 -0400 Subject: [PATCH 166/199] add errata affected and fixed packages --- errata/admin.py | 2 +- ..._rename_packages_erratum_fixed_packages.py | 18 +++ ...0005_erratum_affected_packages_and_more.py | 24 ++++ .../migrations/0006_alter_erratum_options.py | 17 +++ .../0007_alter_erratum_fixed_packages.py | 19 ++++ errata/models.py | 107 +++++++++++++++--- errata/sources/distros/alma.py | 14 ++- errata/sources/distros/arch.py | 58 +++++++++- errata/sources/distros/centos.py | 6 +- errata/sources/distros/debian.py | 12 +- errata/sources/distros/rocky.py | 8 +- errata/sources/distros/ubuntu.py | 27 +++-- errata/sources/repos/yum.py | 2 +- errata/templates/errata/erratum_detail.html | 33 ++++-- errata/templates/errata/erratum_table.html | 14 ++- errata/utils.py | 17 +++ errata/views.py | 8 +- hosts/models.py | 43 ++++--- hosts/templates/hosts/host_detail.html | 3 +- hosts/templates/hosts/host_table.html | 4 +- .../0005_alter_package_packagetype.py | 18 +++ .../templates/packages/package_detail.html | 9 +- .../packages/package_name_detail.html | 11 +- .../templates/packages/package_table.html | 5 +- packages/utils.py | 3 +- packages/views.py | 10 +- sbin/patchman | 3 +- security/templates/security/cve_detail.html | 19 +++- security/views.py | 6 +- 29 files changed, 413 insertions(+), 107 deletions(-) create mode 100644 errata/migrations/0004_rename_packages_erratum_fixed_packages.py create 
mode 100644 errata/migrations/0005_erratum_affected_packages_and_more.py create mode 100644 errata/migrations/0006_alter_erratum_options.py create mode 100644 errata/migrations/0007_alter_erratum_fixed_packages.py create mode 100644 packages/migrations/0005_alter_package_packagetype.py diff --git a/errata/admin.py b/errata/admin.py index 7bffc909..88190ff6 100644 --- a/errata/admin.py +++ b/errata/admin.py @@ -19,7 +19,7 @@ class ErratumAdmin(admin.ModelAdmin): - readonly_fields = ('packages', 'references') + readonly_fields = ('affected_packages', 'fixed_packages', 'references') admin.site.register(Erratum, ErratumAdmin) diff --git a/errata/migrations/0004_rename_packages_erratum_fixed_packages.py b/errata/migrations/0004_rename_packages_erratum_fixed_packages.py new file mode 100644 index 00000000..770ed814 --- /dev/null +++ b/errata/migrations/0004_rename_packages_erratum_fixed_packages.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.19 on 2025-03-06 04:41 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('errata', '0003_delete_erratumreference_alter_erratum_references'), + ] + + operations = [ + migrations.RenameField( + model_name='erratum', + old_name='packages', + new_name='fixed_packages', + ), + ] diff --git a/errata/migrations/0005_erratum_affected_packages_and_more.py b/errata/migrations/0005_erratum_affected_packages_and_more.py new file mode 100644 index 00000000..f5cc9571 --- /dev/null +++ b/errata/migrations/0005_erratum_affected_packages_and_more.py @@ -0,0 +1,24 @@ +# Generated by Django 4.2.19 on 2025-03-06 05:23 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('packages', '0004_alter_package_options_alter_packagecategory_options_and_more'), + ('errata', '0004_rename_packages_erratum_fixed_packages'), + ] + + operations = [ + migrations.AddField( + model_name='erratum', + name='affected_packages', + 
field=models.ManyToManyField(blank=True, related_name='affected_by_erratum', to='packages.package'), + ), + migrations.AlterField( + model_name='erratum', + name='fixed_packages', + field=models.ManyToManyField(blank=True, related_name='fixed_by_erratum', to='packages.package'), + ), + ] diff --git a/errata/migrations/0006_alter_erratum_options.py b/errata/migrations/0006_alter_erratum_options.py new file mode 100644 index 00000000..22cad601 --- /dev/null +++ b/errata/migrations/0006_alter_erratum_options.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-03-07 03:06 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('errata', '0005_erratum_affected_packages_and_more'), + ] + + operations = [ + migrations.AlterModelOptions( + name='erratum', + options={'ordering': ['-issue_date', 'name'], 'verbose_name': 'Erratum', 'verbose_name_plural': 'Errata'}, + ), + ] diff --git a/errata/migrations/0007_alter_erratum_fixed_packages.py b/errata/migrations/0007_alter_erratum_fixed_packages.py new file mode 100644 index 00000000..5cf9ec56 --- /dev/null +++ b/errata/migrations/0007_alter_erratum_fixed_packages.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.19 on 2025-03-10 23:49 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('packages', '0005_alter_package_packagetype'), + ('errata', '0006_alter_erratum_options'), + ] + + operations = [ + migrations.AlterField( + model_name='erratum', + name='fixed_packages', + field=models.ManyToManyField(blank=True, related_name='provides_fix_in_erratum', to='packages.package'), + ), + ] diff --git a/errata/models.py b/errata/models.py index c8614df7..b10daf4d 100644 --- a/errata/models.py +++ b/errata/models.py @@ -14,15 +14,19 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see +import json + from django.db import models from django.urls import reverse from django.db import IntegrityError from packages.models import Package, PackageUpdate +from packages.utils import find_evr, get_matching_packages from errata.managers import ErratumManager from security.models import CVE, Reference from security.utils import get_or_create_cve, get_or_create_reference from patchman.signals import error_message +from util import get_url class Erratum(models.Model): @@ -31,7 +35,8 @@ class Erratum(models.Model): e_type = models.CharField(max_length=255) issue_date = models.DateTimeField() synopsis = models.CharField(max_length=255) - packages = models.ManyToManyField(Package, blank=True) + affected_packages = models.ManyToManyField(Package, blank=True, related_name='affected_by_erratum') + fixed_packages = models.ManyToManyField(Package, blank=True, related_name='provides_fix_in_erratum') from operatingsystems.models import OSRelease osreleases = models.ManyToManyField(OSRelease, blank=True) cves = models.ManyToManyField(CVE, blank=True) @@ -46,7 +51,8 @@ class Meta: def __str__(self): text = f'{self.name} ({self.e_type}), {self.cves.count()} related CVEs, ' - text += f'affecting {self.packages.count()} packages and {self.osreleases.count()} OS Releases' + text += f'affecting {self.osreleases.count()} OS Releases, ' + text += f'providing {self.fixed_packages.count()} fixed Packages' return text def get_absolute_url(self): @@ -54,25 +60,96 @@ def get_absolute_url(self): def scan_for_security_updates(self): if self.e_type == 'security': - for package in self.packages.all(): + for package in self.fixed_packages.all(): affected_updates = PackageUpdate.objects.filter( newpackage=package, security=False, ) for affected_update in affected_updates: - if not affected_update.security: - affected_update.security = True - try: - affected_update.save() - except IntegrityError as e: - error_message.send(sender=None, text=e) - # a version of this update already 
exists that is - # marked as a security update, so delete this one - affected_update.delete() - - def add_packages(self, packages): + affected_update.security = True + try: + affected_update.save() + except IntegrityError as e: + error_message.send(sender=None, text=e) + # a version of this update already exists that is + # marked as a security update, so delete this one + affected_update.delete() + for package in self.affected_packages.all(): + affected_updates = PackageUpdate.objects.filter( + oldpackage=package, + security=False, + ) + for affected_update in affected_updates: + affected_update.security = True + try: + affected_update.save() + except IntegrityError as e: + error_message.send(sender=None, text=e) + # a version of this update already exists that is + # marked as a security update, so delete this one + affected_update.delete() + + def fetch_osv_dev_data(self): + osv_dev_url = f'https://api.osv.dev/v1/vulns/{self.name}' + res = get_url(osv_dev_url) + if res.status_code == 404: + error_message.send(sender=None, text=f'404 - Skipping {self.name} - {osv_dev_url}') + return + data = res.content + osv_dev_json = json.loads(data) + self.parse_osv_dev_data(osv_dev_json) + + def parse_osv_dev_data(self, osv_dev_json): + name = osv_dev_json.get('id') + if name != self.name: + error_message.send(sender=None, text=f'Erratum name mismatch - {self.name} != {name}') + return + related = osv_dev_json.get('related') + if related: + for vuln in related: + if vuln.startswith('CVE'): + self.add_cve(vuln) + affected = osv_dev_json.get('affected') + if not affected: + return + affected_packages = set() + for package in affected: + fixed_packages = set() + ranges = package.get('ranges') + for affected_range in ranges: + for event in affected_range.get('events'): + fixed_version = event.get('fixed') + if fixed_version: + epoch, ver, rel = find_evr(fixed_version) + matching_packages = self.fixed_packages.filter(epoch=epoch, version=ver, release=rel).all() + for match in 
matching_packages: + fixed_packages.add(match) + affected_versions = package.get('versions') + if not affected_versions: + continue + for package in fixed_packages: + for version in affected_versions: + epoch, ver, rel = find_evr(version) + matching_packages = get_matching_packages( + name=package.name, + epoch=epoch, + version=ver, + release=rel, + arch=package.arch, + p_type=package.packagetype, + ) + for match in matching_packages: + affected_packages.add(match) + self.add_affected_packages(affected_packages) + + def add_fixed_packages(self, packages): + for package in packages: + self.fixed_packages.add(package) + self.save() + + def add_affected_packages(self, packages): for package in packages: - self.packages.add(package) + self.affected_packages.add(package) def add_cve(self, cve_id): """ Add a CVE to an Erratum object diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index eaed150c..7094b57b 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -130,20 +130,23 @@ def add_alma_erratum_references(e, advisory): def add_alma_erratum_packages(e, advisory): """ Parse and add packages for Alma Linux errata """ + fixed_packages = set() packages = advisory.get('packages') for package in packages: package_name = package.get('filename') if package_name: name, epoch, ver, rel, dist, arch = parse_package_string(package_name) p_type = Package.RPM - pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) - e.packages.add(pkg) + fixed_package = get_or_create_package(name, epoch, ver, rel, arch, p_type) + fixed_packages.add(fixed_package) + e.add_fixed_packages(fixed_packages) def add_alma_erratum_modules(e, advisory): """ Parse and add modules for Alma Linux errata """ from modules.utils import get_matching_modules + fixed_packages = set() modules = advisory.get('modules') for module in modules: name = module.get('name') @@ -153,6 +156,7 @@ def add_alma_erratum_modules(e, advisory): version = 
module.get('version') matching_modules = get_matching_modules(name, stream, version, context, arch) for match in matching_modules: - for package in match.packages.all(): - match.packages.add(package) - e.packages.add(package) + for fixed_package in match.packages.all(): + match.packages.add(fixed_package) + fixed_packages.add(fixed_package) + e.add_fixed_packages(fixed_packages) diff --git a/errata/sources/distros/arch.py b/errata/sources/distros/arch.py index 4821ca13..c0330dc8 100644 --- a/errata/sources/distros/arch.py +++ b/errata/sources/distros/arch.py @@ -20,7 +20,7 @@ from operatingsystems.utils import get_or_create_osrelease from patchman.signals import error_message, pbar_start, pbar_update from packages.models import Package -from packages.utils import find_evr, get_matching_packages +from packages.utils import find_evr, get_matching_packages, get_or_create_package from util import get_url, fetch_content @@ -149,17 +149,65 @@ def add_arch_erratum_packages(e, advisory): data = res.content group = json.loads(data) packages = group.get('packages') + affected = group.get('affected') - epoch, version, release = find_evr(affected) + affected_packages = find_arch_affected_packages(affected, packages) + e.add_affected_packages(affected_packages) + + fixed = group.get('fixed') + fixed_packages = find_arch_fixed_packages(fixed, packages) + e.add_fixed_packages(fixed_packages) + + add_arch_erratum_group_references(e, group) + add_arch_erratum_group_cves(e, group) + + +def find_arch_affected_packages(affected, packages): + """ Find Arch Linux Erratum Affected Packages + This checks existing packages for matches and does not + require an architecture + """ package_type = Package.ARCH + epoch, version, release = find_evr(affected) + affected_packages = set() for package in packages: matching_packages = get_matching_packages(package, epoch, version, release, package_type) - if matching_packages: - for match in matching_packages: - e.packages.add(match) + for match in 
matching_packages: + affected_packages.add(match) + return affected_packages + + +def find_arch_fixed_packages(fixed, packages): + """ Find Arch Linux Erratum Fixed Packages + This adds new packages with arch x86_64 only + """ + package_type = Package.ARCH + epoch, version, release = find_evr(fixed) + fixed_packages = set() + for package in packages: + fixed_package = get_or_create_package( + name=package, + epoch=epoch, + version=version, + release=release, + arch='x86_64', + p_type=package_type + ) + fixed_packages.add(fixed_package) + return fixed_packages + + +def add_arch_erratum_group_references(e, group): + """ Add Arch Linux Erratum References + """ references = group.get('references') for reference in references: e.add_reference('Link', reference) + + +def add_arch_erratum_group_cves(e, group): + """ Add Arch Linux Erratum CVEs + """ cve_ids = group.get('issues') for cve_id in cve_ids: e.add_cve(cve_id) diff --git a/errata/sources/distros/centos.py b/errata/sources/distros/centos.py index b8e7e561..eefb2b88 100644 --- a/errata/sources/distros/centos.py +++ b/errata/sources/distros/centos.py @@ -115,6 +115,7 @@ def add_centos_erratum_references(e, references): def parse_centos_errata_children(e, children): """ Parse errata children to obtain architecture, release and packages """ + fixed_packages = set() for c in children: if c.tag == 'os_arch': pass @@ -130,8 +131,9 @@ def parse_centos_errata_children(e, children): release = match.group(1) if accepted_centos_release([release]): p_type = Package.RPM - pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) - e.packages.add(pkg) + fixed_package = get_or_create_package(name, epoch, ver, rel, arch, p_type) + fixed_packages.add(fixed_package) + e.add_fixed_packages(fixed_packages) def get_centos_erratum_releases(releases_xml): diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index 16bd906c..5eb0ea91 100644 --- a/errata/sources/distros/debian.py +++ 
b/errata/sources/distros/debian.py @@ -213,7 +213,7 @@ def process_debian_erratum(erratum, accepted_codenames): osrelease = OSRelease.objects.get(codename=codename) e.osreleases.add(osrelease) for package in packages: - process_debian_erratum_affected_packages(e, package) + process_debian_erratum_fixed_packages(e, package) except Exception as exc: error_message.send(sender=None, text=exc) @@ -297,14 +297,15 @@ def create_debian_os_releases(codename_to_version): get_or_create_osrelease(name=osrelease_name, codename=codename) -def process_debian_erratum_affected_packages(e, package_data): - """ Process packages affected by Debian errata +def process_debian_erratum_fixed_packages(e, package_data): + """ Process packages fixed in a Debian errata """ source_package, source_version = package_data epoch, ver, rel = find_evr(source_version) package_list = get_debian_dsc_package_list(source_package, source_version) if not package_list: return + fixed_packages = set() for line in package_list.splitlines(): if not line: continue @@ -314,8 +315,9 @@ def process_debian_erratum_affected_packages(e, package_data): name = line_parts[0] arches = process_debian_dsc_arches(line_parts[4]) for arch in arches: - package = get_or_create_package(name, epoch, ver, rel, arch, Package.DEB) - e.packages.add(package) + fixed_package = get_or_create_package(name, epoch, ver, rel, arch, Package.DEB) + fixed_packages.add(fixed_package) + e.add_fixed_packages(fixed_packages) def process_debian_dsc_arches(arches): diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py index 5579b195..6231e9e9 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -227,13 +227,14 @@ def add_rocky_erratum_packages(e, advisory): """ from modules.utils import get_matching_modules packages = advisory.get('packages') + fixed_packages = set() for package in packages: package_name = package.get('nevra') if package_name: name, epoch, ver, rel, dist, arch = 
parse_package_string(package_name) p_type = Package.RPM - pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) - e.packages.add(pkg) + fixed_package = get_or_create_package(name, epoch, ver, rel, arch, p_type) + fixed_packages.add(fixed_package) module_name = package.get('module_name') module_context = package.get('module_context') module_stream = package.get('module_stream') @@ -247,4 +248,5 @@ def add_rocky_erratum_packages(e, advisory): arch, ) for match in matching_modules: - match.packages.add(pkg) + match.packages.add(fixed_package) + e.add_fixed_packages(fixed_packages) diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py index 5490909e..8580e12b 100644 --- a/errata/sources/distros/ubuntu.py +++ b/errata/sources/distros/ubuntu.py @@ -23,8 +23,8 @@ from operatingsystems.models import OSRelease, OSVariant from operatingsystems.utils import get_or_create_osrelease -from packages.models import Package, PackageName -from packages.utils import get_or_create_package, parse_package_string, find_evr +from packages.models import Package +from packages.utils import get_or_create_package, parse_package_string, find_evr, get_matching_packages from util import get_url, fetch_content, get_sha256, bunzip2, get_setting_of_type from patchman.signals import error_message, pbar_start, pbar_update @@ -162,8 +162,8 @@ def add_ubuntu_erratum_packages(e, advisory): """ Add Ubuntu erratum packages """ affected_releases = advisory.get('releases') - package_names = PackageName.objects.all() p_type = Package.DEB + fixed_packages = set() for release, packages in affected_releases.items(): if release in get_accepted_ubuntu_codenames(): arches = packages.get('archs') @@ -174,26 +174,25 @@ def add_ubuntu_erratum_packages(e, advisory): package_name = os.path.basename(path) if package_name.endswith('.deb'): name, epoch, ver, rel, dist, arch = parse_package_string(package_name) - pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) - 
e.packages.add(pkg) + fixed_package = get_or_create_package(name, epoch, ver, rel, arch, p_type) + fixed_packages.add(fixed_package) else: binaries = packages.get('binaries') allbinaries = packages.get('allbinaries') for package_name, package_data in (binaries | allbinaries).items(): + # we don't know the architecture so this requires the packages to + # exist (e.g. on a host or a mirror) to be captured epoch, ver, rel = find_evr(package_data.get('version')) - try: - p_name = package_names.get(name=package_name) - except PackageName.DoesNotExist: - continue - matching_packages = Package.objects.filter( - name=p_name, + matching_packages = get_matching_packages( + name=package_name, epoch=epoch, version=ver, release=rel, - packagetype=p_type, + p_type=p_type, ) - for package in matching_packages: - e.packages.add(package) + for fixed_package in matching_packages: + fixed_packages.add(fixed_package) + e.add_fixed_packages(fixed_packages) def get_accepted_ubuntu_codenames(): diff --git a/errata/sources/repos/yum.py b/errata/sources/repos/yum.py index 2f672832..a7a7cb5a 100644 --- a/errata/sources/repos/yum.py +++ b/errata/sources/repos/yum.py @@ -222,4 +222,4 @@ def add_updateinfo_packages(e, update): p_type=Package.RPM, ) packages.add(package) - e.add_packages(packages) + e.add_fixed_packages(packages) diff --git a/errata/templates/errata/erratum_detail.html b/errata/templates/errata/erratum_detail.html index 71e5d2e1..af12369b 100644 --- a/errata/templates/errata/erratum_detail.html +++ b/errata/templates/errata/erratum_detail.html @@ -10,7 +10,8 @@
    @@ -21,7 +22,8 @@ Type {{ erratum.e_type }} Published Date{{ erratum.issue_date|date|default_if_none:'' }} Synopsis {{ erratum.synopsis }} - Packages Affected {{ erratum.packages.count }} + Packages Affected {{ erratum.affected_packages.count }} + Packages Fixed {{ erratum.fixed_packages.count }} OS Releases Affected @@ -56,16 +58,25 @@
    -
    +
    -
    -
    - {% for package in erratum.packages.select_related %} - - {{ package }} - - {% endfor %} -
    +
    + {% for package in erratum.affected_packages.all %} + + {{ package }} + + {% endfor %} +
    +
    +
    +
    +
    +
    + {% for package in erratum.fixed_packages.all %} + + {{ package }} + + {% endfor %}
    diff --git a/errata/templates/errata/erratum_table.html b/errata/templates/errata/erratum_table.html index 2ea388b9..c319cbb5 100644 --- a/errata/templates/errata/erratum_table.html +++ b/errata/templates/errata/erratum_table.html @@ -2,11 +2,12 @@ - + - + + @@ -19,10 +20,11 @@ - - - - + + + + + {% endfor %} diff --git a/errata/utils.py b/errata/utils.py index 4c915535..8437312c 100644 --- a/errata/utils.py +++ b/errata/utils.py @@ -16,6 +16,7 @@ from util import tz_aware_datetime from errata.models import Erratum +from packages.models import PackageUpdate from patchman.signals import pbar_start, pbar_update, warning_message @@ -65,3 +66,19 @@ def mark_errata_security_updates(): for i, e in enumerate(Erratum.objects.all()): pbar_update.send(sender=None, index=i + 1) e.scan_for_security_updates() + + +def scan_package_updates_for_affected_packages(): + """ Scan PackageUpdates for packages affected by errata + """ + for pu in PackageUpdate.objects.all(): + for e in pu.newpackage.provides_fix_in_erratum.all(): + e.affected_packages.add(pu.oldpackage) + + +def add_errata_affected_packages(): + elen = Erratum.objects.count() + pbar_start.send(sender=None, ptext=f'Adding affected packages to {elen} Errata', plen=elen) + for i, e in enumerate(Erratum.objects.all()): + pbar_update.send(sender=None, index=i + 1) + e.fetch_osv_dev_data() diff --git a/errata/views.py b/errata/views.py index cc5eabcb..42d12f71 100644 --- a/errata/views.py +++ b/errata/views.py @@ -41,11 +41,17 @@ def erratum_list(request): errata = errata.filter(cves__cve_id=request.GET['cve_id']) if 'package_id' in request.GET: - errata = errata.filter(packages=request.GET['package_id']) + if request.GET['type'] == 'affected': + errata = errata.filter(affected_packages=request.GET['package_id']) + elif request.GET['type'] == 'fixed': + errata = errata.filter(fixed_packages=request.GET['package_id']) if 'osrelease_id' in request.GET: errata = errata.filter(osreleases=request.GET['osrelease_id']) + if 
'host' in request.GET: + errata = errata.filter(host__hostname=request.GET['host']) + if 'search' in request.GET: terms = request.GET['search'].lower() query = Q() diff --git a/hosts/models.py b/hosts/models.py index 186d6b91..a6c451b5 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -28,6 +28,7 @@ from arch.models import MachineArchitecture from domains.models import Domain +from errata.models import Erratum from hosts.utils import update_rdns from modules.models import Module from operatingsystems.models import OSVariant @@ -57,6 +58,7 @@ class Host(models.Model): host_repos_only = models.BooleanField(default=True) tags = TaggableManager(blank=True) updated_at = models.DateTimeField(default=timezone.now) + errata = models.ManyToManyField(Erratum, blank=True) from hosts.managers import HostManager objects = HostManager() @@ -240,19 +242,23 @@ def find_host_repo_updates(self, host_packages, repo_packages): if pu_is_module_package: if not pu_in_enabled_modules: continue - if highest_package.compare_version(pu) == -1 \ - and package.compare_version(pu) == -1: - - if priority is not None: - # proceed only if the package is from a repo with a - # priority and that priority is >= the repo priority - pu_best_repo = find_best_repo(pu, hostrepos) - if pu_best_repo: - pu_priority = pu_best_repo.priority - if pu_priority >= priority: - highest_package = pu - else: - highest_package = pu + if package.compare_version(pu) == -1: + # package updates that are fixed by erratum (may already be superceded by another update) + errata = pu.provides_fix_in_erratum.all() + if errata: + for erratum in errata: + self.errata.add(erratum) + if highest_package.compare_version(pu) == -1: + if priority is not None: + # proceed only if the package is from a repo with a + # priority and that priority is >= the repo priority + pu_best_repo = find_best_repo(pu, hostrepos) + if pu_best_repo: + pu_priority = pu_best_repo.priority + if pu_priority >= priority: + highest_package = pu + else: + 
highest_package = pu if highest_package != package: uid = self.process_update(package, highest_package) @@ -282,9 +288,14 @@ def find_osrelease_repo_updates(self, host_packages, repo_packages): if pu_is_module_package: if not pu_in_enabled_modules: continue - if highest_package.compare_version(pu) == -1 \ - and package.compare_version(pu) == -1: - highest_package = pu + if package.compare_version(pu) == -1: + # package updates that are fixed by erratum (may already be superceded by another update) + errata = pu.provides_fix_in_erratum.all() + if errata: + for erratum in errata: + self.errata.add(erratum) + if highest_package.compare_version(pu) == -1: + highest_package = pu if highest_package != package: uid = self.process_update(package, highest_package) diff --git a/hosts/templates/hosts/host_detail.html b/hosts/templates/hosts/host_detail.html index ea1267a5..f12bf22b 100644 --- a/hosts/templates/hosts/host_detail.html +++ b/hosts/templates/hosts/host_detail.html @@ -40,9 +40,10 @@ + + - diff --git a/hosts/templates/hosts/host_table.html b/hosts/templates/hosts/host_table.html index da3f760e..bebb7723 100644 --- a/hosts/templates/hosts/host_table.html +++ b/hosts/templates/hosts/host_table.html @@ -4,8 +4,9 @@ + - + @@ -16,6 +17,7 @@ + diff --git a/packages/migrations/0005_alter_package_packagetype.py b/packages/migrations/0005_alter_package_packagetype.py new file mode 100644 index 00000000..06a0ecec --- /dev/null +++ b/packages/migrations/0005_alter_package_packagetype.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.19 on 2025-03-10 17:36 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('packages', '0004_alter_package_options_alter_packagecategory_options_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='package', + name='packagetype', + field=models.CharField(blank=True, choices=[('R', 'rpm'), ('D', 'deb'), ('A', 'pkgbuild'), ('G', 'ebuild'), ('U', 'unknown')], max_length=1, 
null=True), + ), + ] diff --git a/packages/templates/packages/package_detail.html b/packages/templates/packages/package_detail.html index 112eacf4..a982c8af 100644 --- a/packages/templates/packages/package_detail.html +++ b/packages/templates/packages/package_detail.html @@ -16,9 +16,9 @@ - - - + + + @@ -26,7 +26,8 @@ - + +
    IDID TypePublished DatePublished Date Synopsis Packages AffectedPackages Fixed OS Releases Affected CVEs References{{ erratum.e_type }} {{ erratum.issue_date|date|default_if_none:'' }} {{ erratum.synopsis }}{{ erratum.packages.count }}{{ erratum.osreleases.count }}{{ erratum.cves.count }}{{ erratum.references.count }}{% with count=erratum.affected_packages.count %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %}{% with count=erratum.fixed_packages.count %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %}{% with count=erratum.osreleases.count %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %}{% with count=erratum.cves.count %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %}{% with count=erratum.references.count %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %}
    Updated {{ host.updated_at }}
    Last Report {{ host.lastreport }}
    Packages Installed {{ host.packages.count}}
    Updates Available {{ host.updates.count }}
    Errata{{ host.errata.count }}
    Reboot Required {{ host.reboot_required }}
    Packages Installed {{ host.packages.count}}
    Repos In Use{% if host.host_repos_only %}Host Repos{% else %}Host and OS Release Repos{% endif %}
    Last 3 reports
    Hostname UpdatesAffected by Errata Running KernelOS VariantOS Variant Last Report Reboot Status
    {{ host }} {% with count=host.get_num_security_updates %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %} {% with count=host.get_num_bugfix_updates %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %}{% with count=host.errata.count %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %} {{ host.kernel }} {{ host.osvariant }} {{ host.lastreport }}{% report_alert host.lastreport %}Release Arch TypeRepositoriesHostsErrataRepositoriesHostsErrata
    {{ package.epoch }} {{ package.arch }} {{ package.get_packagetype_display }} Available from {{ package.repo_count }} Repositories Installed on {{ package.host_set.count }} Hosts Affected by {{ package.erratum_set.count }} Errata Affected by {{ package.affected_by_erratum.count }} Errata Provides fix in {{ package.provides_fix_in_erratum.count }} Errata
    See All Versions of this Package
    diff --git a/packages/templates/packages/package_name_detail.html b/packages/templates/packages/package_name_detail.html index 7bba3306..5cef00ec 100644 --- a/packages/templates/packages/package_name_detail.html +++ b/packages/templates/packages/package_name_detail.html @@ -12,23 +12,26 @@ {% if allversions %} + - - - + + + {% for version in allversions %} + - + + {% endfor %}
    Package Epoch Version Release Arch TypeRepositoriesHostsErrataRepositoriesHostsErrata
    {{ version }} {{ version.epoch }} {{ version.version }} {{ version.release }} {{ version.arch }} {{ version.get_packagetype_display }} Available from {{ version.repo_count }} Repositories Installed on {{ version.host_set.count }} Hosts Affected by {{ version.erratum_set.count }} Errata Affected by {{ version.affected_by_erratum.count }} Errata Provides fix in {{ version.provides_fix_in_erratum.count }} Errata
    diff --git a/packages/templates/packages/package_table.html b/packages/templates/packages/package_table.html index 9163130b..06316521 100644 --- a/packages/templates/packages/package_table.html +++ b/packages/templates/packages/package_table.html @@ -10,7 +10,7 @@ Type Repositories Hosts - Errata + Errata @@ -22,7 +22,8 @@ {{ package.arch }} {{ package.get_packagetype_display }} Available from {{ package.repo_count }} Repositories Installed on {{ package.host_set.count }} Hosts - Affected by {{ package.erratum_set.count }} Errata + Affected by {{ package.affected_by_erratum.count }} Errata + Provides fix in {{ package.provides_fix_in_erratum.count }} Errata {% endfor %} diff --git a/packages/utils.py b/packages/utils.py index f14884a9..d4399dcc 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -286,7 +286,8 @@ def clean_packages(remove_duplicates=False): packages = Package.objects.filter( mirror__isnull=True, host__isnull=True, - erratum__isnull=True, + affected_by_erratum__isnull=True, + provides_fix_in_erratum__isnull=True, module__isnull=True, ) plen = packages.count() diff --git a/packages/views.py b/packages/views.py index 42a2a345..cd53fa6e 100644 --- a/packages/views.py +++ b/packages/views.py @@ -39,13 +39,19 @@ def package_list(request): packages = packages.filter(packagetype=request.GET['packagetype']).distinct() if 'erratum_id' in request.GET: - packages = packages.filter(erratum=request.GET['erratum_id']).distinct() + if request.GET['type'] == 'affected': + packages = packages.filter(affected_by_erratum=request.GET['erratum_id']).distinct() + elif request.GET['type'] == 'fixed': + packages = packages.filter(provides_fix_in_erratum=request.GET['erratum_id']).distinct() if 'host' in request.GET: packages = packages.filter(host__hostname=request.GET['host']).distinct() if 'cve_id' in request.GET: - packages = packages.filter(erratum__cves__cve_id=request.GET['cve_id']).distinct() + if request.GET['type'] == 'affected': + packages = 
packages.filter(affected_by_erratum__cves__cve_id=request.GET['cve_id']).distinct() + elif request.GET['type'] == 'fixed': + packages = packages.filter(provides_fix_in_erratum__cves__cve_id=request.GET['cve_id']).distinct() if 'mirror_id' in request.GET: packages = packages.filter(mirror=request.GET['mirror_id']).distinct() diff --git a/sbin/patchman b/sbin/patchman index 36bf7c7e..47a449d0 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -30,7 +30,7 @@ from django.conf import settings # noqa django_setup() from arch.utils import clean_architectures -from errata.utils import mark_errata_security_updates +from errata.utils import mark_errata_security_updates, add_errata_affected_packages from errata.tasks import update_errata from hosts.models import Host from modules.utils import clean_modules @@ -523,6 +523,7 @@ def process_args(args): if args.update_errata: update_errata(args.erratum_type, args.force, args.repo) mark_errata_security_updates() + add_errata_affected_packages() showhelp = False if args.update_cves: update_cves(args.cve, args.fetch_nist_data) diff --git a/security/templates/security/cve_detail.html b/security/templates/security/cve_detail.html index 28ef831c..2fe6f8e4 100644 --- a/security/templates/security/cve_detail.html +++ b/security/templates/security/cve_detail.html @@ -12,7 +12,8 @@
    @@ -46,7 +47,8 @@ {% endfor %} - Affected Packages{{ packages|length }} + Affected Packages{{ affected_packages|length }} + Fixed Packages{{ fixed_packages|length }} Errata{{ cve.erratum_set.count }} OSes Affected @@ -79,9 +81,18 @@
    -
    +
    - {% for package in packages %} + {% for package in affected_packages %} + + {{ package }} + + {% endfor %} +
    +
    +
    +
    + {% for package in fixed_packages %} {{ package }} diff --git a/security/views.py b/security/views.py index 8a885e76..58a686b5 100644 --- a/security/views.py +++ b/security/views.py @@ -113,13 +113,15 @@ def cve_list(request): @login_required def cve_detail(request, cve_id): cve = get_object_or_404(CVE, cve_id=cve_id) - packages = Package.objects.filter(erratum__in=cve.erratum_set.all()).distinct() + affected_packages = Package.objects.filter(affected_by_erratum__in=cve.erratum_set.all()).distinct() + fixed_packages = Package.objects.filter(provides_fix_in_erratum__in=cve.erratum_set.all()).distinct() osreleases = OSRelease.objects.filter(erratum__in=cve.erratum_set.all()).distinct() references = Reference.objects.filter(Q(erratum__in=cve.erratum_set.all()) | Q(cve=cve)).distinct() return render(request, 'security/cve_detail.html', {'cve': cve, - 'packages': packages, + 'affected_packages': affected_packages, + 'fixed_packages': fixed_packages, 'osreleases': osreleases, 'references': references, }) From 1e7790a45be0bcebddfd02ffef578c4c003c76cb Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Mar 2025 21:13:50 -0400 Subject: [PATCH 167/199] tweak yum reference handling --- errata/sources/distros/alma.py | 2 +- errata/sources/repos/yum.py | 27 +++++++++++++++------------ security/utils.py | 17 ++++++++++++----- 3 files changed, 28 insertions(+), 18 deletions(-) diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index 7094b57b..193127e4 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -123,7 +123,7 @@ def add_alma_erratum_references(e, advisory): e.add_cve(ref_id) continue if ref_type == 'self': - ref_type = ref_id.split('-')[0].upper() + ref_type = 'Alma Advisory' e.add_reference(ref_type, er_url) diff --git a/errata/sources/repos/yum.py b/errata/sources/repos/yum.py index a7a7cb5a..242e6d1a 100644 --- a/errata/sources/repos/yum.py +++ b/errata/sources/repos/yum.py @@ -22,6 +22,7 @@ from 
packages.models import Package from packages.utils import get_or_create_package from patchman.signals import pbar_start, pbar_update, error_message +from security.models import Reference from util import extract, get_url @@ -68,21 +69,21 @@ def process_updateinfo_erratum(update): """ from errata.utils import get_or_create_erratum e_type = update.attrib.get('type') - name = update.find('id').text + e_name = update.find('id').text + name, ref_type, urls = get_distro_data(e_name, e_type) synopsis = update.find('title').text issue_date = update.find('issued').attrib.get('date') e, created = get_or_create_erratum(name, e_type, issue_date, synopsis) - add_updateinfo_erratum_references(e, update) + add_updateinfo_erratum_references(e, update, ref_type, urls) add_updateinfo_packages(e, update) update.clear() -def add_distro_references(e): - """ Adds distro-specific references to an Erratum +def get_distro_data(name, e_type): + """ Adds distro-specific names and references to an Erratum """ urls = [] - name = e.name - e_type = e.erratum_type + ref_type = 'Link' if name.startswith('ALAS'): ref_type = 'Amazon Advisory' if name[4] == '-': @@ -94,7 +95,7 @@ def add_distro_references(e): update_path = 'AL2023/' name = name.replace('ALAS2023', 'ALAS') urls.append(f'https://alas.aws.amazon.com/{update_path}{name}.html') - elif name.startswith('openSUSE-SLE'): + elif name.startswith('openSUSE-SLE') or name.startswith('openSUSE'): ref_type = 'SUSE Advisory' update_type = e_type[0].upper() + 'U' year = name.split('-')[-2] @@ -106,6 +107,8 @@ def add_distro_references(e): url_path = f'{year}/{prefix}-{year}{number}-' for i in range(1, 10): url = f'{url_root}{url_path}{i}' + if Reference.objects.filter(url=url).exists(): + continue res = get_url(url) if res.status_code != 200: break @@ -117,15 +120,15 @@ def add_distro_references(e): ref_type = 'Rocky Advisory' urls.append(f'https://errata.rockylinux.org/{name}') urls.append(f'https://apollo.build.resf.org/{name}') - if urls: - for url 
in urls: - e.add_reference(ref_type, url) + return name, ref_type, urls -def add_updateinfo_erratum_references(e, update): +def add_updateinfo_erratum_references(e, update, ref_type, urls): """ Adds references to an Erratum """ - add_distro_references(e) + if urls: + for url in urls: + e.add_reference(ref_type, url) references = update.find('references') for reference in references.findall('reference'): if reference.attrib.get('type') == 'cve': diff --git a/security/utils.py b/security/utils.py index 37e2311f..343442a7 100644 --- a/security/utils.py +++ b/security/utils.py @@ -136,13 +136,20 @@ def fixup_reference(ref): return ref -def get_or_create_reference(ref_type, url): +def get_or_create_reference(ref_type, url, update_ref_type=False): """ Get or create a Reference object. """ reference = fixup_reference({'ref_type': ref_type, 'url': url}) if reference: - ref, created = Reference.objects.get_or_create( - ref_type=reference.get('ref_type'), - url=reference.get('url'), - ) + refs = Reference.objects.filter(url=reference.get('url')) + if refs: + ref = refs.first() + if ref.url != reference.get('url') and update_ref_type: + ref.ref_type = ref_type + ref.save() + else: + ref, created = Reference.objects.get_or_create( + ref_type=reference.get('ref_type'), + url=reference.get('url'), + ) return ref From f6d75679567629e4bbbf9c816a0cca52bbb65946 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Mar 2025 21:14:14 -0400 Subject: [PATCH 168/199] add osv.dev reference to errata detail page --- errata/templates/errata/erratum_detail.html | 1 + 1 file changed, 1 insertion(+) diff --git a/errata/templates/errata/erratum_detail.html b/errata/templates/errata/erratum_detail.html index af12369b..4738154e 100644 --- a/errata/templates/errata/erratum_detail.html +++ b/errata/templates/errata/erratum_detail.html @@ -46,6 +46,7 @@ References + {% for reference in erratum.references.all %} From 93dbdcfbf7b2f951cee26acbc350dcd5e24f7a2a Mon Sep 17 00:00:00 2001 From: Marcus 
Furlong Date: Tue, 11 Mar 2025 21:16:00 -0400 Subject: [PATCH 169/199] make packagestring distro-specific handling match package --- packages/models.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/packages/models.py b/packages/models.py index 137694c0..f4c9c59e 100644 --- a/packages/models.py +++ b/packages/models.py @@ -99,7 +99,7 @@ def __str__(self): else: rel = '' if self.packagetype == self.GENTOO: - return f'{self.category}/{self.name}_{epo}{self.version}{rel}_{self.arch}.{self.get_packagetype_display()}' + return f'{self.category}/{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' elif self.packagetype in [self.DEB, self.ARCH]: return f'{self.name}_{epo}{self.version}{rel}_{self.arch}.{self.get_packagetype_display()}' elif self.packagetype == self.RPM: @@ -195,10 +195,14 @@ def __str__(self): rel = f'-{self.release}' else: rel = '' - if self.packagetype == 'G': - return f'{self.category}/{self.name}-{epo}{self.version}{rel}-{self.arch}' + if self.packagetype == self.GENTOO: + return f'{self.category}/{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' + elif self.packagetype in [self.DEB, self.ARCH]: + return f'{self.name}_{epo}{self.version}{rel}_{self.arch}.{self.get_packagetype_display()}' + elif self.packagetype == self.RPM: + return f'{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' else: - return f'{self.name}-{epo}{self.version}{rel}-{self.arch}' + return f'{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' def __key(self): return (self.name, self.epoch, self.version, self.release, self.arch, self.packagetype, self.category) From 487a3ca5a122eb9f930ee23a9995024fee3435b4 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Mar 2025 21:16:14 -0400 Subject: [PATCH 170/199] add host errata migration --- hosts/migrations/0009_host_errata.py | 19 +++++++++++++++++++ 1 file changed, 19 
insertions(+) create mode 100644 hosts/migrations/0009_host_errata.py diff --git a/hosts/migrations/0009_host_errata.py b/hosts/migrations/0009_host_errata.py new file mode 100644 index 00000000..0f6dd1d4 --- /dev/null +++ b/hosts/migrations/0009_host_errata.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.19 on 2025-03-10 19:52 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('errata', '0006_alter_erratum_options'), + ('hosts', '0008_alter_host_options'), + ] + + operations = [ + migrations.AddField( + model_name='host', + name='errata', + field=models.ManyToManyField(blank=True, to='errata.erratum'), + ), + ] From e09037c1eccb8272b0613128d976235d6c2776eb Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Mar 2025 21:17:53 -0400 Subject: [PATCH 171/199] bump to latest 4.2 django --- hooks/zypper/patchman.py | 6 +++--- requirements.txt | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/hooks/zypper/patchman.py b/hooks/zypper/patchman.py index 5ee8aa46..14781565 100755 --- a/hooks/zypper/patchman.py +++ b/hooks/zypper/patchman.py @@ -25,12 +25,12 @@ class MyPlugin(Plugin): - def PLUGINBEGIN(self, headers, body): + def PLUGINBEGIN(self, headers, body): # noqa logging.info('PLUGINBEGIN') logging.debug(f'headers: {headers}') self.ack() - def PACKAGESETCHANGED(self, headers, body): + def PACKAGESETCHANGED(self, headers, body): # noqa logging.info('PACKAGESETCHANGED') logging.debug(f'headers: {headers}') print('Sending report to patchman server...') @@ -40,7 +40,7 @@ def PACKAGESETCHANGED(self, headers, body): os.system(command) self.ack() - def PLUGINEND(self, headers, body): + def PLUGINEND(self, headers, body): # noqa logging.info('PLUGINEND') logging.debug(f'headers: {headers}') self.ack() diff --git a/requirements.txt b/requirements.txt index 7b044b82..bdb9bca8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==4.2.19 +Django==4.2.20 
django-taggit==4.0.0 django-extensions==3.2.1 django-bootstrap3==23.1 From c1001dfea58c968291da39eefa42c28fc2a4c3ae Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Mar 2025 21:19:09 -0400 Subject: [PATCH 172/199] updates for cvss and cve handling --- requirements.txt | 1 + ..._cve_options_alter_cvss_unique_together.py | 21 ++++++ security/models.py | 70 ++++++++++++++++--- 3 files changed, 84 insertions(+), 8 deletions(-) create mode 100644 security/migrations/0006_alter_cve_options_alter_cvss_unique_together.py diff --git a/requirements.txt b/requirements.txt index bdb9bca8..19b0e59a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,3 +19,4 @@ celery==5.4.0 redis==5.2.1 django-celery-beat==2.7.0 tqdm==4.67.1 +cvss==3.4 diff --git a/security/migrations/0006_alter_cve_options_alter_cvss_unique_together.py b/security/migrations/0006_alter_cve_options_alter_cvss_unique_together.py new file mode 100644 index 00000000..515c5217 --- /dev/null +++ b/security/migrations/0006_alter_cve_options_alter_cvss_unique_together.py @@ -0,0 +1,21 @@ +# Generated by Django 4.2.19 on 2025-03-10 17:36 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0005_reference_cve_references'), + ] + + operations = [ + migrations.AlterModelOptions( + name='cve', + options={'ordering': ['-cve_id']}, + ), + migrations.AlterUniqueTogether( + name='cvss', + unique_together={('score', 'severity', 'version', 'vector_string')}, + ), + ] diff --git a/security/models.py b/security/models.py index 997fc26b..5704ae47 100644 --- a/security/models.py +++ b/security/models.py @@ -16,6 +16,7 @@ import json import re +from cvss import CVSS2, CVSS3, CVSS4 from time import sleep from django.db import models @@ -81,6 +82,9 @@ class CVSS(models.Model): version = models.DecimalField(max_digits=2, decimal_places=1) vector_string = models.CharField(max_length=255, blank=True, null=True) + class Meta: + unique_together = ['score', 
'severity', 'version', 'vector_string'] + def __str__(self): return f'{self.score} ({self.severity}) [{self.vector_string}]' @@ -109,8 +113,37 @@ def __str__(self): def get_absolute_url(self): return reverse('security:cve_detail', args=[self.cve_id]) + def add_cvss_score(self, vector_string, score=None, severity=None, version=None): + if not version: + version = vector_string.split('/')[0].replace('CVSS:', '') + if version.startswith('2'): + cvss_score = CVSS2(vector_string) + elif version.startswith('3'): + cvss_score = CVSS3(vector_string) + elif version.startswith('4'): + cvss_score = CVSS4(vector_string) + if not score: + score = cvss_score.base_score + if not severity: + severity = cvss_score.severities()[0] + existing = self.cvss_scores.filter(version=version, vector_string=vector_string) + if existing: + cvss = existing.first() + else: + cvss, created = CVSS.objects.get_or_create( + version=version, + vector_string=vector_string, + score=score, + severity=severity, + ) + cvss.score = score + cvss.severity = severity + cvss.save() + self.cvss_scores.add(cvss) + def fetch_cve_data(self, fetch_nist_data=False, sleep_secs=6): self.fetch_mitre_cve_data() + self.fetch_osv_dev_cve_data() if fetch_nist_data: self.fetch_nist_cve_data() sleep(sleep_secs) # rate limited, see https://nvd.nist.gov/developers/start-here @@ -125,6 +158,29 @@ def fetch_mitre_cve_data(self): cve_json = json.loads(data) self.parse_mitre_cve_data(cve_json) + def fetch_osv_dev_cve_data(self): + osv_dev_cve_url = f'https://api.osv.dev/v1/vulns/{self.cve_id}' + res = get_url(osv_dev_cve_url) + if res.status_code == 404: + error_message.send(sender=None, text=f'404 - Skipping {self.cve_id} - {osv_dev_cve_url}') + return + data = fetch_content(res, f'Fetching {self.cve_id} OSV data') + cve_json = json.loads(data) + self.parse_osv_dev_cve_data(cve_json) + + def parse_osv_dev_cve_data(self, cve_json): + from security.utils import get_or_create_reference + references = cve_json.get('references') + if 
references: + for reference in references: + ref_type = reference.get('type').capitalize() + url = reference.get('url') + get_or_create_reference(ref_type, url) + scores = cve_json.get('severity') + if scores: + for score in scores: + self.add_cvss_score(vector_string=score.get('score')) + def fetch_nist_cve_data(self): nist_cve_url = f'https://services.nvd.nist.gov/rest/json/cves/2.0?cveId={self.cve_id}' res = get_url(nist_cve_url) @@ -149,13 +205,12 @@ def parse_nist_cve_data(self, cve_json): for scores in score_data: for key, value in scores.items(): if key.startswith('cvssData'): - cvss_score, created = CVSS.objects.get_or_create( + self.add_cvss_score( + vector_string=value.get('vectorString'), score=value.get('baseScore'), severity=value.get('baseSeverity'), - version=value.get('version'), - vector_string=value.get('vectorString'), + version=value.get('version') ) - self.cvss_scores.add(cvss_score) references = cve.get('references') for reference in references: ref_type = 'Link' @@ -210,11 +265,10 @@ def parse_mitre_cve_data(self, cve_json): if metric.get('format') == 'CVSS': for key, value in metric.items(): if key.startswith('cvss'): - cvss_score, created = CVSS.objects.get_or_create( + self.add_cvss_score( + vector_string=value.get('vectorString'), score=value.get('baseScore'), severity=value.get('baseSeverity'), - version=value.get('version'), - vector_string=value.get('vectorString'), + version=value.get('version') ) - self.cvss_scores.add(cvss_score) self.save() From c3fe0e9e59365decb0e6567c35f896903062edf8 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 11 Mar 2025 22:31:56 -0400 Subject: [PATCH 173/199] allow search to keep existing filters --- util/templates/objectlist.html | 3 ++- util/templates/searchbar.html | 3 +++ util/templatetags/common.py | 14 ++++++++++++-- 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/util/templates/objectlist.html b/util/templates/objectlist.html index c7b61c92..f2b4fcf9 100644 --- 
a/util/templates/objectlist.html +++ b/util/templates/objectlist.html @@ -6,7 +6,8 @@
    - {% searchform terms %} + {% get_querydict request as querydict %} + {% searchform terms querydict %} {% gen_table page.object_list table_template %}
    {% object_count page %} diff --git a/util/templates/searchbar.html b/util/templates/searchbar.html index c3ce5327..9930a6b3 100644 --- a/util/templates/searchbar.html +++ b/util/templates/searchbar.html @@ -3,6 +3,9 @@
    + {% for key, value in querydict.items %} + + {% endfor %}
    diff --git a/util/templatetags/common.py b/util/templatetags/common.py index 0d5480fa..6737c438 100644 --- a/util/templatetags/common.py +++ b/util/templatetags/common.py @@ -85,6 +85,16 @@ def object_count(page): return f'{page.paginator.count} {name}' +@register.simple_tag +def get_querydict(request): + get = request.GET.copy() + if 'page' in get: + del get['page'] + if 'search' in get: + del get['search'] + return get + + @register.simple_tag def get_querystring(request): get = request.GET.copy() @@ -94,9 +104,9 @@ def get_querystring(request): @register.simple_tag -def searchform(terms): +def searchform(terms, querydict): template = get_template('searchbar.html') - html = template.render({'post_url': '.', 'terms': terms}) + html = template.render({'post_url': '.', 'terms': terms, 'querydict': querydict}) return html From 651e89e5894dbb36f8ea1f72de409afcf2cafaa1 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Fri, 21 Mar 2025 22:07:55 -0400 Subject: [PATCH 174/199] fix up rpm/deb package builds --- debian/control | 15 +++++++-------- etc/systemd/{ => system}/patchman-celery.service | 0 requirements.txt | 2 +- setup.cfg | 4 ++-- 4 files changed, 10 insertions(+), 11 deletions(-) rename etc/systemd/{ => system}/patchman-celery.service (100%) diff --git a/debian/control b/debian/control index 6db37085..f224e512 100644 --- a/debian/control +++ b/debian/control @@ -3,25 +3,24 @@ Section: python Priority: optional Maintainer: Marcus Furlong Uploaders: Marcus Furlong -Build-Depends: debhelper (>=13), python3 (>= 3.10), dh-python, dh-exec +Build-Depends: debhelper (>=13), python3 (>= 3.11), dh-python, dh-exec Standards-Version: 4.6.2 Homepage: https://github.com/furlongm/patchman Vcs-Git: git://github.com/furlongm/patchman Vcs-Browser: https://github.com/furlongm/patchman -X-Python3-Version: >= 3.10 +X-Python3-Version: >= 3.11 Package: python3-patchman Architecture: all Homepage: https://github.com/furlongm/patchman -Depends: ${misc:Depends}, python3 (>= 3.10), 
python3-django (>= 3.2), - python3-django-tagging, python3-django-extensions, python3-django-bootstrap3, - python3-djangorestframework, python3-django-filters, python3-debian, +Depends: ${misc:Depends}, python3 (>= 3.11), python3-django (>= 4.2), + python3-django-extensions, python3-django-bootstrap3, python3-cvss, + python3-djangorestframework, python3-debian, python3-django-filters, python3-rpm, python3-tqdm, python3-lxml, python3-defusedxml, python3-requests, python3-colorama, python3-magic, python3-humanize, python3-pip, python3-pymemcache, python3-yaml, memcached, libapache2-mod-wsgi-py3, - apache2, python3-django-taggit ->>>>>>> 922796e (switch from obsolete django-tagging to django-taggit) -Suggests: python3-django-celery, python3-mysqldb, python3-psycopg2 + apache2, python3-django-taggit, python3-django-celery, python3-django-celery-beat +Suggests: python3-mysqldb, python3-psycopg2 Description: Django-based patch status monitoring tool for linux systems. . Patchman provides a web interface for monitoring host package updates. 
diff --git a/etc/systemd/patchman-celery.service b/etc/systemd/system/patchman-celery.service similarity index 100% rename from etc/systemd/patchman-celery.service rename to etc/systemd/system/patchman-celery.service diff --git a/requirements.txt b/requirements.txt index 19b0e59a..1df0c5e1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,7 +9,7 @@ chardet==4.0.0 requests==2.32.3 colorama==0.4.4 djangorestframework==3.14.0 -django-filter==21.1 +django-filter==25.1 humanize==3.13.1 version-utils==0.3.0 python-magic==0.4.25 diff --git a/setup.cfg b/setup.cfg index 6d213cbf..59161406 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,8 +3,7 @@ doc_files = README.md AUTHORS COPYING INSTALL.md install-script = scripts/rpm-install.sh post-install = scripts/rpm-post-install.sh requires = /usr/bin/python3 - python3-django >= 3.2.20 - python3-django-tagging + python3-django >= 4.2.20 python3-django-taggit python3-django-extensions python3-django-bootstrap3 @@ -24,6 +23,7 @@ requires = /usr/bin/python3 python3-pymemcache python3-mod_wsgi python3-importlib-metadata + python3-cvss policycoreutils-python-utils httpd python3-dnf-plugin-post-transaction-actions From 8f6495fb5b1bcbc17f3432cabbaf698498f3ea8b Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Fri, 21 Mar 2025 23:33:38 -0400 Subject: [PATCH 175/199] add missing dependencies --- debian/control | 9 +++++---- setup.cfg | 4 +++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/debian/control b/debian/control index f224e512..63a58356 100644 --- a/debian/control +++ b/debian/control @@ -15,11 +15,12 @@ Architecture: all Homepage: https://github.com/furlongm/patchman Depends: ${misc:Depends}, python3 (>= 3.11), python3-django (>= 4.2), python3-django-extensions, python3-django-bootstrap3, python3-cvss, - python3-djangorestframework, python3-debian, python3-django-filters, - python3-rpm, python3-tqdm, python3-lxml, python3-defusedxml, + python3-djangorestframework, python3-django-filters, python3-debian, + 
python3-rpm, python3-tqdm, python3-defusedxml, python3-pip, python3-tenacity, python3-requests, python3-colorama, python3-magic, python3-humanize, - python3-pip, python3-pymemcache, python3-yaml, memcached, libapache2-mod-wsgi-py3, - apache2, python3-django-taggit, python3-django-celery, python3-django-celery-beat + python3-pymemcache, python3-yaml, memcached, libapache2-mod-wsgi-py3, + apache2, python3-django-taggit, python3-celery, python3-django-celery-beat, + python3-redis, python3-git Suggests: python3-mysqldb, python3-psycopg2 Description: Django-based patch status monitoring tool for linux systems. . diff --git a/setup.cfg b/setup.cfg index 59161406..25fea4a1 100644 --- a/setup.cfg +++ b/setup.cfg @@ -12,7 +12,7 @@ requires = /usr/bin/python3 python3-debian python3-rpm python3-tqdm - python3-lxml + python3-tenacity python3-defusedxml python3-requests python3-colorama @@ -24,6 +24,8 @@ requires = /usr/bin/python3 python3-mod_wsgi python3-importlib-metadata python3-cvss + python3-redis + python3-GitPython policycoreutils-python-utils httpd python3-dnf-plugin-post-transaction-actions From 2c5da764b2af6c506c0f800c4c62615c456e50aa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 22 Mar 2025 03:36:03 +0000 Subject: [PATCH 176/199] Bump djangorestframework from 3.13.1 to 3.15.2 Bumps [djangorestframework](https://github.com/encode/django-rest-framework) from 3.13.1 to 3.15.2. - [Release notes](https://github.com/encode/django-rest-framework/releases) - [Commits](https://github.com/encode/django-rest-framework/compare/3.13.1...3.15.2) --- updated-dependencies: - dependency-name: djangorestframework dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 1df0c5e1..d4cbb5b5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,7 +8,7 @@ PyYAML==6.0.1 chardet==4.0.0 requests==2.32.3 colorama==0.4.4 -djangorestframework==3.14.0 +djangorestframework==3.15.2 django-filter==25.1 humanize==3.13.1 version-utils==0.3.0 From cb96685120f7b8887011911f0901d4548fd8eced Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 22 Mar 2025 00:41:56 -0400 Subject: [PATCH 177/199] packaging updates --- debian/control | 10 +++++----- debian/python3-patchman.postinst | 2 ++ debian/rules | 2 +- errata/__init__.py | 0 etc/patchman/celery.conf | 2 ++ etc/systemd/system/patchman-celery.service | 2 +- security/__init__.py | 0 setup.cfg | 3 +++ 8 files changed, 14 insertions(+), 7 deletions(-) create mode 100644 errata/__init__.py create mode 100644 etc/patchman/celery.conf create mode 100644 security/__init__.py diff --git a/debian/control b/debian/control index 63a58356..0e3bf4c9 100644 --- a/debian/control +++ b/debian/control @@ -3,7 +3,7 @@ Section: python Priority: optional Maintainer: Marcus Furlong Uploaders: Marcus Furlong -Build-Depends: debhelper (>=13), python3 (>= 3.11), dh-python, dh-exec +Build-Depends: debhelper (>=13), python3 (>= 3.11), dh-python, dh-exec, dh-systemd Standards-Version: 4.6.2 Homepage: https://github.com/furlongm/patchman Vcs-Git: git://github.com/furlongm/patchman @@ -18,10 +18,10 @@ Depends: ${misc:Depends}, python3 (>= 3.11), python3-django (>= 4.2), python3-djangorestframework, python3-django-filters, python3-debian, python3-rpm, python3-tqdm, python3-defusedxml, python3-pip, python3-tenacity, python3-requests, python3-colorama, python3-magic, python3-humanize, - python3-pymemcache, python3-yaml, memcached, libapache2-mod-wsgi-py3, - apache2, python3-django-taggit, python3-celery, python3-django-celery-beat, - python3-redis, python3-git 
-Suggests: python3-mysqldb, python3-psycopg2 + python3-yaml, libapache2-mod-wsgi-py3, apache2, + celery, python3-celery, python3-django-celery-beat, redis-server, + python3-redis, python3-git, python3-django-taggit +Suggests: python3-mysqldb, python3-psycopg2, python3-pymemcache, memcached Description: Django-based patch status monitoring tool for linux systems. . Patchman provides a web interface for monitoring host package updates. diff --git a/debian/python3-patchman.postinst b/debian/python3-patchman.postinst index ade265b8..94983068 100644 --- a/debian/python3-patchman.postinst +++ b/debian/python3-patchman.postinst @@ -12,6 +12,8 @@ if [ "$1" = "configure" ] ; then . /usr/share/apache2/apache2-maintscript-helper apache2_invoke enconf patchman.conf + adduser --system --group patchman-celery + patchman-set-secret-key chown www-data /etc/patchman/local_settings.py diff --git a/debian/rules b/debian/rules index 63a1916b..c1612816 100755 --- a/debian/rules +++ b/debian/rules @@ -9,7 +9,7 @@ clean:: export PYBUILD_NAME=patchman %: - dh $@ --with python3 --buildsystem=pybuild + dh $@ --with=python3 --buildsystem=pybuild --with=systemd override_dh_auto_test: true diff --git a/errata/__init__.py b/errata/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/etc/patchman/celery.conf b/etc/patchman/celery.conf new file mode 100644 index 00000000..7afc96ee --- /dev/null +++ b/etc/patchman/celery.conf @@ -0,0 +1,2 @@ +REDIS_HOST=127.0.0.1 +REDIS_PORT=6379 diff --git a/etc/systemd/system/patchman-celery.service b/etc/systemd/system/patchman-celery.service index 805a3d19..6408d818 100644 --- a/etc/systemd/system/patchman-celery.service +++ b/etc/systemd/system/patchman-celery.service @@ -7,7 +7,7 @@ After=network-onlne.target Type=simple User=patchman-celery Group=patchman-celery -EnvironmentFile=/etc/patchman/celery +EnvironmentFile=/etc/patchman/celery.conf ExecStart=/usr/bin/celery --broker redis://${REDIS_HOST}:${REDIS_PORT}/0 --app patchman worker 
--loglevel info --beat --scheduler django_celery_beat.schedulers:DatabaseScheduler --task-events --pool threads [Install] diff --git a/security/__init__.py b/security/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/setup.cfg b/setup.cfg index 25fea4a1..b7253320 100644 --- a/setup.cfg +++ b/setup.cfg @@ -25,6 +25,9 @@ requires = /usr/bin/python3 python3-importlib-metadata python3-cvss python3-redis + redis-server + celery + python3-django-celery-beat python3-GitPython policycoreutils-python-utils httpd From 8cf8564710febbc6c3c079d027b76ad4d8995e6b Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 22 Mar 2025 00:56:00 -0400 Subject: [PATCH 178/199] remove dh-systemd build dependency --- debian/control | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debian/control b/debian/control index 0e3bf4c9..18e98613 100644 --- a/debian/control +++ b/debian/control @@ -3,7 +3,7 @@ Section: python Priority: optional Maintainer: Marcus Furlong Uploaders: Marcus Furlong -Build-Depends: debhelper (>=13), python3 (>= 3.11), dh-python, dh-exec, dh-systemd +Build-Depends: debhelper (>=13), python3 (>= 3.11), dh-python, dh-exec Standards-Version: 4.6.2 Homepage: https://github.com/furlongm/patchman Vcs-Git: git://github.com/furlongm/patchman From 7bb69c07fad1b25f860889c389f27c4d16984b69 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 22 Mar 2025 01:04:42 -0400 Subject: [PATCH 179/199] add missing module __init__s --- errata/sources/__init__.py | 0 errata/sources/distros/__init__.py | 0 errata/sources/repos/__init__.py | 0 repos/repo_types/__init__.py | 0 4 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 errata/sources/__init__.py create mode 100644 errata/sources/distros/__init__.py create mode 100644 errata/sources/repos/__init__.py create mode 100644 repos/repo_types/__init__.py diff --git a/errata/sources/__init__.py b/errata/sources/__init__.py new file mode 100644 index 00000000..e69de29b diff --git 
a/errata/sources/distros/__init__.py b/errata/sources/distros/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/errata/sources/repos/__init__.py b/errata/sources/repos/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/repos/repo_types/__init__.py b/repos/repo_types/__init__.py new file mode 100644 index 00000000..e69de29b From 4f3d69ff770aa62f8fe8247a7229d75c3e689410 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 22 Mar 2025 01:28:27 -0400 Subject: [PATCH 180/199] debian packaging updates --- MANIFEST.in | 2 ++ debian/control | 2 +- debian/python3-patchman.postinst | 1 + etc/patchman/local_settings.py | 2 +- 4 files changed, 5 insertions(+), 2 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index 293b1da2..5ea60ab7 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -16,5 +16,7 @@ recursive-include packages * recursive-include repos * recursive-include reports * recursive-include modules * +recursive-include errata * +recursive-include security * recursive-include sbin * recursive-include etc * diff --git a/debian/control b/debian/control index 18e98613..67026269 100644 --- a/debian/control +++ b/debian/control @@ -18,7 +18,7 @@ Depends: ${misc:Depends}, python3 (>= 3.11), python3-django (>= 4.2), python3-djangorestframework, python3-django-filters, python3-debian, python3-rpm, python3-tqdm, python3-defusedxml, python3-pip, python3-tenacity, python3-requests, python3-colorama, python3-magic, python3-humanize, - python3-yaml, libapache2-mod-wsgi-py3, apache2, + python3-yaml, libapache2-mod-wsgi-py3, apache2, sqlite3, celery, python3-celery, python3-django-celery-beat, redis-server, python3-redis, python3-git, python3-django-taggit Suggests: python3-mysqldb, python3-psycopg2, python3-pymemcache, memcached diff --git a/debian/python3-patchman.postinst b/debian/python3-patchman.postinst index 94983068..9fa07a29 100644 --- a/debian/python3-patchman.postinst +++ b/debian/python3-patchman.postinst @@ -13,6 +13,7 @@ if [ 
"$1" = "configure" ] ; then apache2_invoke enconf patchman.conf adduser --system --group patchman-celery + usermod -a -G www-data patchman-celery patchman-set-secret-key chown www-data /etc/patchman/local_settings.py diff --git a/etc/patchman/local_settings.py b/etc/patchman/local_settings.py index a8269f5d..33a7d52f 100644 --- a/etc/patchman/local_settings.py +++ b/etc/patchman/local_settings.py @@ -39,7 +39,7 @@ DAYS_WITHOUT_REPORT = 14 # Whether to run patchman under the gunicorn web server -RUN_GUNICORN = True +RUN_GUNICORN = False CACHES = { 'default': { From d47a3b9efbf53c5808c113c2bff24f1f785111af Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sat, 22 Mar 2025 17:14:34 -0400 Subject: [PATCH 181/199] fix db permissions for celery --- debian/python3-patchman.postinst | 6 +++--- scripts/rpm-post-install.sh | 3 +++ 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/debian/python3-patchman.postinst b/debian/python3-patchman.postinst index 9fa07a29..b64cb816 100644 --- a/debian/python3-patchman.postinst +++ b/debian/python3-patchman.postinst @@ -12,9 +12,6 @@ if [ "$1" = "configure" ] ; then . /usr/share/apache2/apache2-maintscript-helper apache2_invoke enconf patchman.conf - adduser --system --group patchman-celery - usermod -a -G www-data patchman-celery - patchman-set-secret-key chown www-data /etc/patchman/local_settings.py @@ -26,6 +23,9 @@ if [ "$1" = "configure" ] ; then sqlite3 /var/lib/patchman/db/patchman.db 'PRAGMA journal_mode=WAL;' chown -R www-data:www-data /var/lib/patchman + adduser --system --group patchman-celery + usermod -a -G www-data patchman-celery + chmod g+w /var/lib/patchman /var/lib/patchman/db /var/lib/patchman/db/patchman.db echo echo "Remember to run 'patchman-manage createsuperuser' to create a user." 
diff --git a/scripts/rpm-post-install.sh b/scripts/rpm-post-install.sh index 6c88a917..5efcc24b 100644 --- a/scripts/rpm-post-install.sh +++ b/scripts/rpm-post-install.sh @@ -23,6 +23,9 @@ patchman-manage migrate --run-syncdb --fake-initial sqlite3 /var/lib/patchman/db/patchman.db 'PRAGMA journal_mode=WAL;' chown -R apache:apache /var/lib/patchman +adduser --system --group patchman-celery +usermod -a -G apache patchman-celery +chmod g+w /var/lib/patchman /var/lib/patchman/db /var/lib/patchman/db/patchman.db chcon --type httpd_sys_rw_content_t /var/lib/patchman/db/patchman.db semanage port -a -t http_port_t -p tcp 5672 setsebool -P httpd_can_network_memcache 1 From ed005e54621c1a0723902dda39a1b4e69e73dfc4 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Wed, 16 Apr 2025 09:38:17 -0400 Subject: [PATCH 182/199] fix rpm building --- patchman-client.spec | 2 +- scripts/rpm-post-install.sh | 2 ++ setup.cfg | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/patchman-client.spec b/patchman-client.spec index ac7a7385..68736038 100644 --- a/patchman-client.spec +++ b/patchman-client.spec @@ -8,7 +8,7 @@ License: GPLv3 URL: http://patchman.openbytes.ie Source: %{expand:%%(pwd)} BuildArch: noarch -Requires: curl which coreutils util-linux awk +Requires: curl which coreutils util-linux gawk %define binary_payload w9.gzdio diff --git a/scripts/rpm-post-install.sh b/scripts/rpm-post-install.sh index 5efcc24b..24ade8af 100644 --- a/scripts/rpm-post-install.sh +++ b/scripts/rpm-post-install.sh @@ -11,6 +11,8 @@ fi systemctl enable httpd systemctl restart httpd +systemctl enable redis +systemctl start redis patchman-set-secret-key chown apache /etc/patchman/local_settings.py diff --git a/setup.cfg b/setup.cfg index b7253320..7af9ccb0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -25,7 +25,7 @@ requires = /usr/bin/python3 python3-importlib-metadata python3-cvss python3-redis - redis-server + redis celery python3-django-celery-beat python3-GitPython From 
ceab20b4facd8656d5501855ef83bb82d839ec2b Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Thu, 17 Apr 2025 15:38:19 -0400 Subject: [PATCH 183/199] remove unnessecary globals to appease flake8 --- errata/sources/distros/debian.py | 3 --- util/__init__.py | 6 ++---- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index 5eb0ea91..710faeeb 100644 --- a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -84,7 +84,6 @@ def parse_debian_package_file_map(data, repo): Source: 389-ds-base Source-Version: 1.4.0.21-1+deb10u1 """ - global DSCs parsing_dsc = False for line in data.splitlines(): if line.startswith('Path:'): @@ -236,7 +235,6 @@ def parse_debian_erratum_package(line, accepted_codenames): def get_debian_dsc_package_list(package, version): """ Get the package list from a DSC file for a given source package/version """ - global DSCs if not DSCs.get(package) or not DSCs[package].get(version): return package_list = DSCs[package][version].get('package_list') @@ -247,7 +245,6 @@ def get_debian_dsc_package_list(package, version): def fetch_debian_dsc_package_list(package, version): """ Fetch the package list from a DSC file for a given source package/version """ - global DSCs if not DSCs.get(package) or not DSCs[package].get(version): warning_message.send(sender=None, text=f'No DSC found for {package} {version}') return diff --git a/util/__init__.py b/util/__init__.py index 6744e1e8..a56ed3b6 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -43,7 +43,6 @@ def get_verbosity(): """ Get the global verbosity level """ - global verbose return verbose @@ -57,7 +56,7 @@ def set_verbosity(value): def create_pbar(ptext, plength, ljust=35, **kwargs): """ Create a global progress bar if global verbose is True """ - global pbar, verbose + global pbar if verbose and plength > 0: jtext = str(ptext).ljust(ljust) pbar = tqdm(total=plength, desc=jtext, position=0, leave=True, 
ascii=' >=') @@ -67,7 +66,7 @@ def create_pbar(ptext, plength, ljust=35, **kwargs): def update_pbar(index, **kwargs): """ Update the global progress bar if global verbose is True """ - global pbar, verbose + global pbar if verbose and pbar: pbar.update(n=index-pbar.n) if index >= pbar.total: @@ -79,7 +78,6 @@ def fetch_content(response, text='', ljust=35): """ Display a progress bar to fetch the request content if verbose is True. Otherwise, just return the request content """ - global verbose if not response: return if verbose: From 6596588f95eecf14796b319273b87d002a0b2162 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 20 Apr 2025 16:22:03 -0400 Subject: [PATCH 184/199] remove unused cve title field --- security/migrations/0007_remove_cve_title.py | 17 ++++++++++++ security/models.py | 28 +++++++------------- security/serializers.py | 2 +- security/templates/security/cve_detail.html | 1 - 4 files changed, 27 insertions(+), 21 deletions(-) create mode 100644 security/migrations/0007_remove_cve_title.py diff --git a/security/migrations/0007_remove_cve_title.py b/security/migrations/0007_remove_cve_title.py new file mode 100644 index 00000000..64c79f13 --- /dev/null +++ b/security/migrations/0007_remove_cve_title.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.20 on 2025-04-20 20:15 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0006_alter_cve_options_alter_cvss_unique_together'), + ] + + operations = [ + migrations.RemoveField( + model_name='cve', + name='title', + ), + ] diff --git a/security/models.py b/security/models.py index 5704ae47..a847ab02 100644 --- a/security/models.py +++ b/security/models.py @@ -92,7 +92,6 @@ def __str__(self): class CVE(models.Model): cve_id = models.CharField(max_length=255, unique=True) - title = models.CharField(max_length=255, blank=True, null=True) description = models.TextField(blank=True, default='') reserved_date = models.DateTimeField(blank=True, 
null=True) published_date = models.DateTimeField(blank=True, null=True) @@ -233,32 +232,23 @@ def parse_mitre_cve_data(self, cve_json): if updated_date: self.updated_date = tz_aware_datetime(cve_metadata.get('dateUpdated')) cna_container = cve_json.get('containers').get('cna') - title = cna_container.get('title') - if not title: - product = cna_container.get('product') descriptions = cna_container.get('descriptions') if descriptions: self.description = descriptions[0].get('value') problem_types = cna_container.get('problemTypes', []) for problem_type in problem_types: descriptions = problem_type.get('descriptions') - if descriptions: - for description in descriptions: - cwe_description = description.get('description') - if description.get('type') == 'CWE': - cwe_id = description.get('cweId') - if cwe_id: - cwe, created = CWE.objects.get_or_create(cwe_id=cwe_id) - self.cwes.add(cwe) - cwe_ids = re.findall(r'CWE-\d+', cwe_description) - for cwe_id in cwe_ids: + for description in descriptions: + if description.get('type') == 'CWE': + cwe_id = description.get('cweId') + if cwe_id: cwe, created = CWE.objects.get_or_create(cwe_id=cwe_id) self.cwes.add(cwe) - if not title: - if product and cwe_description: - self.title = f'{product} - {cwe_description}' - else: - self.title = '' + cwe_description = description.get('description') + cwe_ids = re.findall(r'CWE-\d+', cwe_description) + for cwe_id in cwe_ids: + cwe, created = CWE.objects.get_or_create(cwe_id=cwe_id) + self.cwes.add(cwe) metrics = cna_container.get('metrics') if metrics: for metric in metrics: diff --git a/security/serializers.py b/security/serializers.py index 31730c53..979fc2c9 100644 --- a/security/serializers.py +++ b/security/serializers.py @@ -28,7 +28,7 @@ class Meta: class CVESerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CVE - fields = ('cve_id', 'title', 'description', 'cvss_score', 'cwe', + fields = ('cve_id', 'description', 'cvss_score', 'cwe', 'registered_date', 
'published_date', 'updated_date') diff --git a/security/templates/security/cve_detail.html b/security/templates/security/cve_detail.html index 2fe6f8e4..6c86197a 100644 --- a/security/templates/security/cve_detail.html +++ b/security/templates/security/cve_detail.html @@ -21,7 +21,6 @@
    osv.devhttps://osv.dev/vulnerability/{{ erratum.name }}
    {{ reference.ref_type }}
    - From 77bc10ea45e7aae5d64c2a18c0a9b4f21652d77e Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 20 Apr 2025 16:21:31 -0400 Subject: [PATCH 185/199] handle existing duplicate packages in get_or_create --- packages/utils.py | 33 ++++++++++++++++++++++++--------- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index d4399dcc..9b098225 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -22,7 +22,7 @@ from arch.models import PackageArchitecture from packages.models import PackageName, Package, PackageUpdate, PackageCategory, PackageString -from patchman.signals import error_message, info_message +from patchman.signals import error_message, info_message, warning_message def convert_package_to_packagestring(package): @@ -174,14 +174,29 @@ def get_or_create_package(name, epoch, version, release, arch, p_type): package_name, c = PackageName.objects.get_or_create(name=name) package_arch, c = PackageArchitecture.objects.get_or_create(name=arch) with transaction.atomic(): - package, c = Package.objects.get_or_create( - name=package_name, - arch=package_arch, - epoch=epoch, - version=version, - release=release, - packagetype=p_type, - ) + try: + package, c = Package.objects.get_or_create( + name=package_name, + arch=package_arch, + epoch=epoch, + version=version, + release=release, + packagetype=p_type, + ) + except MultipleObjectsReturned: + packages = Package.objects.filter( + name=package_name, + arch=package_arch, + epoch=epoch, + version=version, + release=release, + packagetype=p_type, + ) + package = packages.first() + # TODO this should handle gentoo package categories too, otherwise we may be deleting packages + # that should be kept + warning_message.send(sender=None, text=f'Deleting duplicate packages: {packages.exclude(id=package.id)}') + packages.exclude(id=package.id).delete() return package From 697ffb2ae7bd04de62ea7184ffe7a7a1731bad54 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 
20 Apr 2025 16:20:54 -0400 Subject: [PATCH 186/199] speed up errata processing --- errata/utils.py | 37 +++++++++++++++++++++++++++---------- sbin/patchman | 6 ++++-- 2 files changed, 31 insertions(+), 12 deletions(-) diff --git a/errata/utils.py b/errata/utils.py index 8437312c..d8099db4 100644 --- a/errata/utils.py +++ b/errata/utils.py @@ -14,6 +14,10 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see +import concurrent.futures + +from django.db import connections + from util import tz_aware_datetime from errata.models import Erratum from packages.models import PackageUpdate @@ -61,24 +65,37 @@ def mark_errata_security_updates(): """ For each set of erratum packages, modify any PackageUpdate that should be marked as a security update. """ + connections.close_all() elen = Erratum.objects.count() - pbar_start.send(sender=None, ptext=f'Scanning {elen} Errata', plen=elen) - for i, e in enumerate(Erratum.objects.all()): - pbar_update.send(sender=None, index=i + 1) - e.scan_for_security_updates() + pbar_start.send(sender=None, ptext=f'Scanning {elen} Errata for security updates', plen=elen) + i = 0 + with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor: + futures = [executor.submit(e.scan_for_security_updates) for e in Erratum.objects.all()] + for future in concurrent.futures.as_completed(futures): + pbar_update.send(sender=None, index=i + 1) + i += 1 def scan_package_updates_for_affected_packages(): """ Scan PackageUpdates for packages affected by errata """ - for pu in PackageUpdate.objects.all(): + plen = PackageUpdate.objects.count() + pbar_start.send(sender=None, ptext=f'Scanning {plen} Updates for affected packages', plen=plen) + for i, pu in enumerate(PackageUpdate.objects.all()): + pbar_update.send(sender=None, index=i + 1) for e in pu.newpackage.provides_fix_in_erratum.all(): e.affected_packages.add(pu.oldpackage) -def add_errata_affected_packages(): +def enrich_errata(): + """ Enrich 
Errata with data from osv.dev + """ + connections.close_all() elen = Erratum.objects.count() - pbar_start.send(sender=None, ptext=f'Adding affected packages to {elen} Errata', plen=elen) - for i, e in enumerate(Erratum.objects.all()): - pbar_update.send(sender=None, index=i + 1) - e.fetch_osv_dev_data() + pbar_start.send(sender=None, ptext=f'Adding osv.dev data to {elen} Errata', plen=elen) + i = 0 + with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor: + futures = [executor.submit(e.fetch_osv_dev_data) for e in Erratum.objects.all()] + for future in concurrent.futures.as_completed(futures): + pbar_update.send(sender=None, index=i + 1) + i += 1 diff --git a/sbin/patchman b/sbin/patchman index 47a449d0..9cc6048e 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -30,7 +30,8 @@ from django.conf import settings # noqa django_setup() from arch.utils import clean_architectures -from errata.utils import mark_errata_security_updates, add_errata_affected_packages +from errata.utils import mark_errata_security_updates, enrich_errata, \ + scan_package_updates_for_affected_packages from errata.tasks import update_errata from hosts.models import Host from modules.utils import clean_modules @@ -522,8 +523,9 @@ def process_args(args): showhelp = False if args.update_errata: update_errata(args.erratum_type, args.force, args.repo) + scan_package_updates_for_affected_packages() mark_errata_security_updates() - add_errata_affected_packages() + enrich_errata() showhelp = False if args.update_cves: update_cves(args.cve, args.fetch_nist_data) From 6a13d6fbf280fc93fb6e59b9af68630c9253f568 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 20 Apr 2025 16:19:37 -0400 Subject: [PATCH 187/199] correctly parse newer dsc representation --- errata/sources/distros/debian.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index 710faeeb..e653d302 100644 --- 
a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -303,14 +303,11 @@ def process_debian_erratum_fixed_packages(e, package_data): if not package_list: return fixed_packages = set() - for line in package_list.splitlines(): - if not line: + for package in package_list: + if package.get('package-type') != 'deb': continue - line_parts = line.split() - if line_parts[1] != 'deb': - continue - name = line_parts[0] - arches = process_debian_dsc_arches(line_parts[4]) + name = package.get('package') + arches = process_debian_dsc_arches(package.get('_other')) for arch in arches: fixed_package = get_or_create_package(name, epoch, ver, rel, arch, Package.DEB) fixed_packages.add(fixed_package) From c877223ce82ff20b93645266590747a2e7f3959c Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Sun, 20 Apr 2025 16:18:51 -0400 Subject: [PATCH 188/199] handle subprocesses closing db connections --- errata/sources/distros/alma.py | 3 +++ errata/sources/distros/arch.py | 3 +++ errata/sources/distros/debian.py | 3 +++ errata/sources/distros/rocky.py | 2 ++ errata/sources/distros/ubuntu.py | 3 +++ errata/sources/repos/yum.py | 3 +++ etc/patchman/local_settings.py | 3 +++ 7 files changed, 20 insertions(+) diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py index 193127e4..e0f2d4ae 100644 --- a/errata/sources/distros/alma.py +++ b/errata/sources/distros/alma.py @@ -17,6 +17,8 @@ import concurrent.futures import json +from django.db import connections + from operatingsystems.utils import get_or_create_osrelease from packages.models import Package from packages.utils import get_or_create_package, parse_package_string @@ -74,6 +76,7 @@ def process_alma_errata_serially(release, advisories): def process_alma_errata_concurrently(release, advisories): """ Process Alma Linux Errata concurrently """ + connections.close_all() elen = len(advisories) pbar_start.send(sender=None, ptext=f'Processing {elen} Alma {release} Errata', plen=elen) i = 0 diff 
--git a/errata/sources/distros/arch.py b/errata/sources/distros/arch.py index c0330dc8..40d0dada 100644 --- a/errata/sources/distros/arch.py +++ b/errata/sources/distros/arch.py @@ -17,6 +17,8 @@ import concurrent.futures import json +from django.db import connections + from operatingsystems.utils import get_or_create_osrelease from patchman.signals import error_message, pbar_start, pbar_update from packages.models import Package @@ -66,6 +68,7 @@ def parse_arch_errata_concurrently(advisories): """ Parse Arch Linux Errata Advisories concurrently """ osrelease = get_or_create_osrelease(name='Arch Linux') + connections.close_all() elen = len(advisories) pbar_start.send(sender=None, ptext=f'Processing {elen} Arch Advisories', plen=elen) i = 0 diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py index 710faeeb..29ec5312 100644 --- a/errata/sources/distros/debian.py +++ b/errata/sources/distros/debian.py @@ -21,6 +21,8 @@ from debian.deb822 import Dsc from io import StringIO +from django.db import connections + from operatingsystems.models import OSRelease from operatingsystems.utils import get_or_create_osrelease from packages.models import Package @@ -181,6 +183,7 @@ def create_debian_errata_serially(errata, accepted_codenames): def create_debian_errata_concurrently(errata, accepted_codenames): """ Create Debian Errata concurrently """ + connections.close_all() elen = len(errata) pbar_start.send(sender=None, ptext=f'Processing {elen} Debian Errata', plen=elen) i = 0 diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py index 6231e9e9..693d7b0c 100644 --- a/errata/sources/distros/rocky.py +++ b/errata/sources/distros/rocky.py @@ -18,6 +18,7 @@ import concurrent.futures from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential +from django.db import connections from django.db.utils import OperationalError from operatingsystems.utils import get_or_create_osrelease @@ -158,6 +159,7 @@ def 
process_rocky_errata_serially(advisories): def process_rocky_errata_concurrently(advisories): """ Process Rocky Linux errata concurrently """ + connections.close_all() elen = len(advisories) pbar_start.send(sender=None, ptext=f'Processing {elen} Rocky Errata', plen=elen) i = 0 diff --git a/errata/sources/distros/ubuntu.py b/errata/sources/distros/ubuntu.py index 8580e12b..7f50962c 100644 --- a/errata/sources/distros/ubuntu.py +++ b/errata/sources/distros/ubuntu.py @@ -21,6 +21,8 @@ from io import StringIO from urllib.parse import urlparse +from django.db import connections + from operatingsystems.models import OSRelease, OSVariant from operatingsystems.utils import get_or_create_osrelease from packages.models import Package @@ -87,6 +89,7 @@ def parse_usn_data_serially(advisories, accepted_releases): def parse_usn_data_concurrently(advisories, accepted_releases): """ Parse the Ubuntu USN data concurrently """ + connections.close_all() elen = len(advisories) pbar_start.send(sender=None, ptext=f'Processing {elen} Ubuntu Errata', plen=elen) i = 0 diff --git a/errata/sources/repos/yum.py b/errata/sources/repos/yum.py index 242e6d1a..dfeed879 100644 --- a/errata/sources/repos/yum.py +++ b/errata/sources/repos/yum.py @@ -18,6 +18,8 @@ from io import BytesIO from defusedxml import ElementTree +from django.db import connections + from operatingsystems.utils import get_or_create_osrelease from packages.models import Package from packages.utils import get_or_create_package @@ -55,6 +57,7 @@ def extract_updateinfo_serially(updates, elen): def extract_updateinfo_concurrently(updates, elen): """ Parses updateinfo.xml and extracts package/errata information concurrently """ + connections.close_all() pbar_start.send(sender=None, ptext=f'Extracting {elen} updateinfo Errata', plen=elen) i = 0 with concurrent.futures.ProcessPoolExecutor(max_workers=100) as executor: diff --git a/etc/patchman/local_settings.py b/etc/patchman/local_settings.py index 33a7d52f..181c4c4d 100644 --- 
a/etc/patchman/local_settings.py +++ b/etc/patchman/local_settings.py @@ -35,6 +35,9 @@ # Maximum number of mirrors to add or refresh per repo MAX_MIRRORS = 2 +# Maximum number of failures before disabling a mirror, set to -1 to never disable mirrors +MAX_MIRROR_FAILURES = 14 + # Number of days to wait before raising that a host has not reported DAYS_WITHOUT_REPORT = 14 From d44a2ce5d9ff30abb6caf3037307706cdbc4c5d5 Mon Sep 17 00:00:00 2001 From: "furlongm@gmail.com" Date: Wed, 23 Apr 2025 17:35:03 -0400 Subject: [PATCH 189/199] handle duplicate CVSSes better --- security/models.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/security/models.py b/security/models.py index a847ab02..b60acb47 100644 --- a/security/models.py +++ b/security/models.py @@ -125,19 +125,20 @@ def add_cvss_score(self, vector_string, score=None, severity=None, version=None) score = cvss_score.base_score if not severity: severity = cvss_score.severities()[0] - existing = self.cvss_scores.filter(version=version, vector_string=vector_string) - if existing: - cvss = existing.first() - else: + try: cvss, created = CVSS.objects.get_or_create( version=version, vector_string=vector_string, score=score, severity=severity, ) - cvss.score = score - cvss.severity = severity - cvss.save() + except CVSS.MultipleObjectsReturned: + matching_cvsses = CVSS.objects.filter( + version=version, + vector_string=vector_string, + ) + cvss = matching_cvsses.first() + matching_cvsses.exclude(id=cvss.id).delete() self.cvss_scores.add(cvss) def fetch_cve_data(self, fetch_nist_data=False, sleep_secs=6): From bf626c90dd697dfd7bc41f542ae55c88af296e2a Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 8 Apr 2025 15:49:31 -0400 Subject: [PATCH 190/199] reduce max charfield length for mysql --- security/migrations/0001_initial.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/security/migrations/0001_initial.py b/security/migrations/0001_initial.py index 
5655f8b0..c22d1727 100644 --- a/security/migrations/0001_initial.py +++ b/security/migrations/0001_initial.py @@ -27,7 +27,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('cwe_id', models.CharField(max_length=255, unique=True)), ('name', models.CharField(blank=True, max_length=255, null=True)), - ('description', models.CharField(blank=True, max_length=65535, null=True)), + ('description', models.CharField(blank=True, max_length=21844, null=True)), ], ), migrations.CreateModel( @@ -36,7 +36,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('cve_id', models.CharField(max_length=255, unique=True)), ('title', models.CharField(blank=True, max_length=255, null=True)), - ('description', models.CharField(max_length=65535)), + ('description', models.CharField(max_length=21844)), ('reserved_date', models.DateTimeField(blank=True, null=True)), ('published_date', models.DateTimeField(blank=True, null=True)), ('rejected_date', models.DateTimeField(blank=True, null=True)), From 382cd29ad3aeba4481fa4d14e38ce75bcff37874 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Fri, 18 Apr 2025 00:05:34 -0400 Subject: [PATCH 191/199] further reduce charfield size for mysql --- security/migrations/0001_initial.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/security/migrations/0001_initial.py b/security/migrations/0001_initial.py index c22d1727..5f922c9a 100644 --- a/security/migrations/0001_initial.py +++ b/security/migrations/0001_initial.py @@ -27,7 +27,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('cwe_id', models.CharField(max_length=255, unique=True)), ('name', models.CharField(blank=True, max_length=255, null=True)), - ('description', models.CharField(blank=True, 
max_length=21844, null=True)), + ('description', models.CharField(blank=True, max_length=255, null=True)), ], ), migrations.CreateModel( @@ -36,7 +36,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('cve_id', models.CharField(max_length=255, unique=True)), ('title', models.CharField(blank=True, max_length=255, null=True)), - ('description', models.CharField(max_length=21844)), + ('description', models.CharField(max_length=255)), ('reserved_date', models.DateTimeField(blank=True, null=True)), ('published_date', models.DateTimeField(blank=True, null=True)), ('rejected_date', models.DateTimeField(blank=True, null=True)), From 20a42edbe72459f2ddb70c438e283262f5a9df95 Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 29 Apr 2025 16:10:52 -0400 Subject: [PATCH 192/199] reduce URLField max_length to 765 --- errata/migrations/0001_initial.py | 2 +- security/migrations/0005_reference_cve_references.py | 2 +- security/models.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/errata/migrations/0001_initial.py b/errata/migrations/0001_initial.py index 85fe88b4..d02a7dc8 100644 --- a/errata/migrations/0001_initial.py +++ b/errata/migrations/0001_initial.py @@ -19,7 +19,7 @@ class Migration(migrations.Migration): fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('er_type', models.CharField(max_length=255)), - ('url', models.URLField(max_length=2000)), + ('url', models.URLField(max_length=765)), ], ), migrations.CreateModel( diff --git a/security/migrations/0005_reference_cve_references.py b/security/migrations/0005_reference_cve_references.py index 97251add..f94cf7d5 100644 --- a/security/migrations/0005_reference_cve_references.py +++ b/security/migrations/0005_reference_cve_references.py @@ -15,7 +15,7 @@ class Migration(migrations.Migration): fields=[ ('id', models.AutoField(auto_created=True, 
primary_key=True, serialize=False, verbose_name='ID')), ('ref_type', models.CharField(max_length=255)), - ('url', models.URLField(max_length=2000)), + ('url', models.URLField(max_length=765)), ], options={ 'unique_together': {('ref_type', 'url')}, diff --git a/security/models.py b/security/models.py index b60acb47..9c097eed 100644 --- a/security/models.py +++ b/security/models.py @@ -29,7 +29,7 @@ class Reference(models.Model): ref_type = models.CharField(max_length=255) - url = models.URLField(max_length=2000) + url = models.URLField(max_length=765) class Meta: unique_together = ['ref_type', 'url'] From bf5478cf5d5374b693d2641d526db1a1e02f47e9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 May 2025 15:45:31 +0000 Subject: [PATCH 193/199] Bump django from 4.2.20 to 4.2.21 Bumps [django](https://github.com/django/django) from 4.2.20 to 4.2.21. - [Commits](https://github.com/django/django/compare/4.2.20...4.2.21) --- updated-dependencies: - dependency-name: django dependency-version: 4.2.21 dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index dca4fe03..2f72fae5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==4.2.20 +Django==4.2.21 django-taggit==4.0.0 django-extensions==3.2.3 django-bootstrap3==23.1 From 57e5c0d4861b7c80d081e1b85a8b48f9bd1cb332 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 Jun 2025 23:28:39 +0000 Subject: [PATCH 194/199] Bump django from 4.2.21 to 4.2.22 Bumps [django](https://github.com/django/django) from 4.2.21 to 4.2.22. - [Commits](https://github.com/django/django/compare/4.2.21...4.2.22) --- updated-dependencies: - dependency-name: django dependency-version: 4.2.22 dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2f72fae5..3418b5d0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==4.2.21 +Django==4.2.22 django-taggit==4.0.0 django-extensions==3.2.3 django-bootstrap3==23.1 From 6a45e90ed63efadd4e1e7c246d174feceb91c439 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Jun 2025 08:47:24 +0000 Subject: [PATCH 195/199] Bump requests from 2.32.3 to 2.32.4 Bumps [requests](https://github.com/psf/requests) from 2.32.3 to 2.32.4. - [Release notes](https://github.com/psf/requests/releases) - [Changelog](https://github.com/psf/requests/blob/main/HISTORY.md) - [Commits](https://github.com/psf/requests/compare/v2.32.3...v2.32.4) --- updated-dependencies: - dependency-name: requests dependency-version: 2.32.4 dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2f72fae5..955c7bbb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,7 +6,7 @@ python-debian==1.0.1 defusedxml==0.7.1 PyYAML==6.0.2 chardet==5.2.0 -requests==2.32.3 +requests==2.32.4 colorama==0.4.6 djangorestframework==3.15.2 django-filter==25.1 From 56748532f099b6bd1dfaf202e55e4a160dc9fa3f Mon Sep 17 00:00:00 2001 From: vtalos Date: Thu, 17 Jul 2025 19:33:36 +0300 Subject: [PATCH 196/199] Remove unused dependency 'chardet' from requirements.txt --- requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 088f0870..a39eb8cc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,6 @@ django-bootstrap3==23.1 python-debian==1.0.1 defusedxml==0.7.1 PyYAML==6.0.2 -chardet==5.2.0 requests==2.32.4 colorama==0.4.6 djangorestframework==3.15.2 From 
94fcb04694d204c32d4098fe9981f255eb4a843e Mon Sep 17 00:00:00 2001 From: Marcus Furlong Date: Tue, 5 Aug 2025 20:15:16 -0400 Subject: [PATCH 197/199] get_or_create_module only returns module --- modules/utils.py | 4 ++-- repos/repo_types/yum.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/utils.py b/modules/utils.py index 817a610c..f56a0f62 100644 --- a/modules/utils.py +++ b/modules/utils.py @@ -23,7 +23,7 @@ def get_or_create_module(name, stream, version, context, arch, repo): """ Get or create a module object - Returns the module and a boolean for created + Returns the module """ created = False m_arch, c = PackageArchitecture.objects.get_or_create(name=arch) @@ -46,7 +46,7 @@ def get_or_create_module(name, stream, version, context, arch, repo): arch=m_arch, repo=repo, ) - return module, created + return module def get_matching_modules(name, stream, version, context, arch): diff --git a/repos/repo_types/yum.py b/repos/repo_types/yum.py index d08c7393..7ac85816 100644 --- a/repos/repo_types/yum.py +++ b/repos/repo_types/yum.py @@ -91,7 +91,7 @@ def extract_module_metadata(data, url, repo): packages.add(package) from modules.utils import get_or_create_module - module, created = get_or_create_module(m_name, m_stream, m_version, m_context, arch, repo) + module = get_or_create_module(m_name, m_stream, m_version, m_context, arch, repo) package_ids = [] for package in packages: From 1480468f32ed5386c678e5fa0972eaf4c708c879 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Sep 2025 02:19:45 +0000 Subject: [PATCH 198/199] Bump django from 4.2.22 to 4.2.24 Bumps [django](https://github.com/django/django) from 4.2.22 to 4.2.24. - [Commits](https://github.com/django/django/compare/4.2.22...4.2.24) --- updated-dependencies: - dependency-name: django dependency-version: 4.2.24 dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a39eb8cc..9d2baa9e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==4.2.22 +Django==4.2.24 django-taggit==4.0.0 django-extensions==3.2.3 django-bootstrap3==23.1 From b6162963e7811081eb7a4f60ef04a594d23d5866 Mon Sep 17 00:00:00 2001 From: Will Furnell Date: Fri, 12 Sep 2025 13:33:45 +0100 Subject: [PATCH 199/199] Package types are in the Package class --- packages/models.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/models.py b/packages/models.py index f4c9c59e..74a83c0c 100644 --- a/packages/models.py +++ b/packages/models.py @@ -195,11 +195,11 @@ def __str__(self): rel = f'-{self.release}' else: rel = '' - if self.packagetype == self.GENTOO: + if self.packagetype == Package.GENTOO: return f'{self.category}/{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' - elif self.packagetype in [self.DEB, self.ARCH]: + elif self.packagetype in [Package.DEB, Package.ARCH]: return f'{self.name}_{epo}{self.version}{rel}_{self.arch}.{self.get_packagetype_display()}' - elif self.packagetype == self.RPM: + elif self.packagetype == Package.RPM: return f'{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' else: return f'{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}'
    CVE ID{{ cve.cve_id }}
    Title{{ cve.title }}
    Description{{ cve.description }}
    Reserved{{ cve.reserved_date|date|default_if_none:'' }}
    Rejected{{ cve.rejected_date|date|default_if_none:'' }}