[Merge] ~ilkeremrekoc/launchpad:update-cve-lib into launchpad:master

 

İlker Emre Koç has proposed merging ~ilkeremrekoc/launchpad:update-cve-lib into launchpad:master.

Commit message:
Update cve_lib to the latest version

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~ilkeremrekoc/launchpad/+git/launchpad/+merge/494624

Aside from the update, we also had to change the UCT parser and its
tests slightly since the library had changed its priority output type.
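
For review context, here is a rough illustration of the kind of parser-side
change this implies, assuming the updated cve_lib now reports a priority
together with an optional reason (for example as a (priority, reason) pair)
rather than a bare string. The helper below is a hypothetical sketch, not
code from this branch:

    # Hypothetical sketch -- the exact output type of the new cve_lib may differ.
    def normalise_priority(value):
        """Accept the old bare-string priority or a (priority, reason) pair."""
        if isinstance(value, (tuple, list)):
            priority = value[0]
            reason = value[1] if len(value) > 1 else None
        else:
            priority, reason = value, None
        return priority, reason

    # e.g. normalise_priority("high")             -> ("high", None)
    #      normalise_priority(("high", "reason")) -> ("high", "reason")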
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of ~ilkeremrekoc/launchpad:update-cve-lib into launchpad:master.
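
For orientation before the diff: among other changes, the new cve_lib
replaces each subproject's single "oval" boolean with a "data_formats"
mapping (json-pkg/oval/osv/vex). A hypothetical consumer that previously
read the boolean could bridge both layouts roughly as in this sketch (the
key names are taken from the diff; the helper name is invented):

    def wants_oval(details):
        # New layout: {"data_formats": {"oval": True}}; old layout: {"oval": True}.
        return details.get("data_formats", {}).get(
            "oval", details.get("oval", False))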
diff --git a/charm/launchpad-native-publisher/templates/celerybeat_native_publisher.service.j2 b/charm/launchpad-native-publisher/templates/celerybeat_native_publisher.service.j2
index 363c33e..f590a8d 100644
--- a/charm/launchpad-native-publisher/templates/celerybeat_native_publisher.service.j2
+++ b/charm/launchpad-native-publisher/templates/celerybeat_native_publisher.service.j2
@@ -6,8 +6,10 @@ After=syslog.target network.target remote-fs.target nss-lookup.target
 Environment=LPCONFIG=launchpad-native-publisher
 User=launchpad
 WorkingDirectory={{ code_dir }}
-ExecStart={{ code_dir }}/bin/celery beat \
+ExecStart={{ code_dir }}/bin/celery \
+    -A lp.services.job.celeryjob \
     --config=lp.services.job.celeryconfig \
+    beat \
     --pidfile={{ var_dir }}/celerybeat_native_publisher.pid \
     --logfile={{ logs_dir }}/celerybeat_native_publisher.log \
     --schedule={{ var_dir }}/celerybeat-schedule.db
diff --git a/charm/launchpad-native-publisher/templates/celeryd_native_publisher_job.service.j2 b/charm/launchpad-native-publisher/templates/celeryd_native_publisher_job.service.j2
index 472cc7a..1e28bcb 100644
--- a/charm/launchpad-native-publisher/templates/celeryd_native_publisher_job.service.j2
+++ b/charm/launchpad-native-publisher/templates/celeryd_native_publisher_job.service.j2
@@ -8,9 +8,11 @@ User=launchpad
 Group=launchpad
 Environment=LPCONFIG=launchpad-native-publisher
 WorkingDirectory={{ code_dir }}
-ExecStart={{ code_dir }}/bin/celery worker \
-    --queue native_publisher_job \
+ExecStart={{ code_dir }}/bin/celery \
+    -A lp.services.job.celeryjob \
     --config=lp.services.job.celeryconfig \
+    worker \
+    --queue native_publisher_job \
     --hostname=native_publisher_job \
     --loglevel=DEBUG \
     --logfile={{ logs_dir }}/celeryd_native_publisher_job.log
diff --git a/charm/launchpad-native-publisher/templates/celeryd_native_publisher_job_slow.service.j2 b/charm/launchpad-native-publisher/templates/celeryd_native_publisher_job_slow.service.j2
index ceb86a4..15430be 100644
--- a/charm/launchpad-native-publisher/templates/celeryd_native_publisher_job_slow.service.j2
+++ b/charm/launchpad-native-publisher/templates/celeryd_native_publisher_job_slow.service.j2
@@ -8,9 +8,11 @@ User=launchpad
 Group=launchpad
 Environment=LPCONFIG=launchpad-native-publisher
 WorkingDirectory={{ code_dir }}
-ExecStart={{ code_dir }}/bin/celery worker \
-    --queue native_publisher_job_slow \
+ExecStart={{ code_dir }}/bin/celery \
+    -A lp.services.job.celeryjob \
     --config=lp.services.job.celeryconfig \
+    worker \
+    --queue native_publisher_job_slow \
     --hostname=native_publisher_job_slow \
     --loglevel=DEBUG \
     --logfile={{ logs_dir }}/celeryd_native_publisher_job_slow.log
diff --git a/charm/launchpad-scripts-bzrsyncd/templates/celerybeat_bzrsyncd.service.j2 b/charm/launchpad-scripts-bzrsyncd/templates/celerybeat_bzrsyncd.service.j2
index 5ef40f8..52cfdf3 100644
--- a/charm/launchpad-scripts-bzrsyncd/templates/celerybeat_bzrsyncd.service.j2
+++ b/charm/launchpad-scripts-bzrsyncd/templates/celerybeat_bzrsyncd.service.j2
@@ -6,8 +6,10 @@ After=syslog.target network.target remote-fs.target nss-lookup.target
 Environment=LPCONFIG=launchpad-scripts-bzrsyncd
 User=launchpad
 WorkingDirectory={{ code_dir }}
-ExecStart={{ code_dir }}/bin/celery beat \
+ExecStart={{ code_dir }}/bin/celery \
+    -A lp.services.job.celeryjob \
     --config=lp.services.job.celeryconfig \
+    beat \
     --pidfile={{ var_dir }}/celerybeat_bzrsyncd.pid \
     --logfile={{ logs_dir }}/celerybeat_bzrsyncd.log \
     --schedule={{ var_dir }}/celerybeat-schedule.db
diff --git a/charm/launchpad-scripts-bzrsyncd/templates/celeryd_bzrsyncd_job.service.j2 b/charm/launchpad-scripts-bzrsyncd/templates/celeryd_bzrsyncd_job.service.j2
index 47d6eda..86a7c07 100644
--- a/charm/launchpad-scripts-bzrsyncd/templates/celeryd_bzrsyncd_job.service.j2
+++ b/charm/launchpad-scripts-bzrsyncd/templates/celeryd_bzrsyncd_job.service.j2
@@ -8,9 +8,11 @@ User=launchpad
 Group=launchpad
 Environment=LPCONFIG=launchpad-scripts-bzrsyncd
 WorkingDirectory={{ code_dir }}
-ExecStart={{ code_dir }}/bin/celery worker \
-    --queue bzrsyncd_job \
+ExecStart={{ code_dir }}/bin/celery \
+    -A lp.services.job.celeryjob \
     --config=lp.services.job.celeryconfig \
+    worker \
+    --queue bzrsyncd_job \
     --hostname=launchpad_bzrsyncd_job \
     --loglevel=DEBUG \
     --logfile={{ logs_dir }}/celeryd_launchpad_bzrsyncd_job.log
diff --git a/charm/launchpad-scripts-bzrsyncd/templates/celeryd_bzrsyncd_job_slow.service.j2 b/charm/launchpad-scripts-bzrsyncd/templates/celeryd_bzrsyncd_job_slow.service.j2
index 34923be..363206c 100644
--- a/charm/launchpad-scripts-bzrsyncd/templates/celeryd_bzrsyncd_job_slow.service.j2
+++ b/charm/launchpad-scripts-bzrsyncd/templates/celeryd_bzrsyncd_job_slow.service.j2
@@ -8,9 +8,11 @@ User=launchpad
 Group=launchpad
 Environment=LPCONFIG=launchpad-scripts-bzrsyncd
 WorkingDirectory={{ code_dir }}
-ExecStart={{ code_dir }}/bin/celery worker \
-    --queue bzrsyncd_job_slow \
+ExecStart={{ code_dir }}/bin/celery \
+    -A lp.services.job.celeryjob \
     --config=lp.services.job.celeryconfig \
+    worker \
+    --queue bzrsyncd_job_slow \
     --hostname=launchpad_bzrsyncd_job_slow \
     --loglevel=DEBUG \
     --logfile={{ logs_dir }}/celeryd_launchpad_bzrsyncd_job_slow.log
diff --git a/charm/launchpad-scripts/templates/celerybeat_launchpad.service.j2 b/charm/launchpad-scripts/templates/celerybeat_launchpad.service.j2
index e109591..4af0e8e 100644
--- a/charm/launchpad-scripts/templates/celerybeat_launchpad.service.j2
+++ b/charm/launchpad-scripts/templates/celerybeat_launchpad.service.j2
@@ -6,8 +6,10 @@ After=syslog.target network.target remote-fs.target nss-lookup.target
 Environment=LPCONFIG=launchpad-scripts
 User=launchpad
 WorkingDirectory={{ code_dir }}
-ExecStart={{ code_dir }}/bin/celery beat \
+ExecStart={{ code_dir }}/bin/celery \
+    -A lp.services.job.celeryjob \
     --config=lp.services.job.celeryconfig \
+    beat \
     --pidfile={{ var_dir }}/celerybeat_launchpad.pid \
     --logfile={{ logs_dir }}/celerybeat_launchpad.log \
     --schedule={{ var_dir }}/celerybeat-schedule.db
diff --git a/charm/launchpad-scripts/templates/celeryd_launchpad_job.service.j2 b/charm/launchpad-scripts/templates/celeryd_launchpad_job.service.j2
index 67f9f71..29f8b6f 100644
--- a/charm/launchpad-scripts/templates/celeryd_launchpad_job.service.j2
+++ b/charm/launchpad-scripts/templates/celeryd_launchpad_job.service.j2
@@ -8,9 +8,11 @@ User=launchpad
 Group=launchpad
 Environment=LPCONFIG=launchpad-scripts
 WorkingDirectory={{ code_dir }}
-ExecStart={{ code_dir }}/bin/celery worker \
-    --queue launchpad_job \
+ExecStart={{ code_dir }}/bin/celery \
+    -A lp.services.job.celeryjob \
     --config=lp.services.job.celeryconfig \
+    worker \
+    --queue launchpad_job \
     --hostname=launchpad_job \
     --loglevel=DEBUG \
     --logfile={{ logs_dir }}/celeryd_launchpad_job.log
diff --git a/charm/launchpad-scripts/templates/celeryd_launchpad_job_slow.service.j2 b/charm/launchpad-scripts/templates/celeryd_launchpad_job_slow.service.j2
index 9492e60..260ddf1 100644
--- a/charm/launchpad-scripts/templates/celeryd_launchpad_job_slow.service.j2
+++ b/charm/launchpad-scripts/templates/celeryd_launchpad_job_slow.service.j2
@@ -8,9 +8,11 @@ User=launchpad
 Group=launchpad
 Environment=LPCONFIG=launchpad-scripts
 WorkingDirectory={{ code_dir }}
-ExecStart={{ code_dir }}/bin/celery worker \
-    --queue launchpad_job_slow \
+ExecStart={{ code_dir }}/bin/celery \
+    -A lp.services.job.celeryjob \
     --config=lp.services.job.celeryconfig \
+    worker \
+    --queue launchpad_job_slow \
     --hostname=launchpad_job_slow \
     --loglevel=DEBUG \
     --logfile={{ logs_dir }}/celeryd_launchpad_job_slow.log
diff --git a/lib/contrib/cve_lib.py b/lib/contrib/cve_lib.py
index df01971..a39af97 100644
--- a/lib/contrib/cve_lib.py
+++ b/lib/contrib/cve_lib.py
@@ -7,56 +7,72 @@ A copy of `cve_lib` module from `ubuntu-cve-tracker`
 """
 import codecs
 import glob
-import math
 import os
 import re
 import sys
-from collections import OrderedDict
+from functools import lru_cache
 
 import yaml
 
-GLOBAL_TAGS_KEY = "*"
+CVE_FILTER_NAME = "cve_filter_name"
+CVE_FILTER_ARGS = "cve_filter_args"
 
+GLOBAL_TAGS_KEY = '*'
 
 def set_cve_dir(path):
-    """Return a path with CVEs in it. Specifically:
-    - if 'path' has CVEs in it, return path
-    - if 'path' is a relative directory with no CVEs, see if UCT is defined
-      and if so, see if 'UCT/path' has CVEs in it and return path
-    """
+    '''Return a path with CVEs in it. Specifically:
+       - if 'path' has CVEs in it, return path
+       - if 'path' is a relative directory with no CVEs, see if UCT is defined
+         and if so, see if 'UCT/path' has CVEs in it and return path
+    '''
     p = path
     found = False
     if len(glob.glob("%s/CVE-*" % path)) > 0:
         found = True
-    elif not path.startswith("/") and "UCT" in os.environ:
-        tmp = os.path.join(os.environ["UCT"], path)
+    elif not path.startswith('/') and 'UCT' in os.environ:
+        tmp = os.path.join(os.environ['UCT'], path)
         if len(glob.glob("%s/CVE-*" % tmp)) > 0:
             found = True
             p = tmp
-            # print("INFO: using '%s'" % p, file=sys.stderr)
+            #print("INFO: using '%s'" % p, file=sys.stderr)
 
-    if not found:
-        print(
-            "WARN: could not find CVEs in '%s' (or relative to UCT)" % path,
-            file=sys.stderr,
-        )
+    if not found and not path.endswith("ignored"):
+        print("WARN: could not find CVEs in '%s' (or relative to UCT)" % path, file=sys.stderr)
     return p
 
-
-if "UCT" in os.environ:
-    subprojects_dir = os.environ["UCT"] + "/subprojects"
+if 'UCT' in os.environ:
+    active_dir = set_cve_dir(os.environ['UCT'] + "/active")
+    retired_dir = set_cve_dir(os.environ['UCT'] + "/retired")
+    ignored_dir = set_cve_dir(os.environ['UCT'] + "/ignored")
+    embargoed_dir = os.environ['UCT'] + "/embargoed"
+    meta_dir = os.path.join(os.environ['UCT'], 'meta_lists')
+    subprojects_dir = os.environ['UCT'] + "/subprojects"
+    boilerplates_dir = os.environ['UCT'] + "/boilerplates"
 else:
+    active_dir = set_cve_dir("active")
+    retired_dir = set_cve_dir("retired")
+    ignored_dir = set_cve_dir("ignored")
+    embargoed_dir = "embargoed"     # Intentionally not using set_cve_dir()
+    meta_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'meta_lists')
     subprojects_dir = "subprojects"
+    boilerplates_dir = "boilerplates"
 
 PRODUCT_UBUNTU = "ubuntu"
+PRODUCT_ESM = ["esm", "esm-infra", "esm-apps", "esm-infra-legacy", "esm-apps-legacy"]
+PRODUCT_FIPS = ["fips", "fips-updates", "fips-preview"]
+PRIORITY_REASON_REQUIRED = ["low", "high", "critical"]
+PRIORITY_REASON_DATE_START = "2023-07-11"
 
 # common to all scripts
 # these get populated by the contents of subprojects defined below
 all_releases = []
 eol_releases = []
 external_releases = []
+interim_releases = []
 releases = []
 devel_release = ""
+active_external_subprojects = {}
+eol_external_subprojects = {}
 
 # known subprojects which are supported by cve_lib - in general each
 # subproject is defined by the combination of a product and series as
@@ -73,7 +89,12 @@ devel_release = ""
 subprojects = {
     "bluefield/jammy": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": False,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "packages": ["bluefield-jammy-supported.txt"],
         "name": "Ubuntu 22.04 LTS for NVIDIA BlueField",
         "codename": "Jammy Jellyfish",
@@ -107,8 +128,13 @@ subprojects = {
         "stamp": 1493521200,
     },
     "esm/trusty": {
-        "eol": False,
-        "oval": True,
+        "eol": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "packages": ["trusty-esm-supported.txt"],
         "name": "Ubuntu 14.04 LTS",
         "codename": "Trusty Tahr",
@@ -123,7 +149,12 @@ subprojects = {
     },
     "esm-infra/xenial": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "components": ["main", "restricted"],
         "packages": ["esm-infra-xenial-supported.txt"],
         "name": "Ubuntu 16.04 LTS",
@@ -138,7 +169,12 @@ subprojects = {
     },
     "esm-infra/bionic": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "components": ["main", "restricted"],
         "packages": ["esm-infra-bionic-supported.txt"],
         "name": "Ubuntu 18.04 LTS",
@@ -151,9 +187,34 @@ subprojects = {
         "description": "Available with Ubuntu Pro (Infra-only): https://ubuntu.com/pro";,
         "stamp": 1685539024,
     },
+    "esm-infra/focal": {
+        "eol": False,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
+        "components": ["main", "restricted"],
+        "packages": ["esm-infra-focal-supported.txt"],
+        "name": "Ubuntu 20.04 LTS",
+        "codename": "Focal Fossa",
+        "ppas": [
+                 {"ppa": "ubuntu-esm/esm-infra-security", "pocket": "security"},
+                 {"ppa": "ubuntu-esm/esm-infra-updates",  "pocket": "updates"}
+                ],
+        "parent": "ubuntu/focal",
+        "description": "Available with Ubuntu Pro (Infra-only): https://ubuntu.com/pro";,
+        "stamp": 1748920791,
+    },
     "esm-infra-legacy/trusty": {
         "eol": False,
-        "oval": False, #TODO: Change to True when we are ready for generating data
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "packages": ["esm-infra-legacy-trusty-supported.txt"],
         "name": "Ubuntu 14.04 LTS",
         "codename": "Trusty Tahr",
@@ -163,11 +224,16 @@ subprojects = {
                 ],
         "parent": "esm/trusty",
         "description": "Available with Ubuntu Pro with Legacy support add-on: https://ubuntu.com/pro";,
-        "stamp": None, #TODO: to be calculate when finally public
+        "stamp": 1732637340,
     },
     "esm-apps/xenial": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "components": ["universe", "multiverse"],
         "packages": ["esm-apps-xenial-supported.txt"],
         "name": "Ubuntu 16.04 LTS",
@@ -182,7 +248,12 @@ subprojects = {
     },
     "esm-apps/bionic": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "components": ["universe", "multiverse"],
         "packages": ["esm-apps-bionic-supported.txt"],
         "name": "Ubuntu 18.04 LTS",
@@ -197,7 +268,12 @@ subprojects = {
     },
     "esm-apps/focal": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "components": ["universe", "multiverse"],
         "packages": ["esm-apps-focal-supported.txt"],
         "name": "Ubuntu 20.04 LTS",
@@ -206,13 +282,18 @@ subprojects = {
                  {"ppa": "ubuntu-esm/esm-apps-security", "pocket": "security"},
                  {"ppa": "ubuntu-esm/esm-apps-updates",  "pocket": "updates"}
                 ],
-        "parent": "ubuntu/focal",
+        "parent": "esm-infra/focal",
         "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
         "stamp": 1587567600,
     },
     "esm-apps/jammy": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "components": ["universe", "multiverse"],
         "packages": ["esm-apps-jammy-supported.txt"],
         "name": "Ubuntu 22.04 LTS",
@@ -227,7 +308,12 @@ subprojects = {
     },
     "esm-apps/noble": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "components": ["universe", "multiverse"],
         "packages": ["esm-apps-noble-supported.txt"],
         "name": "Ubuntu 24.04 LTS",
@@ -242,85 +328,241 @@ subprojects = {
     },
     "fips/xenial": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "packages": ["fips-xenial-supported.txt"],
-        "name": "Ubuntu 16.04 FIPS Certified",
+        "name": "Ubuntu Pro FIPS 16.04 LTS",
         "codename": "Xenial Xerus",
-        "ppas": [{"ppa" : "ubuntu-advantage/fips", "pocket": "security"}],
-        "parent": "ubuntu/xenial",
+        "ppas": [
+            {"ppa" : "ubuntu-advantage/fips", "pocket": "security"},
+            {"ppa" : "ubuntu-advantage/pro-fips", "pocket": "security"}
+        ],
+        "parent": "esm-apps/xenial",
         "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
     },
     "fips/bionic": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "packages": ["fips-bionic-supported.txt"],
-        "name": "Ubuntu 18.04 FIPS Certified",
+        "name": "Ubuntu Pro FIPS 18.04 LTS",
         "codename": "Bionic Beaver",
-        "ppas": [{"ppa" : "ubuntu-advantage/fips", "pocket": "security"}],
-        "parent": "ubuntu/bionic",
+        "ppas": [
+            {"ppa" : "ubuntu-advantage/fips", "pocket": "security"},
+            {"ppa" : "ubuntu-advantage/pro-fips", "pocket": "security"}
+        ],
+        "parent": "esm-apps/bionic",
         "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
     },
     "fips/focal": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "packages": ["fips-focal-supported.txt"],
-        "name": "Ubuntu 20.04 FIPS Certified",
+        "name": "Ubuntu Pro FIPS 20.04 LTS",
         "codename": "Focal Fossa",
-        "ppas": [{"ppa" : "ubuntu-advantage/fips", "pocket": "security"}],
-        "parent": "ubuntu/focal",
+        "ppas": [
+            {"ppa" : "ubuntu-advantage/fips", "pocket": "security"},
+            {"ppa" : "ubuntu-advantage/pro-fips", "pocket": "security"}
+        ],
+        "parent": "esm-apps/focal",
         "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
     },
     "fips-updates/xenial": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "packages": ["fips-updates-xenial-supported.txt"],
-        "name": "Ubuntu 16.04 FIPS Compliant",
+        "name": "Ubuntu Pro FIPS 16.04 LTS",
         "codename": "Xenial Xerus",
-        "ppas": [{"ppa" : "ubuntu-advantage/fips-updates", "pocket": "updates"}],
-        "parent": "ubuntu/xenial",
+        "ppas": [
+            {"ppa" : "ubuntu-advantage/fips-updates", "pocket": "updates"},
+            {"ppa" : "ubuntu-advantage/pro-fips-updates", "pocket": "updates"}
+        ],
+        "parent": "esm-apps/xenial",
         "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
     },
     "fips-updates/bionic": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "packages": ["fips-updates-bionic-supported.txt"],
-        "name": "Ubuntu 18.04 FIPS Compliant",
+        "name": "Ubuntu Pro FIPS-updates 18.04 LTS",
         "codename": "Bionic Beaver",
-        "ppas": [{"ppa" : "ubuntu-advantage/fips-updates", "pocket": "updates"}],
-        "parent": "ubuntu/bionic",
+        "ppas": [
+            {"ppa" : "ubuntu-advantage/fips-updates", "pocket": "updates"},
+            {"ppa" : "ubuntu-advantage/pro-fips-updates", "pocket": "updates"}
+        ],
+        "parent": "esm-apps/bionic",
         "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
     },
     "fips-updates/focal": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "packages": ["fips-updates-focal-supported.txt"],
-        "name": "Ubuntu 20.04 FIPS Compliant",
+        "name": "Ubuntu Pro FIPS-updates 20.04 LTS",
         "codename": "Focal Fossa",
-        "ppas": [{"ppa" : "ubuntu-advantage/fips-updates", "pocket": "updates"}],
+        "ppas": [
+            {"ppa" : "ubuntu-advantage/fips-updates", "pocket": "updates"},
+            {"ppa" : "ubuntu-advantage/pro-fips-updates", "pocket": "updates"}
+        ],
+        "parent": "esm-apps/focal",
+        "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
+    },
+    "fips-updates/jammy": {
+        "eol": False,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
+        "packages": ["fips-updates-jammy-supported.txt"],
+        "name": "Ubuntu Pro FIPS-updates 22.04 LTS",
+        "codename": "Jammy Jellyfish",
+        "ppas": [
+            {"ppa" : "ubuntu-advantage/fips-updates", "pocket": "updates"},
+            {"ppa" : "ubuntu-advantage/pro-fips-updates", "pocket": "updates"}
+        ],
+        "parent": "esm-apps/jammy",
+        "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
+    },
+    "fips-preview/jammy": {
+        "eol": False,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
+        "packages": ["fips-preview-jammy-supported.txt"],
+        "name": "Ubuntu Pro FIPS-preview 22.04 LTS",
+        "codename": "Jammy Jellyfish",
+        "ppas": [
+            {"ppa" : "ubuntu-advantage/fips-preview", "pocket": "security"},
+            {"ppa" : "ubuntu-advantage/pro-fips-preview", "pocket": "security"}
+        ],
+        "parent": "esm-apps/jammy",
+        "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
+    },
+    "realtime/jammy": {
+        "eol": False,
+        "data_formats": {
+            "json-pkg": False,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
+        "packages": ["realtime-jammy-supported.txt"],
+        "name": "Ubuntu Pro 22.04 LTS Realtime Kernel",
+        "codename": "Jammy Jellyfish",
+        "ppas": [{"ppa": "ubuntu-advantage/realtime-updates", "pocket": "release"}],
+        "parent": "ubuntu/jammy",
+        "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
+    },
+    "realtime/noble": {
+        "eol": False,
+        "data_formats": {
+            "json-pkg": False,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
+        "packages": ["realtime-noble-supported.txt"],
+        "name": "Ubuntu Pro 24.04 LTS Realtime Kernel",
+        "codename": "Noble Numbat",
+        "ppas": [{"ppa": "ubuntu-advantage/realtime-updates", "pocket": "release"}],
+        "parent": "ubuntu/noble",
+        "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
+    },
+    "ros-esm/foxy": {
+        "eol": False,
+        "data_formats": {
+            "json-pkg": False,
+            "oval": False,
+            "osv": False,
+            "vex": False,
+        },
+        "packages": ["ros-esm-focal-foxy-supported.txt"],
+        "name": "Ubuntu 20.04 ROS ESM",
+        "codename": "Focal Fossa",
+        "alias": "ros-esm/focal/foxy",
+        "ppas": [{"ppa": "ubuntu-robotics-packagers/ros-security", "pocket": "security"}],
         "parent": "ubuntu/focal",
         "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
     },
     "ros-esm/kinetic": {
         "eol": False,
-        "oval": False,
+        "data_formats": {
+            "json-pkg": False,
+            "oval": False,
+            "osv": False,
+            "vex": False,
+        },
         "packages": ["ros-esm-xenial-kinetic-supported.txt"],
         "name": "Ubuntu 16.04 ROS ESM",
         "codename": "Xenial Xerus",
         "alias": "ros-esm/xenial",
         "ppas": [{"ppa": "ubuntu-robotics-packagers/ros-security", "pocket": "security"}],
         "parent": "ubuntu/xenial",
-        "description": "Available with Ubuntu Advantage: https://ubuntu.com/advantage";,
+        "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
     },
     "ros-esm/melodic": {
         "eol": False,
-        "oval": False,
+        "data_formats": {
+            "json-pkg": False,
+            "oval": False,
+            "osv": False,
+            "vex": False,
+        },
         "packages": ["ros-esm-bionic-melodic-supported.txt"],
         "name": "Ubuntu 18.04 ROS ESM",
         "codename": "Bionic Beaver",
         "alias": "ros-esm/bionic",
         "ppas": [{"ppa": "ubuntu-robotics-packagers/ros-security", "pocket": "security"}],
         "parent": "ubuntu/bionic",
-        "description": "Available with Ubuntu Advantage: https://ubuntu.com/advantage";,
+        "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
+    },
+    "ros-esm/noetic": {
+        "eol": False,
+        "data_formats": {
+            "json-pkg": False,
+            "oval": False,
+            "osv": False,
+            "vex": False,
+        },
+        "packages": ["ros-esm-focal-noetic-supported.txt"],
+        "name": "Ubuntu 20.04 ROS ESM",
+        "codename": "Focal Fossa",
+        "alias": "ros-esm/focal/noetic",
+        "ppas": [{"ppa": "ubuntu-robotics-packagers/ros-security", "pocket": "security"}],
+        "parent": "ubuntu/focal",
+        "description": "Available with Ubuntu Pro: https://ubuntu.com/pro";,
     },
     "ubuntu/warty": {
         "eol": True,
@@ -514,7 +756,12 @@ subprojects = {
     },
     "ubuntu/trusty": {
         "eol": True,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "components": ["main", "restricted", "universe", "multiverse"],
         "name": "Ubuntu 14.04 LTS",
         "version": 14.04,
@@ -555,7 +802,12 @@ subprojects = {
     },
     "ubuntu/xenial": {
         "eol": True,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "components": ["main", "restricted", "universe", "multiverse"],
         "name": "Ubuntu 16.04 LTS",
         "version": 16.04,
@@ -596,7 +848,12 @@ subprojects = {
     },
     "ubuntu/bionic": {
         "eol": True,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "components": ["main", "restricted", "universe", "multiverse"],
         "name": "Ubuntu 18.04 LTS",
         "version": 18.04,
@@ -636,8 +893,13 @@ subprojects = {
         "stamp": 1571234400,
     },
     "ubuntu/focal": {
-        "eol": False,
-        "oval": True,
+        "eol": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "components": ["main", "restricted", "universe", "multiverse"],
         "name": "Ubuntu 20.04 LTS",
         "version": 20.04,
@@ -678,7 +940,12 @@ subprojects = {
     },
     "ubuntu/jammy": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "components": ["main", "restricted", "universe", "multiverse"],
         "name": "Ubuntu 22.04 LTS",
         "version": 22.04,
@@ -689,7 +956,9 @@ subprojects = {
     },
     "ubuntu/kinetic": {
         "eol": True,
-        "oval": True,
+        "data_formats": {
+            "oval": True,
+        },
         "components": ["main", "restricted", "universe", "multiverse"],
         "name": "Ubuntu 22.10",
         "version": 22.10,
@@ -701,7 +970,9 @@ subprojects = {
     },
     "ubuntu/lunar": {
         "eol": True,
-        "oval": True,
+        "data_formats": {
+            "oval": True,
+        },
         "components": ["main", "restricted", "universe", "multiverse"],
         "name": "Ubuntu 23.04",
         "version": 23.04,
@@ -713,7 +984,9 @@ subprojects = {
     },
     "ubuntu/mantic": {
         "eol": True,
-        "oval": True,
+        "data_formats": {
+            "oval": True,
+        },
         "components": ["main", "restricted", "universe", "multiverse"],
         "name": "Ubuntu 23.10",
         "version": 23.10,
@@ -725,7 +998,12 @@ subprojects = {
     },
     "ubuntu/noble": {
         "eol": False,
-        "oval": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
         "components": ["main", "restricted", "universe", "multiverse"],
         "name": "Ubuntu 24.04 LTS",
         "version": 24.04,
@@ -735,60 +1013,149 @@ subprojects = {
         "description": "Long Term Release",
         "stamp": 1714060800,
     },
-   "ubuntu/oracular": {
-       "eol": False,
-       "oval": True,
-       "components": ["main", "restricted", "universe", "multiverse"],
-       "name": "Ubuntu 24.10",
-       "version": 24.10,
-       "codename": "Oracular Oriole",
-       "alias": "oracular",
-       "devel": True,  # there can be only one ⚔
-       "description": "Interim Release",
-   },
+    "ubuntu/oracular": {
+        "eol": True,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
+        "components": ["main", "restricted", "universe", "multiverse"],
+        "name": "Ubuntu 24.10",
+        "version": 24.10,
+        "codename": "Oracular Oriole",
+        "alias": "oracular",
+        "devel": False,  # there can be only one ⚔
+        "description": "Interim Release",
+        "stamp": 1728961200,
+    },
+    "ubuntu/plucky": {
+        "eol": False,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
+        "components": ["main", "restricted", "universe", "multiverse"],
+        "name": "Ubuntu 25.04",
+        "version": 25.04,
+        "codename": "Plucky Puffin",
+        "alias": "plucky",
+        "devel": False,  # there can be only one ⚔
+        "description": "Interim Release",
+        "stamp": 1744905600,
+    },
+    "ubuntu/questing": {
+        "eol": False,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
+        "components": ["main", "restricted", "universe", "multiverse"],
+        "name": "Ubuntu 25.10",
+        "version": 25.10,
+        "codename": "Questing Quokka",
+        "alias": "questing",
+        "devel": False,  # there can be only one ⚔
+        "description": "Interim Release",
+        "stamp": 1760029200,
+    },
+    "ubuntu/resolute": {
+        "eol": False,
+        "data_formats": {
+            "json-pkg": True,
+            "oval": True,
+            "osv": True,
+            "vex": True,
+        },
+        "components": ["main", "restricted", "universe", "multiverse"],
+        "name": "Ubuntu 26.04",
+        "version": 26.04,
+        "codename": "Resolute Raccoon",
+        "alias": "resolute",
+        "devel": True,  # there can be only one ⚔
+        "description": "Long Term Release",
+    },
     "snap": {
         "eol": False,
-        "oval": False,
+        "data_formats": {
+            "json-pkg": False,
+            "oval": False,
+            "osv": False,
+            "vex": False,
+        },
         "packages": ["snap-supported.txt"],
     }
 }
 
-
+@lru_cache(maxsize=None)
 def product_series(rel):
     """Return the product,series tuple for rel."""
-    series = ""
-    parts = rel.split("/", 1)
-    product = parts[0]
-    if len(parts) == 2:
-        series = parts[1]
+    if rel in external_releases:
+        product = subprojects[rel]['product']
+        series = subprojects[rel]['release']
+    else:
+        series = ""
+        parts = rel.split('/')
+        if len(parts) == 3:
+            # for example: ros-esm/focal/foxy
+            product = parts[0]
+            series = parts[1]
+        elif len(parts) == 2:
+            product = parts[0]
+            series = parts[1]
+            # handle trusty/esm case
+            if product in releases:
+                product, series = series, product
+        elif parts[0] in releases:
+            # by default ubuntu releases have an omitted ubuntu product
+            # this avoids cases like snaps
+            product = PRODUCT_UBUNTU
+            series = parts[0]
+        else:
+            product = parts[0]
     return product, series
 
 
-# get the subproject details for rel along with
-# it's canonical name, product and series
+# get the subproject details for rel along with it's canonical name, product and series
+@lru_cache(maxsize=None)
 def get_subproject_details(rel):
-    """Return the product,series,details tuple for rel."""
-    canon, product, series, details = None, None, None, None
-    try:
+    """Return the canonical name,product,series,details tuple for rel."""
+    canon, product, series, details, release = None, None, None, None, None
+    if rel in subprojects:
         details = subprojects[rel]
-        product, series = product_series(rel)
-        canon = product + "/" + series
-    except (ValueError, KeyError):
-        # look for alias
+        release = rel
+    else:
         for r in subprojects:
             try:
-                if subprojects[r]["alias"] == rel:
-                    product, series = product_series(r)
+                if subprojects[r]["alias"] == rel \
+                  or (rel == "devel" and subprojects[r]["devel"]):
                     details = subprojects[r]
-                    canon = product + "/" + series
+                    release = r
                     break
             except KeyError:
                 pass
-            if details is not None:
-                break
+        else:
+            # support subproject versions: if no match was found, and the release name
+            # contains a slash, use the token before the slash to
+            # look for a subproject with a matching alias
+            if isinstance(rel, str) and "/" in rel:
+                parent = rel.split("/", 1)[0]  # e.g. from "focal/foxy", get "focal"
+                for r in subprojects:
+                    if subprojects[r].get("alias") == parent:
+                        details = subprojects[r]
+                        release = r
+                        break
+
+    if release:
+        product, series = product_series(release)
+        canon = release
     return canon, product, series, details
 
-
 def release_alias(rel):
     """Return the alias for rel or just rel if no alias is defined."""
     alias = rel
@@ -799,7 +1166,6 @@ def release_alias(rel):
         pass
     return alias
 
-
 def release_parent(rel):
     """Return the parent for rel or None if no parent is defined."""
     parent = None
@@ -811,6 +1177,26 @@ def release_parent(rel):
     return parent
 
 
+def release_progenitor(rel):
+    parent = release_parent(rel)
+    while release_parent(parent):
+        parent = release_parent(parent)
+
+    return parent
+
+
+def release_version(rel):
+    """Return the version for rel or its parent if it doesn't have one."""
+    version = 0.0
+    _, _, _, details = get_subproject_details(rel)
+    if details:
+        try:
+            version = details["version"]
+        except KeyError:
+            return release_version(release_progenitor(rel))
+    return version
+
+
 def get_external_subproject_cve_dir(subproject):
     """Get the directory where CVE files are stored for the subproject.
 
@@ -826,7 +1212,6 @@ def get_external_subproject_cve_dir(subproject):
     # CVEs live in the product dir
     return os.path.join(subprojects_dir, product)
 
-
 def get_external_subproject_dir(subproject):
     """Get the directory for the given external subproject."""
     rel, _, _, _ = get_subproject_details(subproject)
@@ -834,14 +1219,21 @@ def get_external_subproject_dir(subproject):
         raise ValueError("%s is not an external subproject" % rel)
     return os.path.join(subprojects_dir, rel)
 
-
-def read_external_subproject_config(subproject):
-    """Read and return the configuration for the given subproject."""
-    sp_dir = get_external_subproject_dir(subproject)
-    config_yaml = os.path.join(sp_dir, "config.yaml")
+def read_external_subproject_config(subproject_dir):
+    """Read and return the configuration for the given subproject directory."""
+    config_yaml = os.path.join(subproject_dir, "config.yml")
     with open(config_yaml) as cfg:
         return yaml.safe_load(cfg)
-
+    
+def read_external_subproject_details(subproject):
+    """Read and return the project details for the given subproject."""
+    sp_dir = get_external_subproject_dir(subproject)
+    # project.yml is located in the top level folder for the subproject
+    project_dir = sp_dir[:sp_dir.rfind("/")]
+    project_yaml = os.path.join(project_dir, "project.yml")
+    if os.path.isfile(project_yaml):
+        with open(project_yaml) as cfg:
+            return yaml.safe_load(cfg)
 
 def find_files_recursive(path, name):
     """Return a list of all files under path with name."""
@@ -853,156 +1245,205 @@ def find_files_recursive(path, name):
                 matches.append(filepath)
     return matches
 
-
-def find_external_subproject_cves(cve):
-    """
-    Return the list of external subproject CVE snippets for the given CVE.
-    """
+def find_external_subproject_cves(cve, realpath=False):
+    """Return the list of external subproject CVE snippets for the given CVE."""
     cves = []
-    for rel in external_releases:
-        # fallback to the series specific subdir rather than just the
-        # top-level project directory even though this is preferred
-        for d in [
-            get_external_subproject_cve_dir(rel),
-            get_external_subproject_dir(rel),
-        ]:
-            path = os.path.join(d, cve)
+    # Use the cache if it's not empty
+    if subproject_dir_cache_cves:
+        if cve not in subproject_dir_cache_cves:
+            return cves
+        for entry in subproject_dir_cache_dirs:
+            path = os.path.join(entry, cve)
             if os.path.exists(path):
-                cves.append(path)
+                if realpath:
+                    path = os.path.realpath(path)
+                if path not in cves:
+                    cves.append(path)
+    else:
+        for rel in external_releases:
+            # fallback to the series specific subdir rather than just the
+            # top-level project directory even though this is preferred
+            for path in [get_external_subproject_dir(rel),
+                         get_external_subproject_cve_dir(rel)]:
+                path = os.path.join(path, cve)
+                if os.path.exists(path):
+                    if realpath:
+                        path = os.path.realpath(path)
+                    if path not in cves:
+                        cves.append(path)
+                    break
+
     return cves
 
+# Keys in config.yml for a external subproject
+# should follow the same as any other subproject
+# except for the extra 'product' and 'release' keys.
+MANDATORY_EXTERNAL_SUBPROJECT_KEYS = ['cve_triage', 'cve_patching', 'cve_notification', 'security_updates_notification', 'binary_copies_only', 'seg_support', 'owners', 'subprojects']
+MANDATORY_EXTERNAL_SUBPROJECT_PPA_KEYS = ['ppas', 'data_formats', 'product', 'release', 'supported_packages']
+OPTIONAL_EXTERNAL_SUBPROJECT_PPA_KEYS =  ['parent', 'name', 'codename', 'description', 'aliases', 'archs', 'lp_distribution', 'staging_updates_ppa', 'staging_lp_distribution', 'build_ppa', 'build_lp_distribution']
 
-def load_external_subprojects():
+def load_external_subprojects(strict=False):
     """Search for and load subprojects into the global subprojects dict.
 
     Search for and load subprojects into the global subprojects dict.
 
     A subproject is defined as a directory which resides within
-    subprojects_dir and contains a supported.txt file. It can also contain
-    a project.yml file which specifies configuration directives for the
-    project as well as snippet CVE files. By convention, a subproject is
-    usually defined as the combination of a product and series, ie:
+    subprojects_dir and references a supported.txt file and a PPA.
+    This information is stored in config.yml, which contains all the
+    information in regards the subproject. It can also contain
+    a project.yml file which specifies metadata for the project as well
+    as snippet CVE files. By convention, a subproject is usually defined
+    as the combination of a product and series, ie:
 
     esm-apps/focal
 
     as such in this case there would expect to be within subprojects_dir a
-    directory called esm-apps/ and within that a subdirectory called
-    focal/. Inside this focal/ subdirectory a supported.txt file would list
-    the packages which are supported by the esm-apps/focal subproject. By
-    convention, snippet CVE files should reside within the esm-apps/
-    project directory rather than the esm-apps/focal/ subdirectory to avoid
-    unnecessary fragmentation across different subproject series.
-
+    directory called esm-apps/ and within that, in the config.yml, an entry
+    of type 'esm-apps/focal'. Inside this entry, a reference to the designated
+    supported.txt file, which would list the packages which are supported by
+    the esm-apps/focal subproject. By convention, snippet CVE files should
+    reside within the esm-apps/ project directory.
+
+    The strict argument determines whether to continue processing if
+    there are any missing components to the subproject or not.
     """
-    for supported_txt in find_files_recursive(
-        subprojects_dir, "supported.txt"
-    ):
-        # rel name is the path component between subprojects/ and
-        # /supported.txt
-        rel = supported_txt[
-            len(subprojects_dir) + 1:-len("supported.txt") - 1
-        ]
-        external_releases.append(rel)
-        subprojects.setdefault(rel, {"packages": [], "eol": False})
-        # an external subproject can append to an internal one
-        subprojects[rel]["packages"].append(supported_txt)
-        try:
-            # use config to populate other parts of the
-            # subproject settings
-            config = read_external_subproject_config(rel)
-            subprojects[rel].setdefault("ppa", config["ppa"])
-            subprojects[rel].setdefault("name", config["name"])
-            subprojects[rel].setdefault("description", config["description"])
-            subprojects[rel].setdefault("parent", config["parent"])
-        except Exception:
-            pass
-
+    for config_yaml in find_files_recursive(subprojects_dir, "config.yml"):
+        subproject_path = config_yaml[:-len("config.yml")-1]
+        # use config to populate other parts of the
+        # subproject settings
+        main_config = read_external_subproject_config(subproject_path)
+
+        for key in MANDATORY_EXTERNAL_SUBPROJECT_KEYS:
+            if key not in main_config:
+                error_msg = '%s missing "%s" field.' % (subproject_path, key)
+                if strict:
+                    raise ValueError(error_msg)
+                else:
+                    print(error_msg, file=sys.stderr)
+
+        for subproject in main_config['subprojects']:
+            config = main_config['subprojects'][subproject]
+            if 'product' not in config or 'release' not in config:
+                error_msg = '%s: missing "product" or "release".' % (subproject_path)
+                if strict:
+                    raise ValueError(error_msg)
+                else:
+                    print(error_msg, file=sys.stderr)
+
+            external_releases.append(subproject)
+            subprojects.setdefault(subproject, {"packages": []})
+            # an external subproject can append to an internal one
+            subprojects[subproject]["packages"].append(\
+                os.path.join(subproject_path, config['supported_packages']))
+
+            # check if aliases for packages exist
+            if 'aliases' in config:
+                subprojects[subproject].setdefault("aliases", \
+                    os.path.join(subproject_path, config['aliases']))
+
+            for key in MANDATORY_EXTERNAL_SUBPROJECT_PPA_KEYS + OPTIONAL_EXTERNAL_SUBPROJECT_PPA_KEYS:
+                if key in config:
+                    subprojects[subproject].setdefault(key, config[key])
+                elif key in OPTIONAL_EXTERNAL_SUBPROJECT_PPA_KEYS:
+                    _, _, _, original_release_details = get_subproject_details(config['release'])
+                    if original_release_details and key in original_release_details:
+                        subprojects[subproject].setdefault(key, original_release_details[key])
+                else:
+                    error_msg = '%s missing "%s" field.' % (subproject_path, key)
+                    del subprojects[subproject]
+                    external_releases.remove(subproject)
+                    if strict:
+                        raise ValueError(error_msg)
+                    else:
+                        print(error_msg, file=sys.stderr)
+
+            subprojects[subproject].update({
+                key: value for key, value in main_config.items() if key != "subprojects"
+            })
+
+            # Introducing a new "eol" tag for subprojects, earlier the eol tag was marked
+            # as "False" for all subprojects and introducing a new "eol" tag will add the
+            # details to "support_metadata" but not that actual subprojects[subproject]["eol"]
+            # field, so this assignment will make subprojects[subproject]["eol"] in sync
+            # with "eol" tag marked in subproject config
+            subprojects[subproject]["eol"] = config.get("eol", False)
+
+            # populate `eol_external_subprojects` and `active_external_subprojects`
+            (eol_external_subprojects if subprojects[subproject]["eol"] else active_external_subprojects)[subproject] = subprojects[subproject]
+
+            project = read_external_subproject_details(subproject)
+            if project:
+                subprojects[subproject].setdefault("customer", project)
+
+    # now ensure they are consistent
+    global devel_release
+    for release in subprojects:
+        details = subprojects[release]
+        rel = release_alias(release)
+        # prefer the alias name
+        all_releases.append(rel)
+        if details["eol"]:
+            eol_releases.append(rel)
+        if "devel" in details and details["devel"]:
+            if devel_release != "" and devel_release != rel:
+                raise ValueError("there can be only one ⚔ devel")
+            devel_release = rel
+        if (
+            "description" in details
+            and details["description"] == "Interim Release"
+            and rel not in external_releases
+        ):
+            interim_releases.append(rel)
+        # ubuntu specific releases
+        product, _ = product_series(release)
+        if product == PRODUCT_UBUNTU:
+            releases.append(rel)
 
 load_external_subprojects()
 
-for release in subprojects:
-    details = subprojects[release]
-    rel = release_alias(release)
-    # prefer the alias name
-    all_releases.append(rel)
-    if details["eol"]:
-        eol_releases.append(rel)
-    if "devel" in details and details["devel"]:
-        if devel_release != "":
-            raise ValueError("there can be only one ⚔ devel")
-        devel_release = rel
-    # ubuntu specific releases
-    product, series = product_series(release)
-    if product == PRODUCT_UBUNTU:
-        releases.append(rel)
-
-
+# all of the following are only valid for the Tags field of the CVE file itself
 valid_cve_tags = {
-    "cisa-kev": (
-        "This vulnerability is listed in the CISA Known Exploited "
-        "Vulnerabilities Catalog. For more details see "
-        "https://www.cisa.gov/known-exploited-vulnerabilities-catalog";
-    ),
+        'cisa-kev': 'This vulnerability is listed in the CISA Known Exploited Vulnerabilities Catalog. For more details see https://www.cisa.gov/known-exploited-vulnerabilities-catalog',
+        'epss-prioritized': 'This vulnerability has a significant EPSS score and is being prioritized for analysis. For more details on EPSS scoring see https://www.first.org/epss',
+        'epss-reviewed': 'This vulnerability has been reviewed/analyzed due to previously being tagged as epss-prioritized.',
 }
 
+# all of the following are only valid for a Tags_srcpkg field
 valid_package_tags = {
-    "universe-binary": (
-        "Binaries built from this source package are in universe and so are "
-        "supported by the community. For more details see "
-        "https://wiki.ubuntu.com/SecurityTeam/FAQ#Official_Support";
-    ),
-    "not-ue": (
-        "This package is not directly supported by the Ubuntu Security Team"
-    ),
-    "apparmor": (
-        "This vulnerability is mitigated in part by an AppArmor profile. "
-        "For more details see "
-        "https://wiki.ubuntu.com/Security/Features#apparmor";
-    ),
-    "stack-protector": (
-        "This vulnerability is mitigated in part by the use of gcc's stack "
-        "protector in Ubuntu. For more details see "
-        "https://wiki.ubuntu.com/Security/Features#stack-protector";
-    ),
-    "fortify-source": (
-        "This vulnerability is mitigated in part by the use of "
-        "-D_FORTIFY_SOURCE=2 in Ubuntu. For more details see "
-        "https://wiki.ubuntu.com/Security/Features#fortify-source";
-    ),
-    "symlink-restriction": (
-        "This vulnerability is mitigated in part by the use of symlink "
-        "restrictions in Ubuntu. For more details see "
-        "https://wiki.ubuntu.com/Security/Features#symlink";
-    ),
-    "hardlink-restriction": (
-        "This vulnerability is mitigated in part by the use of hardlink "
-        "restrictions in Ubuntu. For more details see "
-        "https://wiki.ubuntu.com/Security/Features#hardlink";
-    ),
-    "heap-protector": (
-        "This vulnerability is mitigated in part by the use of GNU C Library "
-        "heap protector in Ubuntu. For more details see "
-        "https://wiki.ubuntu.com/Security/Features#heap-protector";
-    ),
-    "pie": (
-        "This vulnerability is mitigated in part by the use of Position "
-        "Independent Executables in Ubuntu. For more details see "
-        "https://wiki.ubuntu.com/Security/Features#pie";
-    ),
-    "review-break-fix": (
-        "This vulnerability automatically received break-fix commits entries "
-        "when it was added and needs to be reviewed."
-    ),
+    'universe-binary': 'Binaries built from this source package are in universe and so are supported by the community. For more details see https://wiki.ubuntu.com/SecurityTeam/FAQ#Official_Support',
+    'not-ue': 'This package is not directly supported by the Ubuntu Security Team',
+    'apparmor': 'This vulnerability is mitigated in part by an AppArmor profile. For more details see https://wiki.ubuntu.com/Security/Features#apparmor',
+    'stack-protector': 'This vulnerability is mitigated in part by the use of gcc\'s stack protector in Ubuntu. For more details see https://wiki.ubuntu.com/Security/Features#stack-protector',
+    'fortify-source': 'This vulnerability is mitigated in part by the use of -D_FORTIFY_SOURCE=2 in Ubuntu. For more details see https://wiki.ubuntu.com/Security/Features#fortify-source',
+    'symlink-restriction': 'This vulnerability is mitigated in part by the use of symlink restrictions in Ubuntu. For more details see https://wiki.ubuntu.com/Security/Features#symlink',
+    'hardlink-restriction': 'This vulnerability is mitigated in part by the use of hardlink restrictions in Ubuntu. For more details see https://wiki.ubuntu.com/Security/Features#hardlink',
+    'heap-protector': 'This vulnerability is mitigated in part by the use of GNU C Library heap protector in Ubuntu. For more details see https://wiki.ubuntu.com/Security/Features#heap-protector',
+    'pie': 'This vulnerability is mitigated in part by the use of Position Independent Executables in Ubuntu. For more details see https://wiki.ubuntu.com/Security/Features#pie',
+    'review-break-fix': 'This vulnerability automatically received break-fix commits entries when it was added and needs to be reviewed.',
 }
 
 # Possible CVE priorities
-PRIORITIES = ["negligible", "low", "medium", "high", "critical"]
+priorities = ['negligible', 'low', 'medium', 'high', 'critical']
+
+NOTE_RE = re.compile(r'^\s+([A-Za-z0-9-]+)([>|]) *(.*)$')
 
-NOTE_RE = re.compile(r"^\s+([A-Za-z0-9-]+)([>|]) *(.*)$")
+# as per
+# https://www.debian.org/doc/debian-policy/ch-controlfields.html#s-f-version
+# ideally we would use dpkg --validate-version for this but it is much more
+# expensive to shell out than match via a regex so even though this is both
+# slightly more strict and also less strict that what dpkg --validate-version
+# would permit, it should be good enough for our purposes
+VERSION_RE = re.compile(r'^([0-9]+:)?([0-9]+[a-zA-Z0-9~.+-]*)$')
+
+def validate_version(version):
+    return VERSION_RE.match(version) is not None
 
 EXIT_FAIL = 1
 EXIT_OKAY = 0
 
+subproject_dir_cache_cves = set()
+subproject_dir_cache_dirs = set()
+
 # New CVE file format for release package field is:
 # <product>[/<where or who>]_SOFTWARE[/<modifier>]: <status> [(<when>)]
 # <product> is the Canonical product or supporting technology (eg, ‘esm-apps’
@@ -1026,35 +1467,28 @@ EXIT_OKAY = 0
 # e.g.: git/github.com/gogo/protobuf_gogoprotobuf: needs-triage
 # This method should keep supporting existing current format:
 # e.g.: bionic_jackson-databind: needs-triage
-def parse_cve_release_package_field(
-    cve, field, data, value, code, msg, linenum
-):
+def parse_cve_release_package_field(cvefile, field, data, value, code, msg, linenum):
     package = ""
     release = ""
     state = ""
     details = ""
     try:
-        release, package = field.split("_", 1)
+        release, package = field.split('_', 1)
     except ValueError:
-        msg += "%s: %d: bad field with '_': '%s'\n" % (cve, linenum, field)
+        msg += "%s: %d: bad field with '_': '%s'\n" % (cvefile, linenum, field)
         code = EXIT_FAIL
         return False, package, release, state, details, code, msg
 
     try:
-        info = value.split(" ", 1)
+        info = value.split(' ', 1)
     except ValueError:
-        msg += "%s: %d: missing state for '%s': '%s'\n" % (
-            cve,
-            linenum,
-            field,
-            value,
-        )
+        msg += "%s: %d: missing state for '%s': '%s'\n" % (cvefile, linenum, field, value)
         code = EXIT_FAIL
         return False, package, release, state, details, code, msg
 
     state = info[0]
-    if state == "":
-        state = "needs-triage"
+    if state == '':
+        state = 'needs-triage'
 
     if len(info) < 2:
         details = ""
@@ -1062,72 +1496,49 @@ def parse_cve_release_package_field(
         details = info[1].strip()
 
     if details.startswith("["):
-        msg += "%s: %d: %s has details that starts with a bracket: '%s'\n" % (
-            cve,
-            linenum,
-            field,
-            details,
-        )
+        msg += "%s: %d: %s has details that starts with a bracket: '%s'\n" % (cvefile, linenum, field, details)
         code = EXIT_FAIL
         return False, package, release, state, details, code, msg
 
-    if details.startswith("("):
+    if details.startswith('('):
         details = details[1:]
-    if details.endswith(")"):
+    if details.endswith(')'):
         details = details[:-1]
 
     # Work-around for old-style of only recording released versions
-    if details == "" and state[0] in ("0123456789"):
+    if details == '' and state[0] in ('0123456789'):
         details = state
-        state = "released"
-
-    valid_states = [
-        "needs-triage",
-        "needed",
-        "active",
-        "pending",
-        "released",
-        "deferred",
-        "DNE",
-        "ignored",
-        "not-affected",
-    ]
+        state = 'released'
+
+    valid_states = ['needs-triage', 'needed', 'in-progress', 'pending', 'released', 'deferred', 'DNE', 'ignored', 'not-affected']
     if state not in valid_states:
-        msg += (
-            "%s: %d: %s has unknown state: '%s' (valid states are: %s)\n"
-            % (
-                cve,
-                linenum,
-                field,
-                state,
-                " ".join(valid_states),
-            )
-        )
+        msg += "%s: %d: %s has unknown state: '%s' (valid states are: %s)\n" % (cvefile, linenum, field, state,
+                                                                                   ' '.join(valid_states))
         code = EXIT_FAIL
         return False, package, release, state, details, code, msg
 
+    # if the state is released or pending then the details needs to be a valid
+    # debian package version number
+    if details != "" and state in ['released', 'pending'] and release not in ['upstream', 'snap']:
+        if not validate_version(details):
+            msg += "%s: %d: %s has invalid version for state %s: '%s'\n" % (cvefile, linenum, field, state, details)
+            code = EXIT_FAIL
+            return False, package, release, state, details, code, msg
+
     # Verify "released" kernels have version details
-    # if state == 'released' and package in kernel_srcs and details == '':
-    #    msg += "%s: %s_%s has state '%s' but lacks version note\n" % (
-    #       cve, package, release, state
-    #    )
+    #if state == 'released' and package in kernel_srcs and details == '':
+    #    msg += "%s: %s_%s has state '%s' but lacks version note\n" % (cvefile, package, release, state)
     #    code = EXIT_FAIL
 
     # Verify "active" states have an Assignee
-    if state == "active" and data["Assigned-to"].strip() == "":
-        msg += "%s: %d: %s has state '%s' but lacks 'Assigned-to'\n" % (
-            cve,
-            linenum,
-            field,
-            state,
-        )
+    if state == 'in-progress' and data['Assigned-to'].strip() == "":
+        msg += "%s: %d: %s has state '%s' but lacks 'Assigned-to'\n" % (cvefile, linenum, field, state)
         code = EXIT_FAIL
         return False, package, release, state, details, code, msg
 
     return True, package, release, state, details, code, msg
 
-
-class NotesParser:
+class NotesParser(object):
     def __init__(self):
         self.notes = list()
         self.user = None
@@ -1145,17 +1556,12 @@ class NotesParser:
             # follow up comments should have 2 space indent and
             # an author
             if self.user is None:
-                msg += "%s: %d: Note entry with no author: '%s'\n" % (
-                    cve,
-                    linenum,
-                    line[1:],
-                )
+                msg += ("%s: %d: Note entry with no author: '%s'\n" %
+                        (cve, linenum, line[1:]))
                 code = EXIT_FAIL
-            if not line.startswith("  "):
-                msg += (
-                    "%s: %d: Note continuations should be indented by "
-                    "2 spaces: '%s'.\n" % (cve, linenum, line)
-                )
+            if not line.startswith('  '):
+                msg += ("%s: %d: Note continuations should be indented by 2 spaces: '%s'.\n" %
+                        (cve, linenum, line))
                 code = EXIT_FAIL
             new_user = self.user
             new_sep = self.separator
@@ -1163,21 +1569,21 @@ class NotesParser:
         if self.user and self.separator and self.note:
             # if is different user, start a new note
             if new_user != self.user:
-                self.notes.append((self.user, self.note))
+                self.notes.append([self.user, self.note])
                 self.user = new_user
                 self.note = new_note
                 self.separator = new_sep
             elif new_sep != self.separator:
                 # finish this note and start a new one since this has new
                 # semantics
-                self.notes.append((self.user, self.note))
+                self.notes.append([self.user, self.note])
                 self.separator = new_sep
                 self.note = new_note
             else:
-                if self.separator == "|":
+                if self.separator == '|':
                     self.note = self.note + " " + new_note
                 else:
-                    assert self.separator == ">"
+                    assert(self.separator == '>')
                     self.note = self.note + "\n" + new_note
         else:
             # this is the first note
@@ -1189,7 +1595,7 @@ class NotesParser:
     def finalize(self):
         if self.user is not None and self.note is not None:
             # add last Note
-            self.notes.append((self.user, self.note))
+            self.notes.append([self.user, self.note])
             self.user = None
             self.note = None
         notes = self.notes
@@ -1198,313 +1604,240 @@ class NotesParser:
         self.notes = None
         return notes
 
+def load_cve(cvefile, strict=False, srcentries=None):
+    '''Loads a given CVE into:
+       dict( fields...
+             'pkgs' -> dict(  pkg -> dict(  release ->  (state, details)   ) )
+           )
+    '''
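+    # For example (illustrative values only), data['pkgs'] might contain:
+    #   {'openssl': {'focal': ['released', '1.1.1f-1ubuntu2.20']}}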
 
-def load_cve(cve, strict=False, srcmap=None):
-    """Loads a given CVE into:
-    dict( fields...
-          'pkgs' -> dict(  pkg -> dict(  release ->  (state, details)   ) )
-        )
-    """
-
-    msg = ""
+    msg = ''
     code = EXIT_OKAY
-    required_fields = [
-        "Candidate",
-        "PublicDate",
-        "References",
-        "Description",
-        "Ubuntu-Description",
-        "Notes",
-        "Bugs",
-        "Priority",
-        "Discovered-by",
-        "Assigned-to",
-        "CVSS",
-    ]
-    extra_fields = ["CRD", "PublicDateAtUSN", "Mitigation"]
-
-    data = OrderedDict()
+    required_fields = ['Candidate', 'PublicDate', 'References', 'Description',
+                       'Ubuntu-Description', 'Notes', 'Bugs',
+                       'Priority', 'Discovered-by', 'Assigned-to', 'CVSS']
+    extra_fields = ['CRD', 'PublicDateAtUSN', 'Mitigation', 'Tags']
+
+    data = dict()
     # maps entries in data to their source line - if didn't supply one
     # create a local one to simplify the code
-    if srcmap is None:
-        srcmap = OrderedDict()
-    srcmap.setdefault("pkgs", OrderedDict())
-    srcmap.setdefault("tags", OrderedDict())
-    data.setdefault("tags", OrderedDict())
-    srcmap.setdefault("patches", OrderedDict())
-    data.setdefault("patches", OrderedDict())
-    affected = OrderedDict()
+    if srcentries is None:
+        srcentries = dict()
+    srcentries.setdefault('pkgs', dict())
+    srcentries.setdefault('tags', dict())
+    data.setdefault('tags', dict())
+    srcentries.setdefault('patches', dict())
+    data.setdefault('patches', dict())
+    affected = dict()
     lastfield = ""
-    fields_seen = []
-    if not os.path.exists(cve):
-        raise ValueError("File does not exist: '%s'" % (cve))
+    fields_seen = set()
+    if not os.path.exists(cvefile):
+        raise ValueError("File does not exist: '%s'" % cvefile)
     linenum = 0
     notes_parser = NotesParser()
+    priority_reason = {}
     cvss_entries = []
-
-    cve_file = codecs.open(cve, encoding="utf-8")
-
-    for line in cve_file.readlines():
+    with codecs.open(cvefile, encoding="utf-8") as inF:
+        lines = inF.readlines()
+    for line in lines:
         line = line.rstrip()
         linenum += 1
 
         # Ignore blank/commented lines
-        if len(line) == 0 or line.startswith("#"):
+        if len(line) == 0 or line.startswith('#'):
             continue
-        if line.startswith(" "):
+        if line.startswith(' '):
             try:
                 # parse Notes properly
-                if lastfield == "Notes":
-                    code, newmsg = notes_parser.parse_line(
-                        cve, line, linenum, code
-                    )
+                if lastfield == 'Notes':
+                    code, newmsg = notes_parser.parse_line(cvefile, line, linenum, code)
                     if code != EXIT_OKAY:
                         msg += newmsg
-                elif "Patches_" in lastfield:
+                elif lastfield.startswith('Priority'):
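+                    # Continuation lines under a Priority field carry the
+                    # free-form reason for that priority; collect them per
+                    # package (None is the key for the global Priority field).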
+                    priority_part = lastfield.split('_')[1] if '_' in lastfield else None
+                    if priority_part in priority_reason:
+                        priority_reason[priority_part].append(line.strip())
+                    else:
+                        priority_reason[priority_part] = [line.strip()]
+                elif 'Patches_' in lastfield:
                     try:
-                        _, pkg = lastfield.split("_", 1)
-                        patch_type, entry = line.split(":", 1)
+                        _, pkg = lastfield.split('_', 1)
+                        patch_type, entry = line.split(':', 1)
                         patch_type = patch_type.strip()
                         entry = entry.strip()
-                        data["patches"][pkg].append((patch_type, entry))
-                        srcmap["patches"][pkg].append((cve, linenum))
+                        data['patches'][pkg].append((patch_type, entry))
+                        srcentries['patches'][pkg].append((cvefile, linenum))
                     except Exception as e:
-                        msg += (
-                            "%s: %d: Failed to parse '%s' entry %s: %s\n"
-                            % (
-                                cve,
-                                linenum,
-                                lastfield,
-                                line,
-                                e,
-                            )
-                        )
+                        msg += "%s: %d: Failed to parse '%s' entry %s: %s\n" % (cvefile, linenum, lastfield, line, e)
                         code = EXIT_FAIL
-                elif lastfield == "CVSS":
+                elif lastfield == 'CVSS':
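+                    # Continuation lines under CVSS look like (illustrative):
+                    #   nvd: CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H [7.8 HIGH]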
                     try:
-                        cvss = OrderedDict()
-                        result = re.search(
-                            r" (.+)\: (\S+)( \[(.*) (.*)\])?", line
-                        )
+                        cvss = dict()
+                        result = re.search(r' (.+)\: (\S+)( \[(.*) (.*)\])?', line)
                         if result is None:
                             continue
-                        cvss["source"] = result.group(1)
-                        cvss["vector"] = result.group(2)
-                        entry = parse_cvss(cvss["vector"])
-                        if entry is None:
-                            raise RuntimeError(
-                                "Failed to parse_cvss() without raising "
-                                "an exception."
-                            )
+                        cvss['source'] = result.group(1)
+                        cvss['vector'] = result.group(2)
                         if result.group(3):
-                            cvss["baseScore"] = result.group(4)
-                            cvss["baseSeverity"] = result.group(5)
+                            cvss['baseScore'] = result.group(4)
+                            cvss['baseSeverity'] = result.group(5)
 
                         cvss_entries.append(cvss)
-                        # CVSS in srcmap will be a tuple since this is the
+                        # CVSS in srcentries will be a tuple since this is the
                         # line where the CVSS block starts - so convert it
                         # to a dict first if needed
-                        if type(srcmap["CVSS"]) is tuple:
-                            srcmap["CVSS"] = OrderedDict()
-                        srcmap["CVSS"].setdefault(
-                            cvss["source"], (cve, linenum)
-                        )
+                        if type(srcentries["CVSS"]) is tuple:
+                            srcentries["CVSS"] = dict()
+                        srcentries["CVSS"].setdefault(cvss['source'], (cvefile, linenum))
                     except Exception as e:
-                        msg += "%s: %d: Failed to parse CVSS: %s\n" % (
-                            cve,
-                            linenum,
-                            e,
-                        )
+                        msg += "%s: %d: Failed to parse CVSS: %s\n" % (cvefile, linenum, e)
                         code = EXIT_FAIL
                 else:
-                    data[lastfield] += "\n%s" % (line[1:])
+                    data[lastfield] += '\n%s' % (line[1:])
             except KeyError as e:
-                msg += "%s: %d: bad line '%s' (%s)\n" % (cve, linenum, line, e)
+                msg += "%s: %d: bad line '%s' (%s)\n" % (cvefile, linenum, line, e)
                 code = EXIT_FAIL
             continue
 
         try:
-            field, value = line.split(":", 1)
+            field, value = line.split(':', 1)
         except ValueError as e:
-            msg += "%s: %d: bad line '%s' (%s)\n" % (cve, linenum, line, e)
+            msg += "%s: %d: bad line '%s' (%s)\n" % (cvefile, linenum, line, e)
             code = EXIT_FAIL
             continue
 
         lastfield = field = field.strip()
         if field in fields_seen:
-            msg += "%s: %d: repeated field '%s'\n" % (cve, linenum, field)
+            msg += "%s: %d: repeated field '%s'\n" % (cvefile, linenum, field)
             code = EXIT_FAIL
         else:
-            fields_seen.append(field)
+            fields_seen.add(field)
         value = value.strip()
-        if field == "Candidate":
+        if field == 'Candidate':
             data.setdefault(field, value)
-            srcmap.setdefault(field, (cve, linenum))
-            if (
-                value != ""
-                and not value.startswith("CVE-")
-                and not value.startswith("UEM-")
-                and not value.startswith("EMB-")
-            ):
-                msg += (
-                    "%s: %d: unknown Candidate '%s' "
-                    "(must be /(CVE|UEM|EMB)-/)\n"
-                    % (
-                        cve,
-                        linenum,
-                        value,
-                    )
-                )
+            srcentries.setdefault(field, (cvefile, linenum))
+            if value != "" and not value.startswith('CVE-') and not value.startswith('UEM-') and not value.startswith('EMB-'):
+                msg += "%s: %d: unknown Candidate '%s' (must be /(CVE|UEM|EMB)-/)\n" % (cvefile, linenum, value)
                 code = EXIT_FAIL
-        elif "Priority" in field:
+        elif 'Priority' in field:
             # For now, throw away comments on Priority fields
-            if " " in value:
+            if ' ' in value:
                 value = value.split()[0]
-            if "Priority_" in field:
+            if 'Priority_' in field:
                 try:
-                    _, pkg = field.split("_", 1)
+                    _, pkg = field.split('_', 1)
                 except ValueError:
-                    msg += "%s: %d: bad field with 'Priority_': '%s'\n" % (
-                        cve,
-                        linenum,
-                        field,
-                    )
+                    msg += "%s: %d: bad field with 'Priority_': '%s'\n" % (cvefile, linenum, field)
                     code = EXIT_FAIL
                     continue
-            data.setdefault(field, value)
-            srcmap.setdefault(field, (cve, linenum))
-            if value not in ["untriaged", "not-for-us"] + PRIORITIES:
-                msg += "%s: %d: unknown Priority '%s'\n" % (
-                    cve,
-                    linenum,
-                    value,
-                )
+            # initially set the priority reason as an empty string - this will
+            # be fixed up later with a real value if one is found
+            data.setdefault(field, [value, ""])
+            srcentries.setdefault(field, (cvefile, linenum))
+            if value not in ['untriaged', 'not-for-us'] + priorities:
+                msg += "%s: %d: unknown Priority '%s'\n" % (cvefile, linenum, value)
                 code = EXIT_FAIL
-        elif "Patches_" in field:
+        elif 'Patches_' in field:
             try:
-                _, pkg = field.split("_", 1)
+                _, pkg = field.split('_', 1)
             except ValueError:
-                msg += "%s: %d: bad field with 'Patches_': '%s'\n" % (
-                    cve,
-                    linenum,
-                    field,
-                )
+                msg += "%s: %d: bad field with 'Patches_': '%s'\n" % (cvefile, linenum, field)
                 code = EXIT_FAIL
                 continue
             # value should be empty
             if len(value) > 0:
-                msg += "%s: %d: '%s' field should have no value\n" % (
-                    cve,
-                    linenum,
-                    field,
-                )
+                msg += "%s: %d: '%s' field should have no value\n" % (cvefile, linenum, field)
                 code = EXIT_FAIL
                 continue
-            data["patches"].setdefault(pkg, list())
-            srcmap["patches"].setdefault(pkg, list())
-        # This changes are needed to support global `Tags:`
-        elif "Tags" in field:
-            """These are processed into the "tags" hash"""
+            data['patches'].setdefault(pkg, list())
+            srcentries['patches'].setdefault(pkg, list())
+        elif 'Tags' in field:
+            '''These are processed into the "tags" hash'''
             try:
-                _, pkg = field.split("_", 1)
+                _, pkg = field.split('_', 1)
             except ValueError:
                 # no package specified - this is the global tags field - use a
                 # key of '*' to store it in the package hash
                 pkg = GLOBAL_TAGS_KEY
-            data["tags"].setdefault(pkg, set())
-            srcmap["tags"].setdefault(pkg, (cve, linenum))
-            for word in value.strip().split(" "):
+            data['tags'].setdefault(pkg, set())
+            srcentries['tags'].setdefault(pkg, (cvefile, linenum))
+            for word in value.strip().split(' '):
                 if pkg == GLOBAL_TAGS_KEY and word not in valid_cve_tags:
-                    msg += "%s: %d: invalid CVE tag '%s': '%s'\n" % (
-                        cve,
-                        linenum,
-                        word,
-                        field,
-                    )
+                    msg += "%s: %d: invalid CVE tag '%s': '%s'\n" % (cvefile, linenum, word, field)
                     code = EXIT_FAIL
                     continue
                 elif pkg != GLOBAL_TAGS_KEY and word not in valid_package_tags:
-                    msg += "%s: %d: invalid package tag '%s': '%s'\n" % (
-                        cve,
-                        linenum,
-                        word,
-                        field,
-                    )
+                    msg += "%s: %d: invalid package tag '%s': '%s'\n" % (cvefile, linenum, word, field)
                     code = EXIT_FAIL
                     continue
-                data["tags"][pkg].add(word)
-        elif "_" in field:
-            (
-                success,
-                pkg,
-                rel,
-                state,
-                details,
-                code,
-                msg,
-            ) = parse_cve_release_package_field(
-                cve, field, data, value, code, msg, linenum
-            )
+                data['tags'][pkg].add(word)
+        elif '_' in field:
+            success, pkg, rel, state, details, code, msg = parse_cve_release_package_field(cvefile, field, data, value, code, msg, linenum)
             if not success:
-                assert code == EXIT_FAIL
+                assert(code == EXIT_FAIL)
                 continue
             canon, _, _, _ = get_subproject_details(rel)
-            if canon is None and rel not in ["upstream", "devel"]:
-                msg += "%s: %d: unknown entry '%s'\n" % (cve, linenum, rel)
+            if canon is None and rel not in ['upstream', 'devel']:
+                msg += "%s: %d: unknown entry '%s'\n" % (cvefile, linenum, rel)
                 code = EXIT_FAIL
                 continue
-            affected.setdefault(pkg, OrderedDict())
+            affected.setdefault(pkg, dict())
             if rel in affected[pkg]:
-                msg += (
-                    "%s: %d: duplicate entry for '%s': original at line %d\n"
-                    % (
-                        cve,
-                        linenum,
-                        rel,
-                        srcmap["pkgs"][pkg][rel][1],
-                    )
-                )
+                msg += ("%s: %d: duplicate entry for '%s': original at %s line %d\n"
+                        % (cvefile, linenum, rel, srcentries['pkgs'][pkg][rel][0], srcentries['pkgs'][pkg][rel][1]))
                 code = EXIT_FAIL
                 continue
             affected[pkg].setdefault(rel, [state, details])
-            srcmap["pkgs"].setdefault(pkg, OrderedDict())
-            srcmap["pkgs"][pkg].setdefault(rel, (cve, linenum))
+            srcentries['pkgs'].setdefault(pkg, dict())
+            srcentries['pkgs'][pkg].setdefault(rel, (cvefile, linenum))
         elif field not in required_fields + extra_fields:
-            msg += "%s: %d: unknown field '%s'\n" % (cve, linenum, field)
+            msg += "%s: %d: unknown field '%s'\n" % (cvefile, linenum, field)
             code = EXIT_FAIL
         else:
             data.setdefault(field, value)
-            srcmap.setdefault(field, (cve, linenum))
-
-    cve_file.close()
+            srcentries.setdefault(field, (cvefile, linenum))
 
-    data["Notes"] = notes_parser.finalize()
-    data["CVSS"] = cvss_entries
+    data['Notes'] = notes_parser.finalize()
+    data['CVSS'] = cvss_entries
 
     # Check for required fields
     for field in required_fields:
-        nonempty = ["Candidate"]
-        if strict:
-            nonempty += ["PublicDate"]
         # boilerplate files are special and can (should?) be empty
-        if "boilerplate" in cve:
-            nonempty = []
+        nonempty = [] if "boilerplate" in cvefile else ['Candidate']
+        if strict:
+            nonempty += ['PublicDate']
 
         if field not in data or field not in fields_seen:
-            msg += "%s: %d: missing field '%s'\n" % (cve, linenum, field)
+            msg += "%s: %d: missing field '%s'\n" % (cvefile, linenum, field)
             code = EXIT_FAIL
         elif field in nonempty and data[field].strip() == "":
-            msg += "%s: %d: required field '%s' is empty\n" % (
-                cve,
-                linenum,
-                field,
-            )
+            linenum = srcentries[field][1]
+            msg += "%s: %d: required field '%s' is empty\n" % (cvefile, linenum, field)
             code = EXIT_FAIL
 
     # Fill in defaults for missing fields
-    if "Priority" not in data:
-        data.setdefault("Priority", "untriaged")
-        srcmap.setdefault("Priority", (cve, 1))
+    if 'Priority' not in data:
+        data.setdefault('Priority', ['untriaged'])
+        srcentries.setdefault('Priority', (cvefile, 1))
+    # Perform override fields
+    if 'PublicDateAtUSN' in data:
+        data['PublicDate'] = data['PublicDateAtUSN']
+        srcentries['PublicDate'] = srcentries['PublicDateAtUSN']
+    if 'CRD' in data and data['CRD'].strip() != '' and data['PublicDate'] != data['CRD']:
+        if cvefile.startswith("embargoed"):
+            print("%s: %d: adjusting PublicDate to use CRD: %s" % (cvefile, linenum, data['CRD']), file=sys.stderr)
+        data['PublicDate'] = data['CRD']
+        srcentries['PublicDate'] = srcentries['CRD']
+
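+    # CVEs made public after PRIORITY_REASON_DATE_START that carry a priority
+    # listed in PRIORITY_REASON_REQUIRED must also give a reason for it.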
+    if data["PublicDate"] > PRIORITY_REASON_DATE_START and \
+            data["Priority"][0] in PRIORITY_REASON_REQUIRED and not priority_reason:
+        linenum = srcentries["Priority"][1]
+        msg += "%s: %d: needs a reason for being '%s'\n" % (cvefile, linenum, data["Priority"][0])
+        code = EXIT_FAIL
+
+    for item in priority_reason:
+        field = 'Priority' if not item else 'Priority_' + item
+        data[field][1] = priority_reason[item]
 
     # entries need an upstream entry if any entries are from the internal
     # list of subprojects
@@ -1513,27 +1846,27 @@ def load_cve(cve, strict=False, srcmap=None):
         for rel in affected[pkg]:
             if rel not in external_releases:
                 needs_upstream = True
-        if needs_upstream and "upstream" not in affected[pkg]:
-            msg += "%s: %d: missing upstream '%s'\n" % (cve, linenum, pkg)
+        if needs_upstream and 'upstream' not in affected[pkg]:
+            msg += "%s: %d: missing upstream '%s'\n" % (cvefile, linenum, pkg)
             code = EXIT_FAIL
 
-    data["pkgs"] = affected
+    data['pkgs'] = affected
 
-    code, msg = load_external_subproject_cve_data(cve, data, srcmap, code, msg)
+    if not "boilerplate" in cvefile:
+        code, msg = load_external_subproject_cve_data(cvefile, data, srcentries, code, msg)
 
     if code != EXIT_OKAY:
         raise ValueError(msg.strip())
     return data
 
-
-def amend_external_subproject_pkg(cve, data, srcmap, amendments, code, msg):
+def amend_external_subproject_pkg(cve, data, srcentries, amendments, code, msg):
     linenum = 0
     for line in amendments.splitlines():
         linenum += 1
-        if len(line) == 0 or line.startswith("#") or line.startswith(" "):
+        if len(line) == 0 or line.startswith('#') or line.startswith(' '):
             continue
         try:
-            field, value = line.split(":", 1)
+            field, value = line.split(':', 1)
             field = field.strip()
             value = value.strip()
         except ValueError as e:
@@ -1541,198 +1874,35 @@ def amend_external_subproject_pkg(cve, data, srcmap, amendments, code, msg):
             code = EXIT_FAIL
             return code, msg
 
-        if "_" in field:
-            (
-                success,
-                pkg,
-                release,
-                state,
-                details,
-                code,
-                msg,
-            ) = parse_cve_release_package_field(
-                cve, field, data, value, code, msg, linenum
-            )
+        if '_' in field:
+            success, pkg, rel, state, details, code, msg = parse_cve_release_package_field(cve, field, data, value, code, msg, linenum)
             if not success:
                 return code, msg
 
-            data.setdefault("pkgs", OrderedDict())
-            data["pkgs"].setdefault(pkg, OrderedDict())
-            srcmap["pkgs"].setdefault(pkg, OrderedDict())
-            # override existing release info if it exists
-            data["pkgs"][pkg][release] = [state, details]
-            srcmap["pkgs"][pkg][release] = (cve, linenum)
+            canon, _, _, _ = get_subproject_details(rel)
+            if canon is None and rel not in ['upstream', 'devel']:
+                msg += "%s: %d: unknown entry '%s'\n" % (cve, linenum, rel)
+                code = EXIT_FAIL
+                return code, msg
+            data.setdefault("pkgs", dict())
+            data["pkgs"].setdefault(pkg, dict())
+            srcentries["pkgs"].setdefault(pkg, dict())
+            if rel in data["pkgs"][pkg]:
+                msg += ("%s: %d: duplicate entry for '%s': original at %s line %d (%s)\n"
+                        % (cve, linenum, rel, srcentries['pkgs'][pkg][rel][0], srcentries['pkgs'][pkg][rel][1], data["pkgs"][pkg][rel]))
+                code = EXIT_FAIL
+                return code, msg
+            data["pkgs"][pkg][rel] = [state, details]
+            srcentries["pkgs"][pkg][rel] = (cve, linenum)
 
     return code, msg
 
-
-def load_external_subproject_cve_data(cve, data, srcmap, code, msg):
+def load_external_subproject_cve_data(cve, data, srcentries, code, msg):
     cve_id = os.path.basename(cve)
     for f in find_external_subproject_cves(cve_id):
-        with codecs.open(f, "r", encoding="utf-8") as fp:
+        with codecs.open(f, 'r', encoding="utf-8") as fp:
             amendments = fp.read()
             fp.close()
-        code, msg = amend_external_subproject_pkg(
-            f, data, srcmap, amendments, code, msg
-        )
-
-    return code, msg
+        code, msg = amend_external_subproject_pkg(f, data, srcentries, amendments, code, msg)
 
-
-def parse_cvss(cvss):
-    # parse a CVSS string into components suitable for MITRE / NVD JSON
-    # format - assumes only the Base metric group from
-    # https://www.first.org/cvss/specification-document since this is
-    # mandatory - also validates by raising exceptions on errors
-    metrics = {
-        "attackVector": {
-            "abbrev": "AV",
-            "values": {
-                "NETWORK": 0.85,
-                "ADJACENT": 0.62,
-                "LOCAL": 0.55,
-                "PHYSICAL": 0.2,
-            },
-        },
-        "attackComplexity": {
-            "abbrev": "AC",
-            "values": {"LOW": 0.77, "HIGH": 0.44},
-        },
-        "privilegesRequired": {
-            "abbrev": "PR",
-            "values": {
-                "NONE": 0.85,
-                # [ scope unchanged, changed ]
-                "LOW": [0.62, 0.68],  # depends on scope
-                "HIGH": [0.27, 0.5],
-            },  # depends on scope
-        },
-        "userInteraction": {
-            "abbrev": "UI",
-            "values": {"NONE": 0.85, "REQUIRED": 0.62},
-        },
-        "scope": {"abbrev": "S", "values": {"UNCHANGED", "CHANGED"}},
-        "confidentialityImpact": {
-            "abbrev": "C",
-            "values": {"HIGH": 0.56, "LOW": 0.22, "NONE": 0},
-        },
-        "integrityImpact": {
-            "abbrev": "I",
-            "values": {"HIGH": 0.56, "LOW": 0.22, "NONE": 0},
-        },
-        "availabilityImpact": {
-            "abbrev": "A",
-            "values": {"HIGH": 0.56, "LOW": 0.22, "NONE": 0},
-        },
-    }
-    severities = {
-        "NONE": 0.0,
-        "LOW": 3.9,
-        "MEDIUM": 6.9,
-        "HIGH": 8.9,
-        "CRITICAL": 10.0,
-    }
-    js = None
-    # coerce cvss into a string
-    cvss = str(cvss)
-    for c in cvss.split("/"):
-        elements = c.split(":")
-        if len(elements) != 2:
-            raise ValueError("Invalid CVSS element '%s'" % c)
-        valid = False
-        metric = elements[0]
-        value = elements[1]
-        if metric == "CVSS":
-            if value == "3.0" or value == "3.1":
-                js = {"baseMetricV3": {"cvssV3": {"version": value}}}
-                valid = True
-            else:
-                raise ValueError(
-                    "Unable to process CVSS version '%s' (we only support 3.x)"
-                    % value
-                )
-        else:
-            for m in metrics.keys():
-                if metrics[m]["abbrev"] == metric:
-                    for val in metrics[m]["values"]:
-                        if val[0:1] == value:
-                            js["baseMetricV3"]["cvssV3"][m] = val
-                            valid = True
-        if not valid:
-            raise ValueError("Invalid CVSS elements '%s:%s'" % (metric, value))
-    for m in metrics.keys():
-        if m not in js["baseMetricV3"]["cvssV3"]:
-            raise ValueError("Missing required CVSS base element %s" % m)
-    # add vectorString
-    js["baseMetricV3"]["cvssV3"]["vectorString"] = cvss
-
-    # now calculate CVSS scores
-    iss = 1 - (
-        (
-            1
-            - metrics["confidentialityImpact"]["values"][
-                js["baseMetricV3"]["cvssV3"]["confidentialityImpact"]
-            ]
-        )
-        * (
-            1
-            - metrics["integrityImpact"]["values"][
-                js["baseMetricV3"]["cvssV3"]["integrityImpact"]
-            ]
-        )
-        * (
-            1
-            - metrics["availabilityImpact"]["values"][
-                js["baseMetricV3"]["cvssV3"]["availabilityImpact"]
-            ]
-        )
-    )
-    if js["baseMetricV3"]["cvssV3"]["scope"] == "UNCHANGED":
-        impact = 6.42 * iss
-    else:
-        impact = 7.52 * (iss - 0.029) - 3.25 * pow(iss - 0.02, 15)
-    attackVector = metrics["attackVector"]["values"][
-        js["baseMetricV3"]["cvssV3"]["attackVector"]
-    ]
-    attackComplexity = metrics["attackComplexity"]["values"][
-        js["baseMetricV3"]["cvssV3"]["attackComplexity"]
-    ]
-    privilegesRequired = metrics["privilegesRequired"]["values"][
-        js["baseMetricV3"]["cvssV3"]["privilegesRequired"]
-    ]
-    # privilegesRequires could be a list if is LOW or HIGH (and then the
-    # value depends on whether the scope is unchanged or not)
-    if isinstance(privilegesRequired, list):
-        if js["baseMetricV3"]["cvssV3"]["scope"] == "UNCHANGED":
-            privilegesRequired = privilegesRequired[0]
-        else:
-            privilegesRequired = privilegesRequired[1]
-    userInteraction = metrics["userInteraction"]["values"][
-        js["baseMetricV3"]["cvssV3"]["userInteraction"]
-    ]
-    exploitability = (
-        8.22
-        * attackVector
-        * attackComplexity
-        * privilegesRequired
-        * userInteraction
-    )
-    if impact <= 0:
-        base_score = 0
-    elif js["baseMetricV3"]["cvssV3"]["scope"] == "UNCHANGED":
-        # use ceil and * 10 / 10 to get rounded up to nearest 10th decimal
-        # (where rounded-up is say 0.01 -> 0.1)
-        base_score = math.ceil(min(impact + exploitability, 10) * 10) / 10
-    else:
-        base_score = (
-            math.ceil(min(1.08 * (impact + exploitability), 10) * 10) / 10
-        )
-    js["baseMetricV3"]["cvssV3"]["baseScore"] = base_score
-    for severity in severities.keys():
-        if base_score <= severities[severity]:
-            js["baseMetricV3"]["cvssV3"]["baseSeverity"] = severity
-            break
-    # these use normal rounding to 1 decimal place
-    js["baseMetricV3"]["exploitabilityScore"] = round(exploitability * 10) / 10
-    js["baseMetricV3"]["impactScore"] = round(impact * 10) / 10
-    return js
+    return code, msg
\ No newline at end of file
diff --git a/lib/lp/bugs/scripts/uct/models.py b/lib/lp/bugs/scripts/uct/models.py
index 72b8d92..d328286 100644
--- a/lib/lp/bugs/scripts/uct/models.py
+++ b/lib/lp/bugs/scripts/uct/models.py
@@ -171,7 +171,7 @@ class UCTRecord(SVTRecord):
                         status=cls.PackageStatus(status),
                         reason=reason,
                         priority=(
-                            cls.Priority(series_priority)
+                            cls.Priority(series_priority[0])
                             if series_priority
                             else None
                         ),
@@ -187,7 +187,7 @@ class UCTRecord(SVTRecord):
                     name=package,
                     statuses=statuses,
                     priority=(
-                        cls.Priority(package_priority)
+                        cls.Priority(package_priority[0])
                         if package_priority
                         else None
                     ),
@@ -225,7 +225,7 @@ class UCTRecord(SVTRecord):
             )
         cvss = dict(cvss)
 
-        _priority = cls._pop_cve_property(cve_data, "Priority").split("\n")
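+        # cve_lib now returns the Priority field as a [value, reason] pair
+        # (reason being the continuation lines) instead of a newline-joined
+        # string.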
+        _priority = cls._pop_cve_property(cve_data, "Priority")
 
         entry = UCTRecord(
             parent_dir=cve_path.absolute().parent.name,
@@ -249,7 +249,7 @@ class UCTRecord(SVTRecord):
             ),
             notes=cls._format_notes(cls._pop_cve_property(cve_data, "Notes")),
             priority=cls.Priority(_priority[0]),
-            priority_explanation="\n".join(_priority[1:]),
+            priority_explanation="\n".join(_priority[1]),
             references=cls._pop_cve_property(cve_data, "References").split(
                 "\n"
             ),
diff --git a/lib/lp/bugs/scripts/uct/tests/test_uct.py b/lib/lp/bugs/scripts/uct/tests/test_uct.py
index 935fd6a..4e7780a 100644
--- a/lib/lp/bugs/scripts/uct/tests/test_uct.py
+++ b/lib/lp/bugs/scripts/uct/tests/test_uct.py
@@ -738,7 +738,7 @@ class TestUCTImporterExporter(TestCaseWithFactory):
                 2021, 1, 14, 8, 15, tzinfo=timezone.utc
             ),
             date_coordinated_release=datetime(
-                2020, 1, 14, 8, 15, tzinfo=timezone.utc
+                2022, 1, 14, 8, 15, tzinfo=timezone.utc
             ),
             distro_packages=[
                 CVE.DistroPackage(
@@ -763,7 +763,7 @@ class TestUCTImporterExporter(TestCaseWithFactory):
                     package_name=self.ubuntu_package.sourcepackagename,
                     importance=BugTaskImportance.HIGH,
                     status=BugTaskStatus.FIXRELEASED,
-                    status_explanation="released",
+                    status_explanation="2.56+dfsg-1",
                 ),
                 CVE.SeriesPackage(
                     target=SourcePackage(
@@ -877,7 +877,7 @@ class TestUCTImporterExporter(TestCaseWithFactory):
             assigned_to=assignee.name,
             bugs=["https://github.com/mm2/Little-CMS/issues/29";],
             candidate="CVE-2022-23222",
-            crd=datetime(2020, 1, 14, 8, 15, tzinfo=timezone.utc),
+            crd=datetime(2022, 1, 14, 8, 15, tzinfo=timezone.utc),
             cvss={
                 "nvd": [
                     "CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H "
@@ -896,7 +896,7 @@ class TestUCTImporterExporter(TestCaseWithFactory):
                         UCTRecord.SeriesPackageStatus(
                             series="focal",
                             status=UCTRecord.PackageStatus.RELEASED,
-                            reason="released",
+                            reason="2.56+dfsg-1",
                             priority=UCTRecord.Priority.HIGH,
                         ),
                         UCTRecord.SeriesPackageStatus(
diff --git a/lib/lp/services/job/celeryconfig.py b/lib/lp/services/job/celeryconfig.py
index 6d7946e..00dac88 100644
--- a/lib/lp/services/job/celeryconfig.py
+++ b/lib/lp/services/job/celeryconfig.py
@@ -52,7 +52,7 @@ def configure(argv):
     # A queue must be specified as a command line parameter for each
     # "celery worker" instance, but this is not required for a Launchpad app
     # server.
-    if "celery" in argv[0] and argv[1] == "worker":
+    if "celery" in argv[0] and "worker" in argv:
         if queues is None or queues == "":
             raise ConfigurationError("A queue must be specified.")
         queues = queues.split(",")
diff --git a/lib/lp/services/job/tests/__init__.py b/lib/lp/services/job/tests/__init__.py
index 179afe3..9a62a25 100644
--- a/lib/lp/services/job/tests/__init__.py
+++ b/lib/lp/services/job/tests/__init__.py
@@ -32,11 +32,13 @@ def celery_worker(queue, cwd=None):
     with celery_app.broker_connection() as connection:
         broker_uri = connection.as_uri(include_password=True)
     cmd_args = (
-        "worker",
+        "-A",
+        "lp.services.job.celeryjob",
         "--config",
         "lp.services.job.celeryconfig",
         "--broker",
         broker_uri,
+        "worker",
         "--concurrency",
         "1",
         "--loglevel",
diff --git a/lib/lp/services/job/tests/celery_helpers.py b/lib/lp/services/job/tests/celery_helpers.py
index d79ae57..48a84cc 100644
--- a/lib/lp/services/job/tests/celery_helpers.py
+++ b/lib/lp/services/job/tests/celery_helpers.py
@@ -3,23 +3,17 @@
 
 __all__ = ["noop", "pop_notifications"]
 
-from celery.task import task
+from lp.services.job.celeryjob import celery_app
 
-# Force the correct celeryconfig to be used.
-import lp.services.job.celeryjob
 
-# Quiet lint unused import warning.
-lp.services.job.celeryjob
-
-
-@task
+@celery_app.task
 def pop_notifications():
     from lp.testing.mail_helpers import pop_notifications
 
     return [message.as_string() for message in pop_notifications()]
 
 
-@task
+@celery_app.task
 def noop():
     """Task that does nothing.
 
diff --git a/requirements/launchpad.txt b/requirements/launchpad.txt
index 906b35a..c98bd1f 100644
--- a/requirements/launchpad.txt
+++ b/requirements/launchpad.txt
@@ -6,7 +6,7 @@
 # versions; they will be included automatically.
 
 ampoule==24.10.0
-amqp==2.6.1
+amqp==5.3.1
 annotated-types==0.7.0
 anyjson==0.3.3
 appdirs==1.4.3
@@ -15,11 +15,12 @@ attrs==19.3.0
 Automat==20.2.0
 backcall==0.2.0
 backports.functools-lru-cache==1.5
+backports.zoneinfo==0.2.1
 # ztk-versions.cfg uses 3.2.0 on Python 3, but that drops support for Python
 # 3.5.  Pin to 3.1.7 until we no longer care about xenial.
 bcrypt==3.1.7
 beautifulsoup4==4.12.3
-billiard==3.6.4.0
+billiard==4.2.2
 bleach==6.1.0
 bleach-allowlist==1.0.3
 breezy==3.2.0
@@ -27,8 +28,12 @@ brz-builder==0.7.4
 bson==0.5.9
 boto3==1.35.71
 botocore==1.35.71
-celery==4.4.7
+celery==5.5.3
 Chameleon==3.6.2
+click==8.1.8
+click-didyoumean==0.3.1
+click-plugins==1.1.1.2
+click-repl==0.3.0
 configobj==5.0.6
 contextvars==2.4
 constantly==15.1.0
@@ -74,14 +79,14 @@ jmespath==0.10.0
 jsautobuild==0.2
 keyring==0.6.2
 keystoneauth1==4.1.0
-kombu==4.6.11
+kombu==5.5.4
 launchpad-buildd==206
 launchpadlib==2.1.0
 lazr.batchnavigator==1.3.1
 lazr.config==2.2.3
 lazr.delegates==2.0.4
 lazr.enum==1.2.1
-lazr.jobrunner==0.17
+lazr.jobrunner==1.1
 lazr.lifecycle==1.2.1
 lazr.restful==2.0.2
 lazr.restfulclient==0.14.5
@@ -126,6 +131,7 @@ pexpect==4.8.0
 pgbouncer==0.0.9
 pickleshare==0.7.5
 pkginfo==1.11.2
+poetry-core==1.9.1
 prettytable==0.7.2
 psutil==7.0.0
 psycopg2==2.8.6
@@ -193,9 +199,10 @@ txfixtures==0.4.3
 txfixtures==0.5.2; python_version >= "3.10"
 txpkgupload==0.5
 typing_extensions==4.12.2; python_version >= "3.9"
+tzdata==2025.2
 urllib3==1.26.20
 van.testing==3.0.0
-vine==1.3.0
+vine==5.1.0
 virtualenv-tools3==3.1.1
 wadllib==1.3.6
 waitress==2.1.2