← Back to team overview

canonical-ubuntu-qa team mailing list archive

[Merge] ~andersson123/autopkgtest-cloud:fix-update-github-jobs into autopkgtest-cloud:master

 

Tim Andersson has proposed merging ~andersson123/autopkgtest-cloud:fix-update-github-jobs into autopkgtest-cloud:master.

Requested reviews:
  Canonical's Ubuntu QA (canonical-ubuntu-qa)

For more details, see:
https://code.launchpad.net/~andersson123/autopkgtest-cloud/+git/autopkgtest-cloud/+merge/459166
-- 
Your team Canonical's Ubuntu QA is requested to review the proposed merge of ~andersson123/autopkgtest-cloud:fix-update-github-jobs into autopkgtest-cloud:master.
diff --git a/charms/focal/autopkgtest-web/units/update-github-jobs.service b/charms/focal/autopkgtest-web/units/update-github-jobs.service
index a52bac5..5681a9f 100644
--- a/charms/focal/autopkgtest-web/units/update-github-jobs.service
+++ b/charms/focal/autopkgtest-web/units/update-github-jobs.service
@@ -5,6 +5,5 @@ Description=Update GitHub job status
 Type=oneshot
 User=www-data
 Group=www-data
-TimeoutStartSec=1m
 EnvironmentFile=/etc/environment.d/*.conf
 ExecStart=/home/ubuntu/webcontrol/update-github-jobs
diff --git a/charms/focal/autopkgtest-web/webcontrol/update-github-jobs b/charms/focal/autopkgtest-web/webcontrol/update-github-jobs
index 6362803..8c625ad 100755
--- a/charms/focal/autopkgtest-web/webcontrol/update-github-jobs
+++ b/charms/focal/autopkgtest-web/webcontrol/update-github-jobs
@@ -1,6 +1,7 @@
 #!/usr/bin/python3
 
 import configparser
+import datetime
 import io
 import json
 import logging
@@ -15,6 +16,7 @@ from urllib.error import HTTPError
 from request.submit import Submit
 
 PENDING_DIR = "/run/autopkgtest_webcontrol/github-pending"
+DAY_IN_SECONDS = 86400
 swift_url = None
 external_url = None
 
@@ -23,7 +25,7 @@ def result_matches_job(result_url, params):
     # download result.tar and get exit code and testinfo
     for _ in range(5):
         try:
-            with urllib.request.urlopen(result_url + "/result.tar") as f:
+            with urllib.request.urlopen(result_url) as f:
                 tar_bytes = io.BytesIO(f.read())
             break
         except IOError as e:
@@ -96,6 +98,20 @@ def finish_job(jobfile, params, code, log_url):
     os.unlink(jobfile)
 
 
+def check_time_diff(object_time, job_time):
+    splitted_url = object_time.split("/")
+    # date and time of object embedded in swift object path
+    timestamp = splitted_url[4]
+    # last part of the timestamp is a storage ID which we don't need
+    timestamp = "_".join(timestamp.split("_")[0:2])
+    timestamp = datetime.datetime.strptime(timestamp, "%Y%m%d_%H%M%S")
+    diff = abs(timestamp.timestamp() - job_time) / DAY_IN_SECONDS
+    # 15 days either side
+    if diff < 15:
+        return True
+    return False
+
+
 def process_job(jobfile):
     try:
         with open(jobfile) as f:
@@ -137,6 +153,14 @@ def process_job(jobfile):
                 result_url = os.path.join(
                     container_url, result.strip().decode()
                 )
+                # We don't want to check the URLs that aren't for result.tar
+                if "result.tar" not in result_url:
+                    continue
+                # We check that the swift object and the github jobfile have a
+                # time difference of less than 15 days - otherwise we're checking
+                # swift objects from years ago and this script is super slow.
+                if not check_time_diff(result.strip().decode(), mtime):
+                    continue
                 logging.debug(
                     "checking result %s for job %s",
                     result_url,

Follow ups