ubuntu-support-team team mailing list archive

[Merge] ~ubuntu-support-team/ubuntu-dev-tools:devel into ubuntu-dev-tools:master

 

Dan Streetman has proposed merging ~ubuntu-support-team/ubuntu-dev-tools:devel into ubuntu-dev-tools:master.

Requested reviews:
  Ubuntu Development Team (ubuntu-dev)

For more details, see:
https://code.launchpad.net/~ubuntu-support-team/ubuntu-dev-tools/+git/ubuntu-dev-tools-1/+merge/380884
-- 
Your team Ubuntu Support Team is subscribed to branch ~ubuntu-support-team/ubuntu-dev-tools:devel.
diff --git a/debian/changelog b/debian/changelog
index 4d3ec62..80cbace 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -2,8 +2,10 @@ ubuntu-dev-tools (0.177) UNRELEASED; urgency=medium
 
   * Verify checksums for downloaded binary files
   * pullpkg: support pulling from Ubuntu upload queues
+  * Correct log level of some messages
+  * archive: consolidate pull/pull_binaries download code
 
- -- Dan Streetman <ddstreet@xxxxxxxxxxxxx>  Fri, 13 Mar 2020 10:02:36 -0400
+ -- Dan Streetman <ddstreet@xxxxxxxxxxxxx>  Fri, 13 Mar 2020 10:06:38 -0400
 
 ubuntu-dev-tools (0.176) unstable; urgency=medium
 
diff --git a/ubuntutools/archive.py b/ubuntutools/archive.py
index abe6ed4..d631539 100644
--- a/ubuntutools/archive.py
+++ b/ubuntutools/archive.py
@@ -420,7 +420,7 @@ class SourcePackage(object):
         pathname = os.path.join(self.workdir, filename)
 
         if self._verify_file(pathname, dscverify, sha1sum, sha256sum, size):
-            Logger.debug('Using existing file %s', filename)
+            Logger.info('Using existing file %s', filename)
             return True
 
         if urlparse(url).scheme in ["", "file"]:
@@ -435,25 +435,31 @@ class SourcePackage(object):
 
         return self._verify_file(pathname, dscverify, sha1sum, sha256sum, size)
 
+    def _download_file_from_urls(self, urls, filename, size, dscverify=False,
+                                 sha1sum=None, sha256sum=None):
+        "Try to download a file from a list of urls."
+        for url in urls:
+            try:
+                if self._download_file(url, filename, size, dscverify=dscverify,
+                                       sha1sum=sha1sum, sha256sum=sha256sum):
+                    return
+            except HTTPError as e:
+                if e.code == 404:
+                    # It's ok if the file isn't found, just try the next url
+                    Logger.debug("File not found at %s" % url)
+                else:
+                    Logger.error('HTTP Error %i: %s', e.code, str(e))
+            except URLError as e:
+                Logger.error('URL Error: %s', e.reason)
+        raise DownloadError('Failed to download %s' % filename)
+
     def pull(self):
         "Pull into workdir"
         self._write_dsc()
         for entry in self.dsc['Files']:
             name = entry['name']
-            for url in self._source_urls(name):
-                try:
-                    if self._download_file(url, name, int(entry['size']), dscverify=True):
-                        break
-                except HTTPError as e:
-                    # It's ok if the file isn't found; we try multiple places to download
-                    if e.code == 404:
-                        Logger.info("File not found at %s" % url)
-                        continue
-                    Logger.info('HTTP Error %i: %s', e.code, str(e))
-                except URLError as e:
-                    Logger.info('URL Error: %s', e.reason)
-            else:
-                raise DownloadError('File %s could not be found' % name)
+            urls = self._source_urls(name)
+            self._download_file_from_urls(urls, name, int(entry['size']), dscverify=True)
 
     def pull_binaries(self, arch=None, name=None, ext=None):
         """Pull binary debs into workdir.
@@ -466,34 +472,25 @@ class SourcePackage(object):
 
         Returns the number of files downloaded.
         """
-        total = 0
-
         Logger.debug("pull_binaries(arch=%s, name=%s, ext=%s)" % (arch, name, ext))
 
         if arch == 'all':
             arch = None
 
+        total = 0
         for bpph in self.lp_spph.getBinaries(arch=arch, name=name, ext=ext):
             fname = bpph.getFileName()
             fsha1 = bpph.binaryFileSha1(fname)
             fsha256 = bpph.binaryFileSha256(fname)
             fsize = bpph.binaryFileSize(fname)
-            for url in self._binary_urls(fname, bpph):
-                try:
-                    if self._download_file(url, fname, fsize,
-                                           sha1sum=fsha1, sha256sum=fsha256):
-                        total += 1
-                        break
-                except HTTPError as e:
-                    # It's ok if the file isn't found; we try multiple places to download
-                    if e.code == 404:
-                        Logger.info("File not found at %s" % url)
-                        continue
-                    Logger.info('HTTP Error %i: %s', e.code, str(e))
-                except URLError as e:
-                    Logger.info('URL Error: %s', e.reason)
-            else:
-                Logger.info("Could not download from any location: %s", fname)
+            urls = self._binary_urls(fname, bpph)
+            try:
+                self._download_file_from_urls(urls, fname, fsize,
+                                              sha1sum=fsha1, sha256sum=fsha256)
+                total += 1
+            except DownloadError as e:
+                # log/print the error, but continue to get the rest of the files
+                Logger.error(e)
         return total
 
     def verify(self):

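For context, here is a minimal standalone sketch of the download pattern that the new _download_file_from_urls() helper consolidates: try each mirror in turn, treat a 404 as "move on to the next URL", and raise DownloadError only if every URL fails. The download_one() stub, the module-level Logger, and the function names below are illustrative assumptions for the sketch, not the actual ubuntutools.archive code.

    # Sketch of the "try each mirror, fail only if all fail" pattern.
    # Names and the download_one() stub are illustrative only.
    import logging
    import urllib.request
    from urllib.error import HTTPError, URLError

    Logger = logging.getLogger(__name__)


    class DownloadError(Exception):
        """Raised when a file cannot be fetched from any of its URLs."""


    def download_one(url, filename):
        """Fetch a single URL to a local file; returns True on success."""
        urllib.request.urlretrieve(url, filename)
        return True


    def download_from_urls(urls, filename):
        """Try each URL in order; a 404 just means try the next mirror."""
        for url in urls:
            try:
                if download_one(url, filename):
                    return
            except HTTPError as e:
                if e.code == 404:
                    # Not an error yet: the file may exist on another mirror
                    Logger.debug("File not found at %s", url)
                else:
                    Logger.error("HTTP Error %i: %s", e.code, str(e))
            except URLError as e:
                Logger.error("URL Error: %s", e.reason)
        raise DownloadError("Failed to download %s" % filename)

In the proposed branch, pull() lets the resulting DownloadError propagate, since a missing source file is fatal, while pull_binaries() catches it per binary and logs the error so the remaining debs are still downloaded.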