[Merge] lp:~cjwatson/launchpad-buildd/large-build-artifacts into lp:launchpad-buildd
Colin Watson has proposed merging lp:~cjwatson/launchpad-buildd/large-build-artifacts into lp:launchpad-buildd.
Commit message:
Avoid trying to read entire log or result files into memory in one go.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
Related bugs:
Bug #1227086 in launchpad-buildd: "Slaves on 32-bit architectures crash on >2GiB build logs"
https://bugs.launchpad.net/launchpad-buildd/+bug/1227086
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad-buildd/large-build-artifacts/+merge/186503
This branch rearranges the buildd slave to avoid trying to read entire log or result files into memory in one go.
For result files, this is fairly straightforward. Log files are harder because we have to search them; however, matches will not span the whole file, so we can do the search over a sliding window.
This has efficiency problems of its own, but I think they will generally be acceptable for the moment: for instance, this code can search a 2GB file for three regexes in 31 seconds on my laptop, while grep takes 11 seconds for a single regex. Also, we used to read the entire file in some cases even when we weren't going to search it (e.g. successful builds), so this should improve slave throughput regardless.
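To illustrate, here is a stripped-down sketch of the sliding-window search (a simplified restatement of the searchLogContents method in the diff below; the standalone helper name and signature are mine, not the branch's code):

    import re

    def search_log(path, patterns_and_flags, chunk_size=256 * 1024):
        # Compile all patterns up front; any of them may match.
        regexes = [re.compile(p, f) for p, f in patterns_and_flags]
        log = open(path)
        try:
            window = ""
            chunk = log.read(chunk_size)
            while chunk:
                # The window holds at most two chunks, so a match of up to
                # chunk_size characters cannot be split across a boundary.
                window += chunk
                for regex in regexes:
                    match = regex.search(window)
                    if match is not None:
                        return regex.pattern, match
                if len(window) > chunk_size:
                    window = window[chunk_size:]
                chunk = log.read(chunk_size)
        finally:
            log.close()
        return None, None

The extra cost relative to a single grep comes largely from re-running each regex over every (overlapping) window, which is presumably most of the 31-seconds-versus-11 difference mentioned above.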
--
https://code.launchpad.net/~cjwatson/launchpad-buildd/large-build-artifacts/+merge/186503
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~cjwatson/launchpad-buildd/large-build-artifacts into lp:launchpad-buildd.
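Before the diff itself, a rough sketch of the streamed storage used for the "fairly trivial" result-file side: hash and copy in fixed-size chunks instead of slurping the whole file. The names store_file and cache_dir are illustrative only; the real implementation is BuildDSlave.storeFile in lpbuildd/slave.py below.

    import hashlib
    import os
    import shutil

    def store_file(path, cache_dir, chunk_size=256 * 1024):
        # First pass: compute the SHA-1 of the file without reading it
        # into memory in one go.
        sha1 = hashlib.sha1()
        f = open(path, "rb")
        try:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                sha1.update(chunk)
        finally:
            f.close()
        sha1sum = sha1.hexdigest()
        # Second pass: copy the file into the cache under its digest,
        # unless it is already present.
        cache_path = os.path.join(cache_dir, sha1sum)
        if not os.path.exists(cache_path):
            shutil.copyfile(path, cache_path)
        return sha1sum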
=== modified file 'debian/changelog'
--- debian/changelog 2013-09-18 12:20:31 +0000
+++ debian/changelog 2013-09-19 11:12:45 +0000
@@ -3,6 +3,10 @@
   [ Colin Watson ]
   * Remove obsolete BuilderStatus.ABORTED.
   * Remove obsolete BuildDSlave.fetchFile method, unused since October 2005.
+  * Don't attempt to read entire files into memory at once when storing them
+    in the file cache.
+  * Rearrange build log searching to avoid reading the entire build log into
+    memory at once (LP: #1227086).
 
   [ Adam Conrad ]
   * Tidy up log formatting of the "Already reaped..." message.
=== modified file 'lpbuildd/binarypackage.py'
--- lpbuildd/binarypackage.py 2013-07-25 17:26:10 +0000
+++ lpbuildd/binarypackage.py 2013-09-19 11:12:45 +0000
@@ -88,25 +88,30 @@
     def iterate_SBUILD(self, success):
         """Finished the sbuild run."""
-        tmpLog = self.getTmpLogContents()
         if success != SBuildExitCodes.OK:
+            log_patterns = []
+
             if (success == SBuildExitCodes.DEPFAIL or
                 success == SBuildExitCodes.PACKAGEFAIL):
                 for rx in BuildLogRegexes.GIVENBACK:
-                    mo = re.search(rx, tmpLog, re.M)
-                    if mo:
-                        success = SBuildExitCodes.GIVENBACK
+                    log_patterns.append([rx, re.M])
             if success == SBuildExitCodes.DEPFAIL:
                 for rx, dep in BuildLogRegexes.DEPFAIL:
-                    mo = re.search(rx, tmpLog, re.M)
-                    if mo:
+                    log_patterns.append([rx, re.M])
+
+            if log_patterns:
+                rx, mo = self.searchLogContents(log_patterns)
+                if mo:
+                    if rx in BuildLogRegexes.GIVENBACK:
+                        success = SBuildExitCodes.GIVENBACK
+                    elif rx in BuildLogRegexes.DEPFAIL:
                         if not self.alreadyfailed:
+                            dep = BuildLogRegexes.DEPFAIL[rx]
                             print("Returning build status: DEPFAIL")
                             print("Dependencies: " + mo.expand(dep))
                             self._slave.depFail(mo.expand(dep))
                         success = SBuildExitCodes.DEPFAIL
-                        break
                 else:
                     success = SBuildExitCodes.PACKAGEFAIL
=== modified file 'lpbuildd/debian.py'
--- lpbuildd/debian.py 2013-08-12 04:29:33 +0000
+++ lpbuildd/debian.py 2013-09-19 11:12:45 +0000
@@ -9,6 +9,7 @@
 __metaclass__ = type
 
 import os
+import re
 
 from lpbuildd.slave import (
     BuildManager,
@@ -100,16 +101,15 @@
         The primary file we care about is the .changes file. We key from there.
         """
         path = self.getChangesFilename()
-        name = os.path.basename(path)
+        self._slave.addWaitingFile(path)
+
         chfile = open(path, "r")
-        self._slave.waitingfiles[name] = self._slave.storeFile(chfile.read())
-        chfile.seek(0)
-
-        for fn in self._parseChangesFile(chfile):
-            self._slave.addWaitingFile(
-                get_build_path(self.home, self._buildid, fn))
-
-        chfile.close()
+        try:
+            for fn in self._parseChangesFile(chfile):
+                self._slave.addWaitingFile(
+                    get_build_path(self.home, self._buildid, fn))
+        finally:
+            chfile.close()
 
     def iterate(self, success):
         # When a Twisted ProcessControl class is killed by SIGTERM,
@@ -173,12 +173,36 @@
                 self._state = DebianBuildState.UPDATE
                 self.doUpdateChroot()
 
-    def getTmpLogContents(self):
+    def searchLogContents(self, patterns_and_flags):
+        """Search for any of a list of regex patterns in the build log.
+
+        The build log is matched using a sliding window, which avoids having
+        to read the whole file into memory at once but requires that matches
+        be no longer than the chunk size (currently 256KiB).
+
+        :return: A tuple of the regex pattern that matched and the match
+            object, or (None, None).
+        """
+        chunk_size = 256 * 1024
+        regexes = [
+            re.compile(pattern, flags)
+            for pattern, flags in patterns_and_flags]
+        log = open(os.path.join(self._cachepath, "buildlog"))
         try:
-            tmpLogHandle = open(os.path.join(self._cachepath, "buildlog"))
-            return tmpLogHandle.read()
+            window = ""
+            chunk = log.read(chunk_size)
+            while chunk:
+                window += chunk
+                for regex in regexes:
+                    match = regex.search(window)
+                    if match is not None:
+                        return regex.pattern, match
+                if len(window) > chunk_size:
+                    window = window[chunk_size:]
+                chunk = log.read(chunk_size)
         finally:
-            tmpLogHandle.close()
+            log.close()
+        return None, None
 
     def iterate_SOURCES(self, success):
         """Just finished overwriting sources.list."""
=== modified file 'lpbuildd/slave.py'
--- lpbuildd/slave.py 2013-09-18 12:20:31 +0000
+++ lpbuildd/slave.py 2013-09-19 11:12:45 +0000
@@ -376,25 +376,32 @@
                 self.log(extra_info)
         return (os.path.exists(cachefile), extra_info)
 
-    def storeFile(self, content):
-        """Take the provided content and store it in the file cache."""
-        sha1sum = hashlib.sha1(content).hexdigest()
+    def storeFile(self, path):
+        """Store the content of the provided path in the file cache."""
+        f = open(path)
+        try:
+            sha1 = hashlib.sha1()
+            for chunk in iter(lambda: f.read(256*1024), ''):
+                sha1.update(chunk)
+            sha1sum = sha1.hexdigest()
+        finally:
+            f.close()
         present, info = self.ensurePresent(sha1sum)
         if present:
             return sha1sum
-        f = open(self.cachePath(sha1sum), "w")
-        f.write(content)
-        f.close()
+        f = open(path)
+        of = open(self.cachePath(sha1sum), "w")
+        try:
+            for chunk in iter(lambda: f.read(256*1024), ''):
+                of.write(chunk)
+        finally:
+            of.close()
+            f.close()
         return sha1sum
 
     def addWaitingFile(self, path):
         """Add a file to the cache and store its details for reporting."""
-        fn = os.path.basename(path)
-        f = open(path)
-        try:
-            self.waitingfiles[fn] = self.storeFile(f.read())
-        finally:
-            f.close()
+        self.waitingfiles[os.path.basename(path)] = self.storeFile(path)
 
     def abort(self):
         """Abort the current build."""
=== modified file 'lpbuildd/sourcepackagerecipe.py'
--- lpbuildd/sourcepackagerecipe.py 2013-08-28 22:41:24 +0000
+++ lpbuildd/sourcepackagerecipe.py 2013-09-19 11:12:45 +0000
@@ -99,11 +99,10 @@
print("Returning build status: OK")
elif retcode == RETCODE_FAILURE_INSTALL_BUILD_DEPS:
if not self.alreadyfailed:
- tmpLog = self.getTmpLogContents()
rx = (
'The following packages have unmet dependencies:\n'
'.*: Depends: ([^ ]*( \([^)]*\))?)')
- mo = re.search(rx, tmpLog, re.M)
+ _, mo = self.searchLogContents([rx, re.M])
if mo:
self._slave.depFail(mo.group(1))
print("Returning build status: DEPFAIL")