[Merge] lp:~lifeless/launchpad/soyuz into lp:launchpad
Robert Collins has proposed merging lp:~lifeless/launchpad/soyuz into lp:launchpad.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
Delete some cruft. \o/
--
https://code.launchpad.net/~lifeless/launchpad/soyuz/+merge/30453
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~lifeless/launchpad/soyuz into lp:launchpad.
=== modified file 'buildout-templates/bin/test.in'
--- buildout-templates/bin/test.in 2010-07-16 16:55:27 +0000
+++ buildout-templates/bin/test.in 2010-07-20 19:16:36 +0000
@@ -252,15 +252,14 @@
# tree. This is very useful for IDE integration, so an IDE can
# e.g. run the test that you are currently editing.
try:
- there = os.getcwd()
- os.chdir(BUILD_DIR)
- result = testrunner.run([])
+ try:
+ there = os.getcwd()
+ os.chdir(BUILD_DIR)
+ testrunner.run([])
+ except SystemExit:
+ # Print Layer profiling report if requested.
+ if main_process and local_options.verbose >= 3:
+ profiled.report_profile_stats()
+ raise
finally:
os.chdir(there)
- # Cribbed from sourcecode/zope/test.py - avoid spurious error during exit.
- logging.disable(999999999)
-
- # Print Layer profiling report if requested.
- if main_process and local_options.verbose >= 3:
- profiled.report_profile_stats()
- sys.exit(result)
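For context: zope.testrunner signals completion by raising SystemExit, so the profiling report now runs from an except clause before the exit propagates. A minimal standalone sketch of that control flow, with stand-ins for testrunner.run and the profiling report:

    import os

    def run_tests():
        # Stand-in for testrunner.run([]), which raises SystemExit when done.
        raise SystemExit(0)

    def report_profile_stats():
        # Stand-in for profiled.report_profile_stats().
        print 'profile report'

    try:
        try:
            there = os.getcwd()
            run_tests()
        except SystemExit:
            # Runs after the testrunner has finished but before the process
            # exits; the re-raise preserves the exit code.
            report_profile_stats()
            raise
    finally:
        os.chdir(there)
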
=== removed directory 'lib/canonical/database/testing'
=== removed file 'lib/canonical/database/testing/__init__.py'
=== removed file 'lib/canonical/database/testing/tracers.py'
--- lib/canonical/database/testing/tracers.py 2009-06-25 05:30:52 +0000
+++ lib/canonical/database/testing/tracers.py 1970-01-01 00:00:00 +0000
@@ -1,80 +0,0 @@
-# Copyright 2009 Canonical Ltd. This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-"""Storm tracers for debugging."""
-
-__metaclass__ = type
-__all__ = [
- 'BaseTracer',
- 'CountAllTracer',
- 'StderrDebugTracer',
- ]
-
-
-import sys
-import storm.tracer
-
-
-class BaseTracer:
- """Base class for all tracers."""
-
- def __init__(self):
- # A flag indicating whether tracing should be enabled or not.
- self.trace = False
-
- def install(self, only=False):
- """Install this tracer.
-
- :param only: When True, remove all existing tracers before adding this
- one.
- :type only: boolean
- """
- if only:
- storm.tracer.remove_all_tracers()
- storm.tracer.install_tracer(self)
-
- def uninstall(self):
- """Uninstall all tracers of this instance's type."""
- storm.tracer.remove_tracer_type(type(self))
-
- # The trace API
- def connection_raw_execute(self, *args):
- pass
-
- def connection_raw_execute_error(self, *args):
- pass
-
- def set_statement_timeout(self, *args):
- pass
-
-
-
-class CountAllTracer(BaseTracer):
- """A counter of all SQL statements executed by Storm."""
-
- def __init__(self):
- super(CountAllTracer, self).__init__()
- self.count = 0
-
- def connection_raw_execute(self, *args):
- if self.trace:
- self.count += 1
-
-
-class StderrDebugTracer(BaseTracer):
- """Print all executed SQL statements to a stream.
-
- By default, print to the real stderr (e.g. not a possibly
- doctest-redirected stderr).
- """
-
- def __init__(self, stream=None):
- super(StderrDebugTracer, self).__init__()
- if stream is None:
- self.stream = sys.__stderr__
- else:
- self.stream = stream
-
- def connection_raw_execute(self, connection, cursor, statement, params):
- if self.trace:
- print >> self.stream, statement
=== modified file 'lib/canonical/launchpad/doc/canonical_url_examples.txt'
--- lib/canonical/launchpad/doc/canonical_url_examples.txt 2010-04-28 22:03:05 +0000
+++ lib/canonical/launchpad/doc/canonical_url_examples.txt 2010-07-20 19:16:36 +0000
@@ -9,7 +9,8 @@
>>> from zope.component import getUtility
>>> from canonical.launchpad.webapp import canonical_url
- >>> from canonical.launchpad.interfaces import ILaunchpadCelebrities
+ >>> from canonical.launchpad.interfaces.launchpad import (
+ ... ILaunchpadCelebrities)
>>> celebs = getUtility(ILaunchpadCelebrities)
The examples are divided into sections by theme. Each section starts with
@@ -20,10 +21,10 @@
== Application homepages ==
- >>> from canonical.launchpad.interfaces import (
- ... IMaloneApplication, IBazaarApplication,
- ... ILaunchpadRoot, IQuestionSet
- ... )
+ >>> from canonical.launchpad.interfaces.launchpad import IBazaarApplication
+ >>> from canonical.launchpad.webapp.interfaces import ILaunchpadRoot
+ >>> from lp.answers.interfaces.questioncollection import IQuestionSet
+ >>> from lp.bugs.interfaces.malone import IMaloneApplication
The Launchpad homepage.
=== removed file 'lib/canonical/launchpad/doc/storm-tracers.txt'
--- lib/canonical/launchpad/doc/storm-tracers.txt 2009-04-17 10:32:16 +0000
+++ lib/canonical/launchpad/doc/storm-tracers.txt 1970-01-01 00:00:00 +0000
@@ -1,165 +0,0 @@
-= Storm tracers =
-
-The Storm ORM supports an interface for installing 'tracers', methods of which
-get called during certain parts of the query workflow. Launchpad itself
-provides a few helpers for tracing all SQL, or just those statements which
-succeed or fail. Demonstrating the entire Storm tracer API is outside of the
-scope of this document, but here you'll see Launchpad's convenience tracers.
-
-The base class for tracers provides the basic infrastructure for enabling,
-disabling, installing and uninstalling tracers.
-
- # Save the global tracer state so that we can restore them after the test.
- >>> import storm.tracer
- >>> global_tracers = storm.tracer._tracers[:]
-
- >>> from canonical.database.testing import tracers
-
- >>> messages = []
- >>> class WaveToTheCrowdTracer(tracers.BaseTracer):
- ... def connection_raw_execute(self, *args):
- ... if self.trace:
- ... messages.append('hello world')
-
- >>> tracer = WaveToTheCrowdTracer()
- >>> tracer.install()
-
-Even though the tracer is installed, it won't get called until it's enabled.
-
- >>> from lp.registry.interfaces.person import IPersonSet
- >>> person_set = getUtility(IPersonSet)
- >>> person = person_set.getByName('name12')
- >>> len(messages)
- 0
-
-Once it's enabled, the tracer does its thing.
-
- >>> tracer.trace = True
- >>> person = person_set.getByName('name12')
- >>> len(messages)
- 1
-
-Of course, disabling it, stops the tracer from doing its thing.
-
- >>> tracer.trace = False
- >>> person = person_set.getByName('name12')
- >>> len(messages)
- 1
-
-Let's re-enable it...
-
- >>> tracer.trace = True
- >>> person = person_set.getByName('name12')
- >>> len(messages)
- 2
-
-...and then uninstall it.
-
- >>> tracer.uninstall()
- >>> person = person_set.getByName('name12')
- >>> len(messages)
- 2
-
-We can install more than one tracer and both will run.
-
- >>> messages = []
- >>> class WaveToTheCrowdTracer(tracers.BaseTracer):
- ... def connection_raw_execute(self, *args):
- ... messages.append('hello world')
-
- >>> tracer_one = WaveToTheCrowdTracer()
- >>> tracer_two = WaveToTheCrowdTracer()
- >>> tracer_one.install()
- >>> tracer_two.install()
-
- >>> person = person_set.getByName('name12')
- >>> len(messages)
- 2
- >>> person = person_set.getByName('name12')
- >>> len(messages)
- 4
-
-Because of the API presented by Storm, uninstalling a tracer actually
-uninstalls all tracers of its type.
-
- >>> tracer_one.uninstall()
- >>> person = person_set.getByName('name12')
- >>> len(messages)
- 4
-
-When installing a tracer, we can request that all other tracers first be
-removed.
-
- >>> messages = []
- >>> tracer_one.install()
- >>> tracer_two.install(only=True)
- >>> person = person_set.getByName('name12')
- >>> len(messages)
- 1
- >>> person = person_set.getByName('name12')
- >>> len(messages)
- 2
-
- # Clear all the tracers for the next batch of tests.
- >>> tracer_one.uninstall()
-
-
-== A counting tracer ==
-
-Launchpad provides a counting tracer which can be used to count the total
-number of SQL queries in a particular section of code.
-
- >>> counter = tracers.CountAllTracer()
- >>> counter.install()
- >>> counter.trace = True
-
- >>> person = person_set.getByName('name12')
- >>> person = person_set.getByName('name12')
- >>> counter.count
- 2
-
- >>> counter.trace = False
- >>> person = person_set.getByName('name12')
- >>> person = person_set.getByName('name12')
- >>> counter.count
- 2
-
- # Clear all the tracers for the next batch of tests.
- >>> counter.uninstall()
-
-
-== SQL debugging ==
-
-Launchpad also provides a tracer that can print SQL statements to a file
-stream. By default, it prints to the real stderr.
-
- >>> import sys
- >>> from cStringIO import StringIO
-
- >>> old_real_stderr = sys.__stderr__
- >>> sys.__stderr__ = out = StringIO()
- >>> try:
- ... debug = tracers.StderrDebugTracer()
- ... debug.install()
- ... debug.trace = True
- ... person = person_set.getByName('name12')
- ... finally:
- ... sys.__stderr__ = old_real_stderr
- >>> print out.getvalue()
- SELECT Person...
-
- # Clear all the tracers for the next batch of tests.
- >>> debug.uninstall()
-
-You can also tell it to print to a specific stream.
-
- >>> out = StringIO()
- >>> debug = tracers.StderrDebugTracer(out)
- >>> debug.install()
- >>> debug.trace = True
- >>> person = person_set.getByName('name12')
- >>> print out.getvalue()
- SELECT Person...
-
- # Restore the global state of the tracers before this test was run.
- >>> storm.tracer._tracers = global_tracers
=== modified file 'lib/canonical/launchpad/security.py'
--- lib/canonical/launchpad/security.py 2010-07-16 18:15:44 +0000
+++ lib/canonical/launchpad/security.py 2010-07-20 19:16:36 +0000
@@ -1201,15 +1201,12 @@
template = self.obj
if template.distroseries is not None:
# Template is on a distribution.
- distribution = template.distroseries.distribution
- return (
- AdminDistributionTranslations(
- distribution).checkAuthenticated(user))
-
+ return AdminDistroSeriesTranslations(
+ template.distroseries).checkAuthenticated(user)
else:
# Template is on a product.
- return OnlyRosettaExpertsAndAdmins.checkAuthenticated(
- self, user)
+ return AdminProductSeriesTranslations(
+ template.productseries).checkAuthenticated(user)
class EditPOTemplateDetails(AdminPOTemplateDetails, EditByOwnersOrAdmins):
@@ -1484,6 +1481,7 @@
return (self.obj.archive.owner and
user.inTeam(self.obj.archive.owner))
+
class EditBinaryPackageBuild(EditPackageBuild):
permission = 'launchpad.Edit'
usedfor = IBinaryPackageBuild
@@ -1778,6 +1776,16 @@
self.obj.distribution).checkAuthenticated(user))
+class AdminProductSeriesTranslations(AuthorizationBase):
+ permission = 'launchpad.TranslationsAdmin'
+ usedfor = IProductSeries
+
+ def checkAuthenticated(self, user):
+ """Is the user able to manage `IProductSeries` translations."""
+
+ return OnlyRosettaExpertsAndAdmins(self.obj).checkAuthenticated(user)
+
+
class BranchMergeProposalView(AuthorizationBase):
permission = 'launchpad.View'
usedfor = IBranchMergeProposal
=== modified file 'lib/lp/archiveuploader/dscfile.py'
--- lib/lp/archiveuploader/dscfile.py 2010-06-25 21:35:15 +0000
+++ lib/lp/archiveuploader/dscfile.py 2010-07-20 19:16:36 +0000
@@ -13,7 +13,7 @@
'SignableTagFile',
'DSCFile',
'DSCUploadedFile',
- 'findAndMoveChangelog',
+ 'findChangelog',
'findCopyright',
]
@@ -204,6 +204,8 @@
else:
self.processSignature()
+ self.unpacked_dir = None
+
#
# Useful properties.
#
@@ -483,18 +485,19 @@
"Verifying uploaded source package by unpacking it.")
# Get a temporary dir together.
- tmpdir = tempfile.mkdtemp()
+ self.unpacked_dir = tempfile.mkdtemp()
# chdir into it
cwd = os.getcwd()
- os.chdir(tmpdir)
- dsc_in_tmpdir = os.path.join(tmpdir, self.filename)
+ os.chdir(self.unpacked_dir)
+ dsc_in_tmpdir = os.path.join(self.unpacked_dir, self.filename)
package_files = self.files + [self]
try:
for source_file in package_files:
- os.symlink(source_file.filepath,
- os.path.join(tmpdir, source_file.filename))
+ os.symlink(
+ source_file.filepath,
+ os.path.join(self.unpacked_dir, source_file.filename))
args = ["dpkg-source", "-sn", "-x", dsc_in_tmpdir]
dpkg_source = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@@ -517,8 +520,9 @@
# SourcePackageRelease records.
# Check if 'dpkg-source' created only one directory.
- temp_directories = [dirname for dirname in os.listdir(tmpdir)
- if os.path.isdir(dirname)]
+ temp_directories = [
+ dirname for dirname in os.listdir(self.unpacked_dir)
+ if os.path.isdir(dirname)]
if len(temp_directories) > 1:
yield UploadError(
'Unpacked source contains more than one directory: %r'
@@ -528,31 +532,33 @@
# name (<sourcename>-<no_epoch(no_revision(version))>).
# Locate both the copyright and changelog files for later processing.
- for error in findCopyright(self, tmpdir, self.logger):
+ for error in findCopyright(self, self.unpacked_dir, self.logger):
yield error
- for error in findAndMoveChangelog(self, cwd, tmpdir, self.logger):
+ for error in findChangelog(self, self.unpacked_dir, self.logger):
yield error
self.logger.debug("Cleaning up source tree.")
+ self.logger.debug("Done")
+
+ def cleanUp(self):
+ if self.unpacked_dir is None:
+ return
try:
- shutil.rmtree(tmpdir)
+ shutil.rmtree(self.unpacked_dir)
except OSError, error:
# XXX: dsilvers 2006-03-15: We currently lack a test for this.
if errno.errorcode[error.errno] != 'EACCES':
- yield UploadError(
+ raise UploadError(
"%s: couldn't remove tmp dir %s: code %s" % (
- self.filename, tmpdir, error.errno))
+ self.filename, self.unpacked_dir, error.errno))
else:
- yield UploadWarning(
- "%s: Couldn't remove tree, fixing up permissions." %
- self.filename)
- result = os.system("chmod -R u+rwx " + tmpdir)
+ result = os.system("chmod -R u+rwx " + self.unpacked_dir)
if result != 0:
- yield UploadError("chmod failed with %s" % result)
- shutil.rmtree(tmpdir)
+ raise UploadError("chmod failed with %s" % result)
+ shutil.rmtree(self.unpacked_dir)
+ self.unpacked_dir = None
- self.logger.debug("Done")
def findBuild(self):
"""Find and return the SourcePackageRecipeBuild, if one is specified.
@@ -733,15 +739,14 @@
logger.debug("Copying copyright contents.")
dsc_file.copyright = open(copyright_file).read().strip()
-def findAndMoveChangelog(dsc_file, target_dir, source_dir, logger):
+def findChangelog(dsc_file, source_dir, logger):
"""Find and move any debian/changelog.
- This function finds the changelog file within the source package and
- moves it to target_dir. The changelog file is later uploaded to the
- librarian by DSCFile.storeInDatabase().
+ This function finds the changelog file within the source package. The
+ changelog file is later uploaded to the librarian by
+ DSCFile.storeInDatabase().
:param dsc_file: A DSCFile object where the copyright will be stored.
- :param target_dir: The directory where the changelog will end up.
:param source_dir: The directory where the source was extracted.
:param logger: A logger object for debug output.
"""
@@ -756,9 +761,8 @@
return
# Move the changelog file out of the package direcotry
- logger.debug("Found changelog contents; moving to root directory")
- dsc_file.changelog_path = os.path.join(target_dir, "changelog")
- shutil.move(changelog_file, dsc_file.changelog_path)
+ logger.debug("Found changelog")
+ dsc_file.changelog_path = changelog_file
def check_format_1_0_files(filename, file_type_counts, component_counts,
bzip2_count):
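Since the unpacked tree now survives verification, the caller owns the cleanup; the new test further down uses exactly this try/finally shape. A minimal sketch of the intended lifecycle (dsc_file is any constructed DSCFile):

    try:
        # verify() is a generator yielding UploadErrors; drain it fully so
        # the source package actually gets unpacked and checked.
        errors = list(dsc_file.verify())
    finally:
        # Remove self.unpacked_dir even when verification failed; cleanUp()
        # is a no-op if nothing was unpacked.
        dsc_file.cleanUp()
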
=== modified file 'lib/lp/archiveuploader/nascentupload.py'
--- lib/lp/archiveuploader/nascentupload.py 2010-05-27 22:18:16 +0000
+++ lib/lp/archiveuploader/nascentupload.py 2010-07-20 19:16:36 +0000
@@ -839,6 +839,12 @@
'Exception while accepting:\n %s' % e, exc_info=True)
self.do_reject(notify)
return False
+ else:
+ self.cleanUp()
+
+ def cleanUp(self):
+ if self.changes.dsc is not None:
+ self.changes.dsc.cleanUp()
def do_reject(self, notify=True):
"""Reject the current upload given the reason provided."""
@@ -869,6 +875,7 @@
self.queue_root.notify(summary_text=self.rejection_message,
changes_file_object=changes_file_object, logger=self.logger)
changes_file_object.close()
+ self.cleanUp()
def _createQueueEntry(self):
"""Return a PackageUpload object."""
=== modified file 'lib/lp/archiveuploader/tests/test_dscfile.py'
--- lib/lp/archiveuploader/tests/test_dscfile.py 2010-07-18 00:26:33 +0000
+++ lib/lp/archiveuploader/tests/test_dscfile.py 2010-07-20 19:16:36 +0000
@@ -7,11 +7,14 @@
import os
+from canonical.config import config
+from canonical.launchpad.scripts.logger import QuietFakeLogger
from canonical.testing.layers import LaunchpadZopelessLayer
from lp.archiveuploader.dscfile import (
- findAndMoveChangelog, findCopyright)
+ DSCFile, findChangelog, findCopyright)
from lp.archiveuploader.nascentuploadfile import UploadError
-from lp.archiveuploader.tests import mock_logger_quiet
+from lp.archiveuploader.tests import datadir, mock_logger_quiet
+from lp.archiveuploader.uploadpolicy import BuildDaemonUploadPolicy
from lp.testing import TestCase, TestCaseWithFactory
@@ -27,7 +30,6 @@
os.makedirs(self.dir_path)
self.copyright_path = os.path.join(self.dir_path, "copyright")
self.changelog_path = os.path.join(self.dir_path, "changelog")
- self.changelog_dest = os.path.join(self.tmpdir, "changelog")
self.dsc_file = self.MockDSCFile()
def testBadDebianCopyright(self):
@@ -66,8 +68,8 @@
dangling symlink in an attempt to try and access files on the system
processing the source packages."""
os.symlink("/etc/passwd", self.changelog_path)
- errors = list(findAndMoveChangelog(
- self.dsc_file, self.tmpdir, self.tmpdir, mock_logger_quiet))
+ errors = list(findChangelog(
+ self.dsc_file, self.tmpdir, mock_logger_quiet))
self.assertEqual(len(errors), 1)
self.assertIsInstance(errors[0], UploadError)
@@ -82,12 +84,12 @@
file.write(changelog)
file.close()
- errors = list(findAndMoveChangelog(
- self.dsc_file, self.tmpdir, self.tmpdir, mock_logger_quiet))
+ errors = list(findChangelog(
+ self.dsc_file, self.tmpdir, mock_logger_quiet))
self.assertEqual(len(errors), 0)
self.assertEqual(self.dsc_file.changelog_path,
- self.changelog_dest)
+ self.changelog_path)
def testOversizedFile(self):
"""Test that a file larger than 10MiB will fail.
@@ -106,8 +108,8 @@
file.write(empty_file)
file.close()
- errors = list(findAndMoveChangelog(
- self.dsc_file, self.tmpdir, self.tmpdir, mock_logger_quiet))
+ errors = list(findChangelog(
+ self.dsc_file, self.tmpdir, mock_logger_quiet))
self.assertIsInstance(errors[0], UploadError)
self.assertEqual(
@@ -120,12 +122,27 @@
layer = LaunchpadZopelessLayer
+ def getDscFile(self, name):
+ dsc_path = datadir(os.path.join('suite', name, name + '.dsc'))
+ class Changes:
+ architectures = ['source']
+ logger = QuietFakeLogger()
+ policy = BuildDaemonUploadPolicy()
+ policy.distroseries = self.factory.makeDistroSeries()
+ policy.archive = self.factory.makeArchive()
+ policy.distro = policy.distroseries.distribution
+ return DSCFile(dsc_path, 'digest', 0, 'main/editors',
+ 'priority', 'package', 'version', Changes, policy, logger)
+
def test_ReadOnlyCWD(self):
"""Processing a file should work when cwd is read-only."""
tempdir = self.useTempDir()
- dsc_file = self.factory.makeDscFile(tempdir)
os.chmod(tempdir, 0555)
try:
- list(dsc_file.unpackAndCheckSource())
+ dsc_file = self.getDscFile('bar_1.0-1')
+ try:
+ list(dsc_file.verify())
+ finally:
+ dsc_file.cleanUp()
finally:
os.chmod(tempdir, 0755)
=== modified file 'lib/lp/archiveuploader/uploadprocessor.py'
--- lib/lp/archiveuploader/uploadprocessor.py 2010-06-29 14:01:01 +0000
+++ lib/lp/archiveuploader/uploadprocessor.py 2010-07-20 19:16:36 +0000
@@ -293,7 +293,7 @@
(distribution, suite_name,
archive) = parse_upload_path(relative_path)
except UploadPathError, e:
- # pick some defaults to create the NascentUploap() object.
+ # pick some defaults to create the NascentUpload() object.
# We will be rejecting the upload so it doesn matter much.
distribution = getUtility(IDistributionSet)['ubuntu']
suite_name = None
=== modified file 'lib/lp/bugs/browser/bugrole.py'
--- lib/lp/bugs/browser/bugrole.py 2010-06-11 21:51:48 +0000
+++ lib/lp/bugs/browser/bugrole.py 2010-07-20 19:16:36 +0000
@@ -20,7 +20,7 @@
OTHER_TEAM = object()
OK = object()
- def _getFieldState(self, field_name, data):
+ def _getFieldState(self, current_role, field_name, data):
"""Return the enum that summarises the field state."""
# The field_name will not be in the data if the user did not enter
# a person in the ValidPersonOrTeam vocabulary.
@@ -28,6 +28,10 @@
return self.INVALID_PERSON
role = data[field_name]
user = self.user
+ # If no data was changed, the field is OK regardless of who the
+ # current user is.
+ if current_role == role:
+ return self.OK
# The user may assign the role to None, himself, or a team he admins.
if role is None or self.context.userCanAlterSubscription(role, user):
return self.OK
@@ -43,7 +47,8 @@
Verify that the value is None, the user, or a team he administers,
otherwise, set a field error.
"""
- field_state = self._getFieldState('bug_supervisor', data)
+ field_state = self._getFieldState(
+ self.context.bug_supervisor, 'bug_supervisor', data)
if field_state is self.INVALID_PERSON:
error = (
'You must choose a valid person or team to be the '
@@ -78,7 +83,12 @@
self.setFieldError('bug_supervisor', error)
def changeBugSupervisor(self, bug_supervisor):
- self.context.setBugSupervisor(bug_supervisor, self.user)
+ if self.context.bug_supervisor != bug_supervisor:
+ self.context.setBugSupervisor(bug_supervisor, self.user)
+
+ def changeSecurityContact(self, security_contact):
+ if self.context.security_contact != security_contact:
+ self.context.security_contact = security_contact
def validateSecurityContact(self, data):
"""Validates the new security contact.
@@ -86,7 +96,8 @@
Verify that the value is None, the user, or a team he administers,
otherwise, set a field error.
"""
- field_state = self._getFieldState('security_contact', data)
+ field_state = self._getFieldState(
+ self.context.security_contact, 'security_contact', data)
if field_state is self.INVALID_PERSON:
error = (
'You must choose a valid person or team to be the '
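A condensed sketch of the new decision order in _getFieldState, with the permission machinery stubbed out (user_can_appoint stands in for the real ValidPersonOrTeam/userCanAlterSubscription checks):

    INVALID_PERSON = object()
    OTHER_TEAM = object()
    OK = object()

    def get_field_state(current_role, field_name, data, user_can_appoint):
        # Missing key: the form value did not match the vocabulary.
        if field_name not in data:
            return INVALID_PERSON
        role = data[field_name]
        # Unchanged values pass, regardless of who the requesting user is.
        if current_role == role:
            return OK
        # A changed value must be None or someone the user may appoint.
        if role is None or user_can_appoint(role):
            return OK
        return OTHER_TEAM
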
=== modified file 'lib/lp/bugs/browser/bugtarget.py'
--- lib/lp/bugs/browser/bugtarget.py 2010-06-18 00:46:17 +0000
+++ lib/lp/bugs/browser/bugtarget.py 2010-07-20 19:16:36 +0000
@@ -167,11 +167,13 @@
@action("Change", name='change')
def change_action(self, action, data):
- # bug_supervisor requires a transition method, so it must be
- # handled separately and removed for the updateContextFromData
- # to work as expected.
+ # bug_supervisor and security_contact require transition methods,
+ # so they must be handled separately and removed for the
+ # updateContextFromData to work as expected.
self.changeBugSupervisor(data['bug_supervisor'])
del data['bug_supervisor']
+ self.changeSecurityContact(data['security_contact'])
+ del data['security_contact']
self.updateContextFromData(data)
=== modified file 'lib/lp/bugs/browser/tests/test_bugtarget_configure.py'
--- lib/lp/bugs/browser/tests/test_bugtarget_configure.py 2010-06-18 00:46:17 +0000
+++ lib/lp/bugs/browser/tests/test_bugtarget_configure.py 2010-07-20 19:16:36 +0000
@@ -122,6 +122,31 @@
self.assertEqual([], view.errors)
self.assertFalse(self.product.enable_bug_expiration)
+ def test_bug_role_non_admin_can_edit(self):
+ # Verify that a member of an owning team who is not an admin of
+ # the bug supervisor team or security_contact team can change bug
+ # reporting guidelines.
+ owning_team = self.factory.makeTeam(owner=self.owner)
+ bug_team = self.factory.makeTeam(owner=self.owner)
+ weak_owner = self.factory.makePerson()
+ login_person(self.owner)
+ owning_team.addMember(weak_owner, self.owner)
+ bug_team.addMember(weak_owner, self.owner)
+ self.product.owner = owning_team
+ self.product.setBugSupervisor(bug_team, self.owner)
+ self.product.security_contact = bug_team
+ login_person(weak_owner)
+ form = self._makeForm()
+ # Only the bug_reporting_guidelines are different.
+ form['field.bug_supervisor'] = bug_team.name
+ form['field.security_contact'] = bug_team.name
+ form['field.bug_reporting_guidelines'] = 'new guidelines'
+ view = create_initialized_view(
+ self.product, name='+configure-bugtracker', form=form)
+ self.assertEqual([], view.errors)
+ self.assertEqual(
+ 'new guidelines', self.product.bug_reporting_guidelines)
+
def test_suite():
return unittest.TestLoader().loadTestsFromName(__name__)
=== modified file 'lib/lp/registry/doc/vocabularies.txt'
--- lib/lp/registry/doc/vocabularies.txt 2010-07-14 15:59:44 +0000
+++ lib/lp/registry/doc/vocabularies.txt 2010-07-20 19:16:36 +0000
@@ -1,9 +1,11 @@
= Registry vocabularies =
- >>> from canonical.launchpad.ftests import login
- >>> from canonical.launchpad.interfaces import (
- ... IPersonSet, IOpenLaunchBag, IProductSet, IProjectGroupSet)
>>> from canonical.database.sqlbase import flush_database_updates
+ >>> from canonical.launchpad.webapp.interfaces import IOpenLaunchBag
+ >>> from lp.registry.interfaces.person import IPersonSet
+ >>> from lp.registry.interfaces.product import IProductSet
+ >>> from lp.registry.interfaces.projectgroup import IProjectGroupSet
+ >>> from lp.testing import login
>>> person_set = getUtility(IPersonSet)
>>> product_set = getUtility(IProductSet)
>>> login('foo.bar@xxxxxxxxxxxxx')
@@ -50,8 +52,9 @@
>>> personset = getUtility(IPersonSet)
>>> ddaa = personset.getByName('ddaa')
>>> carlos = personset.getByName('carlos')
- >>> from canonical.launchpad.interfaces import (
- ... IMailingListSet, MailingListStatus, TeamSubscriptionPolicy)
+ >>> from lp.registry.interfaces.mailinglist import (
+ ... IMailingListSet, MailingListStatus)
+ >>> from lp.registry.interfaces.person import TeamSubscriptionPolicy
>>> team_one = personset.newTeam(
... ddaa, 'bass-players', 'Bass Players',
... subscriptionpolicy=TeamSubscriptionPolicy.OPEN)
@@ -257,7 +260,7 @@
The PersonActiveMembership vocabulary only shows teams where the
membership is public.
- >>> from canonical.launchpad.interfaces import PersonVisibility
+ >>> from lp.registry.interfaces.person import PersonVisibility
>>> pubteam = factory.makeTeam(owner=foo_bar, name='public-team',
... displayname="Public Team",
... visibility=PersonVisibility.PUBLIC)
@@ -350,7 +353,7 @@
>>> len(milestones)
0
- >>> from canonical.launchpad.interfaces import IMaloneApplication
+ >>> from lp.bugs.interfaces.malone import IMaloneApplication
>>> malone = getUtility(IMaloneApplication)
>>> milestones = get_naked_vocab(malone, 'Milestone')
>>> len(milestones)
@@ -395,7 +398,7 @@
If the context is a bugtask, only the bugtask's target's milestones are
in the vocabulary.
- >>> from canonical.launchpad.interfaces import IBugSet
+ >>> from lp.bugs.interfaces.bug import IBugSet
>>> bug_one = getUtility(IBugSet).get(1)
>>> firefox_task = bug_one.bugtasks[0]
>>> firefox_task.bugtargetdisplayname
@@ -523,7 +526,7 @@
The ProjectGroupVocabulary does not list inactive projects.
- >>> from canonical.launchpad.interfaces import IProjectGroupSet
+ >>> from lp.registry.interfaces.projectgroup import IProjectGroupSet
>>> moz_project = getUtility(IProjectGroupSet)['mozilla']
>>> moz_project in project_vocabulary
True
@@ -647,7 +650,7 @@
A person with a single and unvalidated email address can be merged.
- >>> from canonical.launchpad.interfaces import PersonCreationRationale
+ >>> from lp.registry.interfaces.person import PersonCreationRationale
>>> fooperson, email = person_set.createPersonAndEmail(
... 'foobaz@xxxxxxx', PersonCreationRationale.UNKNOWN,
... name='foobaz', displayname='foo baz')
@@ -679,7 +682,8 @@
A person whose account_status is any of the statuses of
INACTIVE_ACCOUNT_STATUSES is part of the vocabulary, though.
- >>> from canonical.launchpad.interfaces import INACTIVE_ACCOUNT_STATUSES
+ >>> from canonical.launchpad.interfaces.account import (
+ ... INACTIVE_ACCOUNT_STATUSES)
>>> naked_cprov.merged = None
>>> checked_count = 0
>>> for status in INACTIVE_ACCOUNT_STATUSES:
@@ -1287,12 +1291,10 @@
Inactive projects and project groups are not available.
- >>> from canonical.launchpad.ftests import syncUpdate
>>> tomcat = product_set.getByName('tomcat')
>>> tomcat in vocab
True
>>> tomcat.active = False
- >>> syncUpdate(tomcat)
>>> tomcat in vocab
False
@@ -1300,7 +1302,6 @@
>>> apache in vocab
True
>>> apache.active = False
- >>> syncUpdate(apache)
>>> apache in vocab
False
@@ -1349,7 +1350,8 @@
thunderbird Mozilla Thunderbird Mozilla Thunderbird (Product)
ubuntu Ubuntu Ubuntu Linux (Distribution)
- >>> from canonical.launchpad.interfaces import ILaunchpadCelebrities
+ >>> from canonical.launchpad.interfaces.launchpad import (
+ ... ILaunchpadCelebrities)
>>> ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
>>> ubuntu in featured_project_vocabulary
True
@@ -1373,7 +1375,7 @@
The test data has one project with a proprietary license. Let's
change bzr's so we will get more interesting results.
- >>> from canonical.launchpad.interfaces import License
+ >>> from lp.registry.interfaces.product import License
>>> bzr = product_set.getByName('bzr')
>>> bzr.licenses = [License.OTHER_PROPRIETARY]
=== modified file 'lib/lp/soyuz/adapters/archivedependencies.py'
--- lib/lp/soyuz/adapters/archivedependencies.py 2010-05-10 19:49:02 +0000
+++ lib/lp/soyuz/adapters/archivedependencies.py 2010-07-20 19:16:36 +0000
@@ -12,8 +12,9 @@
Auxiliary functions exposed for testing purposes:
- * get_components_for_building: return the corresponding component
- dependencies for a build, this result is known as 'ogre_components';
+ * get_components_for_context: return the corresponding component
+ dependencies for a component and pocket, this result is known as
+ 'ogre_components';
* get_primary_current_component: return the component name where the
building source is published in the primary archive.
@@ -30,7 +31,8 @@
'component_dependencies',
'default_component_dependency_name',
'default_pocket_dependency',
- 'get_components_for_building',
+ 'expand_dependencies',
+ 'get_components_for_context',
'get_primary_current_component',
'get_sources_list_for_building',
'pocket_dependencies',
@@ -86,19 +88,20 @@
default_component_dependency_name = 'multiverse'
-def get_components_for_building(build):
+def get_components_for_context(component, pocket):
"""Return the components allowed to be used in the build context.
- :param build: a context `IBuild`.
+ :param component: the context `IComponent`.
+ :param pocket: the context `IPocket`.
:return: a list of component names.
"""
# BACKPORTS should be able to fetch build dependencies from any
# component in order to cope with component changes occurring
- # accross distroseries. See bug #198936 for further information.
- if build.pocket == PackagePublishingPocket.BACKPORTS:
+ # across distroseries. See bug #198936 for further information.
+ if pocket == PackagePublishingPocket.BACKPORTS:
return component_dependencies['multiverse']
- return component_dependencies[build.current_component.name]
+ return component_dependencies[component.name]
def get_primary_current_component(archive, distroseries, sourcepackagename):
@@ -119,6 +122,54 @@
return 'universe'
+def expand_dependencies(archive, distro_series, pocket, component,
+ source_package_name):
+ """Return the set of dependency archives, pockets and components.
+
+ :param archive: the context `IArchive`.
+ :param distro_series: the context `IDistroSeries`.
+ :param pocket: the context `PackagePublishingPocket`.
+ :param component: the context `IComponent`.
+ :param source_package_name: A source package name (as text)
+ :return: a list of (archive, pocket, [component]), representing the
+ dependencies defined by the given build context.
+ """
+ deps = []
+
+ # Add implicit self-dependency for non-primary contexts.
+ if archive.purpose in ALLOW_RELEASE_BUILDS:
+ deps.append((
+ archive, PackagePublishingPocket.RELEASE,
+ get_components_for_context(component, pocket)))
+
+ primary_component = get_primary_current_component(
+ archive, distro_series, source_package_name)
+ # Consider user-selected archive dependencies.
+ for archive_dependency in archive.dependencies:
+ # When the dependency component is undefined, we should use
+ # the component where the source is published in the primary
+ # archive.
+ if archive_dependency.component is None:
+ components = component_dependencies[primary_component]
+ else:
+ components = component_dependencies[
+ archive_dependency.component.name]
+ # Follow pocket dependencies.
+ for pocket in pocket_dependencies[archive_dependency.pocket]:
+ deps.append(
+ (archive_dependency.dependency, pocket, components))
+
+ # Consider primary archive dependency override. Add the default
+ # primary archive dependencies if it's not present.
+ if archive.getArchiveDependency(
+ archive.distribution.main_archive) is None:
+ primary_dependencies = _get_default_primary_dependencies(
+ archive, component, pocket)
+ deps.extend(primary_dependencies)
+
+ return deps
+
+
def get_sources_list_for_building(build, distroarchseries, sourcepackagename):
"""Return the sources_list entries required to build the given item.
@@ -133,17 +184,13 @@
:param sourcepackagename: A source package name (as text)
:return: a deb sources_list entries (lines).
"""
- deps = []
- sources_list_lines = []
-
- # Add implicit self-dependency for non-primary contexts.
- if build.archive.purpose in ALLOW_RELEASE_BUILDS:
- self_dep = [(
- build.archive, PackagePublishingPocket.RELEASE,
- get_components_for_building(build))]
- sources_list_lines = _get_sources_list_for_dependencies(
- self_dep, distroarchseries)
-
+ deps = expand_dependencies(
+ build.archive, distroarchseries.distroseries, build.pocket,
+ build.current_component, sourcepackagename)
+ sources_list_lines = \
+ _get_sources_list_for_dependencies(deps, distroarchseries)
+
+ external_dep_lines = []
# Append external sources_list lines for this archive if it's
# specified in the configuration.
try:
@@ -152,7 +199,7 @@
for archive_dep in dependencies.splitlines():
line = archive_dep % (
{'series': distroarchseries.distroseries.name})
- sources_list_lines.append(line)
+ external_dep_lines.append(line)
except StandardError, e:
# Malformed external dependencies can incapacitate the build farm
# manager (lp:516169). That's obviously not acceptable.
@@ -166,34 +213,13 @@
if build.archive.enabled == True:
build.archive.disable()
- # Consider user-selected archive dependencies.
- primary_component = get_primary_current_component(
- build.archive, build.distro_series, sourcepackagename)
- for archive_dependency in build.archive.dependencies:
- # When the dependency component is undefined, we should use
- # the component where the source is published in the primary
- # archive.
- if archive_dependency.component is None:
- components = component_dependencies[primary_component]
- else:
- components = component_dependencies[
- archive_dependency.component.name]
- # Follow pocket dependencies.
- for pocket in pocket_dependencies[archive_dependency.pocket]:
- deps.append(
- (archive_dependency.dependency, pocket, components)
- )
-
- # Consider primary archive dependency override. Add the default
- # primary archive dependencies if it's not present.
- if build.archive.getArchiveDependency(
- build.archive.distribution.main_archive) is None:
- primary_dependencies = _get_default_primary_dependencies(build)
- deps.extend(primary_dependencies)
-
- sources_list_lines.extend(
- _get_sources_list_for_dependencies(deps, distroarchseries))
- return sources_list_lines
+ # For an unknown reason (perhaps because OEM has archives with
+ # binaries that need to override primary binaries of the same
+ # version), we want the external dependency lines to show up second:
+ # after the archive itself, but before any other dependencies.
+ return [sources_list_lines[0]] + external_dep_lines + \
+ sources_list_lines[1:]
+
def _has_published_binaries(archive, distroarchseries, pocket):
"""Whether or not the archive dependency has published binaries."""
@@ -252,27 +278,29 @@
return sources_list_lines
-def _get_default_primary_dependencies(build):
- """Return the default primary dependencies for a given build.
+def _get_default_primary_dependencies(archive, component, pocket):
+ """Return the default primary dependencies for a given context.
- :param build: the `IBuild` context;
+ :param archive: the context `IArchive`.
+ :param component: the context `IComponent`.
+ :param pocket: the context `PackagePublishingPocket`.
:return: a list containing the default dependencies to primary
archive.
"""
- if build.archive.purpose in ALLOW_RELEASE_BUILDS:
+ if archive.purpose in ALLOW_RELEASE_BUILDS:
primary_pockets = pocket_dependencies[
default_pocket_dependency]
primary_components = component_dependencies[
default_component_dependency_name]
else:
- primary_pockets = pocket_dependencies[build.pocket]
- primary_components = get_components_for_building(build)
+ primary_pockets = pocket_dependencies[pocket]
+ primary_components = get_components_for_context(component, pocket)
primary_dependencies = []
for pocket in primary_pockets:
primary_dependencies.append(
- (build.distro_series.distribution.main_archive, pocket,
+ (archive.distribution.main_archive, pocket,
primary_components))
return primary_dependencies
=== modified file 'lib/lp/soyuz/browser/archive.py'
--- lib/lp/soyuz/browser/archive.py 2010-07-15 14:20:49 +0000
+++ lib/lp/soyuz/browser/archive.py 2010-07-20 19:16:36 +0000
@@ -46,6 +46,7 @@
from canonical.cachedproperty import cachedproperty
from canonical.launchpad import _
+from canonical.launchpad.components.tokens import create_token
from canonical.launchpad.helpers import english_list
from canonical.lazr.utils import smartquote
from lp.buildmaster.interfaces.buildbase import BuildStatus
@@ -1877,11 +1878,24 @@
custom_widget('enabled_restricted_families', LabeledMultiCheckBoxWidget)
+ def updateContextFromData(self, data):
+ """Update context from form data.
+
+ If the user did not specify a buildd secret but marked the
+ archive as private, generate a secret for them.
+ """
+ if data['private'] and data['buildd_secret'] is None:
+ # buildd secrets are only used by builders, so autogenerate one.
+ self.context.buildd_secret = create_token(16)
+ del(data['buildd_secret'])
+ super(ArchiveAdminView, self).updateContextFromData(data)
+
def validate_save(self, action, data):
"""Validate the save action on ArchiveAdminView.
- buildd_secret can only be set, and must be set, when
- this is a private archive.
+ buildd_secret can only be set for private archives. If the archive
+ is private and the buildd secret is not set, one will be generated.
"""
form.getWidgetsData(self.widgets, 'field', data)
@@ -1893,11 +1907,6 @@
'This archive already has published sources. It is '
'not possible to switch the privacy.')
- if data.get('buildd_secret') is None and data['private']:
- self.setFieldError(
- 'buildd_secret',
- 'Required for private archives.')
-
if self.owner_is_private_team and not data['private']:
self.setFieldError(
'private',
=== modified file 'lib/lp/soyuz/browser/tests/test_archive_admin_view.py'
--- lib/lp/soyuz/browser/tests/test_archive_admin_view.py 2010-02-16 15:53:05 +0000
+++ lib/lp/soyuz/browser/tests/test_archive_admin_view.py 2010-07-20 19:16:36 +0000
@@ -23,19 +23,19 @@
# object.
login('admin@xxxxxxxxxxxxx')
- def initialize_admin_view(self, private=True):
+ def initialize_admin_view(self, private=True, buildd_secret=''):
"""Initialize the admin view to set the privacy.."""
method = 'POST'
form = {
'field.enabled': 'on',
'field.actions.save': 'Save',
}
+
+ form['field.buildd_secret'] = buildd_secret
if private is True:
form['field.private'] = 'on'
- form['field.buildd_secret'] = 'test'
else:
form['field.private'] = 'off'
- form['field.buildd_secret'] = ''
view = ArchiveAdminView(self.ppa, LaunchpadTestRequest(
method=method, form=form))
@@ -56,7 +56,7 @@
def test_set_private_without_packages(self):
# If a ppa does not have packages published, it is possible to
# update the private attribute.
- view = self.initialize_admin_view(private=True)
+ view = self.initialize_admin_view(private=True, buildd_secret="test")
self.assertEqual(0, len(view.errors))
self.assertTrue(view.context.private)
@@ -66,14 +66,22 @@
self.make_ppa_private(self.ppa)
self.assertTrue(self.ppa.private)
- view = self.initialize_admin_view(private=False)
+ view = self.initialize_admin_view(private=False, buildd_secret='')
self.assertEqual(0, len(view.errors))
self.assertFalse(view.context.private)
+ def test_set_private_without_buildd_secret(self):
+ """If a PPA is marked private but no buildd secret is specified,
+ one will be generated."""
+ view = self.initialize_admin_view(private=True, buildd_secret='')
+ self.assertEqual(0, len(view.errors))
+ self.assertTrue(view.context.private)
+ self.assertTrue(len(view.context.buildd_secret) > 4)
+
def test_set_private_with_packages(self):
# A PPA that does have packages cannot be privatised.
self.publish_to_ppa(self.ppa)
- view = self.initialize_admin_view(private=True)
+ view = self.initialize_admin_view(private=True, buildd_secret="test")
self.assertEqual(1, len(view.errors))
self.assertEqual(
'This archive already has published sources. '
@@ -87,7 +95,7 @@
self.assertTrue(self.ppa.private)
self.publish_to_ppa(self.ppa)
- view = self.initialize_admin_view(private=False)
+ view = self.initialize_admin_view(private=False, buildd_secret='')
self.assertEqual(1, len(view.errors))
self.assertEqual(
'This archive already has published sources. '
=== modified file 'lib/lp/soyuz/doc/archive-dependencies.txt'
--- lib/lp/soyuz/doc/archive-dependencies.txt 2010-05-10 19:49:02 +0000
+++ lib/lp/soyuz/doc/archive-dependencies.txt 2010-07-20 19:16:36 +0000
@@ -97,7 +97,7 @@
... 'main', 'restricted', 'universe', 'multiverse', 'partner']
>>> from lp.soyuz.adapters.archivedependencies import (
- ... get_components_for_building)
+ ... get_components_for_context)
>>> ogre_pub = test_publisher.getPubSource(sourcename='ogre')
>>> [ogre_build] = ogre_pub.createMissingBuilds()
@@ -111,7 +111,8 @@
... syncUpdate(ogre_pub)
... flush_database_caches()
... components_term = " ".join(
- ... get_components_for_building(ogre_build))
+ ... get_components_for_context(
+ ... ogre_build.current_component, ogre_build.pocket))
... print '%10s | %s' % (ogre_build.current_component.name,
... components_term)
@@ -566,4 +567,3 @@
main restricted universe multiverse
deb http://ftpmaster.internal/ubuntu hoary-updates
main restricted universe multiverse
-
=== modified file 'lib/lp/soyuz/doc/archive.txt'
--- lib/lp/soyuz/doc/archive.txt 2010-07-02 21:25:36 +0000
+++ lib/lp/soyuz/doc/archive.txt 2010-07-20 19:16:36 +0000
@@ -1285,72 +1285,6 @@
...
AssertionError: This dependency does not exist.
-== Find binary package dependency candidates ==
-
-Archive allows a lookup on a single binary package dependency
-candidate by its name, via the `PublishedPackage` table:
-
- >>> warty_i386 = warty['i386']
-
- >>> candidate = ubuntu.main_archive.findDepCandidateByName(
- ... warty_i386, "pmount")
- >>> print candidate.binarypackagerelease.binarypackagename.name
- pmount
-
- >>> candidate = cprov.archive.findDepCandidateByName(
- ... warty_i386, "pmount")
- >>> print candidate.binarypackagerelease.binarypackagename.name
- pmount
-
-Since 'python2.4' isn't available in our sampledata (not even
-published), None is returned:
-
- >>> print ubuntu.main_archive.findDepCandidateByName(
- ... warty_i386, "python2.4")
- None
-
- >>> print cprov.archive.findDepCandidateByName(
- ... warty_i386, "python2.4")
- None
-
-This method is aware of the archive dependency tree. So, even when a
-package is not published on the context PPA but is available somewhere
-in the archive dependency domain it will be found.
-
-We also add another archive dependency here to exercise findDepCandidateByName
-a little more.
-
- >>> joe = factory.makePerson(email='joe@xxxxxxxxxxx')
- >>> second_ppa = factory.makeArchive(name="secondppa", owner=joe)
- >>> second_archive_dep = cprov.archive.addArchiveDependency(
- ... second_ppa, release_pocket, main_component)
-
-'at' binary package is not present in Celso's PPA.
-
- >>> cprov_archive.getAllPublishedBinaries(name='at').count()
- 0
-
-But it is available in PRIMARY ubuntu archive.
-
- >>> primary_candidate = ubuntu.main_archive.findDepCandidateByName(
- ... warty_i386, "at")
- >>> primary_candidate is not None
- True
-
-Then a lookup on Celso's PPA will find it.
-
- >>> ppa_candidate = cprov.archive.findDepCandidateByName(
- ... warty_i386, "at")
- >>> ppa_candidate is not None
- True
-
- >>> primary_candidate == ppa_candidate
- True
-
-And clean-up after ourselves:
-
- >>> ignore = cprov.archive.removeArchiveDependency(second_ppa)
-
== Creating a package copy request from an IArchive ==
The IArchive interface includes a convenience method for creating a
@@ -1529,7 +1463,7 @@
>>> archive_purposes = [archive.purpose.name for archive in archive_set]
>>> len(archive_purposes)
- 19
+ 18
>>> print sorted(set(archive_purposes))
['COPY', 'DEBUG', 'PARTNER', 'PPA', 'PRIMARY']
=== modified file 'lib/lp/soyuz/doc/binarypackagebuild.txt'
--- lib/lp/soyuz/doc/binarypackagebuild.txt 2010-06-10 22:12:08 +0000
+++ lib/lp/soyuz/doc/binarypackagebuild.txt 2010-07-20 19:16:36 +0000
@@ -987,8 +987,9 @@
main
>>> from lp.soyuz.adapters.archivedependencies import (
- ... get_components_for_building)
- >>> print get_components_for_building(depwait_build)
+ ... get_components_for_context)
+ >>> print get_components_for_context(
+ ... depwait_build.current_component, depwait_build.pocket)
['main']
Thus the 'pmount' dependency remains unsatisfied.
@@ -1036,7 +1037,8 @@
>>> flush_database_caches()
>>> login(ANONYMOUS)
- >>> print get_components_for_building(depwait_build)
+ >>> print get_components_for_context(
+ ... depwait_build.current_component, depwait_build.pocket)
['main']
>>> print pmount_pub.component.name
=== modified file 'lib/lp/soyuz/interfaces/archive.py'
--- lib/lp/soyuz/interfaces/archive.py 2010-07-12 08:45:32 +0000
+++ lib/lp/soyuz/interfaces/archive.py 2010-07-20 19:16:36 +0000
@@ -443,11 +443,24 @@
Person table indexes while searching.
"""
- def findDepCandidateByName(distroarchseries, name):
- """Return the last published binarypackage by given name.
-
- Return the `BinaryPackagePublishingHistory` record by distroarchseries
- and name, or None if not found.
+ def findDepCandidates(distro_arch_series, pocket, component,
+ source_package_name, dep_name):
+ """Return matching binaries in this archive and its dependencies.
+
+ Return all published `IBinaryPackagePublishingHistory` records with
+ the given name, in this archive and dependencies as specified by the
+ given build context, using the usual archive dependency rules.
+
+ We can't just use the first, since there may be other versions
+ published in other dependency archives.
+
+ :param distro_arch_series: the context `IDistroArchSeries`.
+ :param pocket: the context `PackagePublishingPocket`.
+ :param component: the context `IComponent`.
+ :param source_package_name: the context source package name (as text).
+ :param dep_name: the name of the binary package to look up.
+ :return: a sequence of matching `IBinaryPackagePublishingHistory`
+ records.
"""
def removeArchiveDependency(dependency):
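The new signature mirrors the expand_dependencies context; a sketch of a lookup in the spirit of the doctest removed above (ubuntu and warty_i386 as in that doctest, 'mypackage' illustrative, import paths assumed from the tree's conventions):

    from zope.component import getUtility
    from lp.registry.interfaces.pocket import PackagePublishingPocket
    from lp.soyuz.interfaces.component import IComponentSet

    candidates = ubuntu.main_archive.findDepCandidates(
        warty_i386, PackagePublishingPocket.RELEASE,
        getUtility(IComponentSet)['main'], 'mypackage', 'pmount')
    for bpph in candidates:
        # Newest publications come first; any one of them may satisfy a
        # build dependency's version constraint.
        print bpph.binarypackagerelease.version
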
=== modified file 'lib/lp/soyuz/model/archive.py'
--- lib/lp/soyuz/model/archive.py 2010-07-12 08:45:32 +0000
+++ lib/lp/soyuz/model/archive.py 2010-07-20 19:16:36 +0000
@@ -37,6 +37,7 @@
from lp.buildmaster.model.buildfarmjob import BuildFarmJob
from lp.buildmaster.model.packagebuild import PackageBuild
from lp.services.job.interfaces.job import JobStatus
+from lp.soyuz.adapters.archivedependencies import expand_dependencies
from lp.soyuz.adapters.packagelocation import PackageLocation
from canonical.launchpad.components.tokens import (
create_unique_token_for_table)
@@ -47,6 +48,7 @@
from lp.soyuz.model.binarypackagerelease import (
BinaryPackageReleaseDownloadCount)
from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
+from lp.soyuz.model.component import Component
from lp.soyuz.model.distributionsourcepackagecache import (
DistributionSourcePackageCache)
from lp.soyuz.model.distroseriespackagecache import DistroSeriesPackageCache
@@ -807,30 +809,32 @@
self.sources_cached = sources_cached.count()
self.binaries_cached = binaries_cached.count()
- def findDepCandidateByName(self, distroarchseries, name):
+ def findDepCandidates(self, distro_arch_series, pocket, component,
+ source_package_name, dep_name):
"""See `IArchive`."""
- archives = []
- if self.is_ppa:
- archives.append(self.distribution.main_archive.id)
- archives.append(self.id)
- archives.extend(
- IResultSet(self.dependencies).values(
- ArchiveDependency.dependencyID))
+ deps = expand_dependencies(
+ self, distro_arch_series.distroseries, pocket, component,
+ source_package_name)
+ archive_clause = Or([And(
+ BinaryPackagePublishingHistory.archiveID == archive.id,
+ BinaryPackagePublishingHistory.pocket == pocket,
+ Component.name.is_in(components))
+ for (archive, pocket, components) in deps])
store = ISlaveStore(BinaryPackagePublishingHistory)
- candidate = store.find(
+ return store.find(
BinaryPackagePublishingHistory,
- BinaryPackageName.name == name,
+ BinaryPackageName.name == dep_name,
BinaryPackageRelease.binarypackagename == BinaryPackageName.id,
BinaryPackagePublishingHistory.binarypackagerelease ==
BinaryPackageRelease.id,
BinaryPackagePublishingHistory.distroarchseries ==
- distroarchseries,
- In(BinaryPackagePublishingHistory.archiveID, archives),
+ distro_arch_series,
BinaryPackagePublishingHistory.status ==
- PackagePublishingStatus.PUBLISHED
- ).order_by(Desc(BinaryPackagePublishingHistory.id))
- return candidate.first()
+ PackagePublishingStatus.PUBLISHED,
+ BinaryPackagePublishingHistory.componentID == Component.id,
+ archive_clause).order_by(
+ Desc(BinaryPackagePublishingHistory.id))
def getArchiveDependency(self, dependency):
"""See `IArchive`."""
=== modified file 'lib/lp/soyuz/model/binarypackagebuild.py'
--- lib/lp/soyuz/model/binarypackagebuild.py 2010-06-15 11:11:27 +0000
+++ lib/lp/soyuz/model/binarypackagebuild.py 2010-07-20 19:16:36 +0000
@@ -12,6 +12,7 @@
import operator
from storm.locals import Int, Reference
+from storm.store import EmptyResultSet
from zope.interface import implements
from zope.component import getUtility
@@ -50,7 +51,6 @@
from lp.buildmaster.model.packagebuild import (
PackageBuild, PackageBuildDerived)
from lp.services.job.model.job import Job
-from lp.soyuz.adapters.archivedependencies import get_components_for_building
from lp.soyuz.interfaces.archive import ArchivePurpose
from lp.soyuz.interfaces.binarypackagebuild import (
BuildSetStatus, CannotBeRescored, IBinaryPackageBuild,
@@ -387,33 +387,25 @@
def _isDependencySatisfied(self, token):
"""Check if the given dependency token is satisfied.
- Check if the dependency exists, if its version constraint is
- satisfied and if it is reachable in the build context.
+ Check if the dependency exists and that its version constraint is
+ satisfied.
"""
name, version, relation = self._parseDependencyToken(token)
- dep_candidate = self.archive.findDepCandidateByName(
- self.distro_arch_series, name)
-
- if not dep_candidate:
- return False
-
- if not self._checkDependencyVersion(
- dep_candidate.binarypackagerelease.version, version, relation):
- return False
-
- # Only PRIMARY archive build dependencies should be restricted
- # to the ogre_components. Both PARTNER and PPA can reach
- # dependencies from all components in the PRIMARY archive.
- # Moreover, PARTNER and PPA component domain is single, i.e,
- # PARTNER only contains packages in 'partner' component and PPAs
- # only contains packages in 'main' component.
- ogre_components = get_components_for_building(self)
- if (self.archive.purpose == ArchivePurpose.PRIMARY and
- dep_candidate.component.name not in ogre_components):
- return False
-
- return True
+ # There may be several published versions in the available
+ # archives and pockets. If any one of them satisfies our
+ # constraints, the dependency is satisfied.
+ dep_candidates = self.archive.findDepCandidates(
+ self.distro_arch_series, self.pocket, self.current_component,
+ self.source_package_release.sourcepackagename.name, name)
+
+ for dep_candidate in dep_candidates:
+ if self._checkDependencyVersion(
+ dep_candidate.binarypackagerelease.version, version,
+ relation):
+ return True
+
+ return False
def _toAptFormat(self, token):
"""Rebuild dependencies line in apt format."""
@@ -891,10 +883,7 @@
"""See `IBinaryPackageBuildSet`."""
# If not distroarchseries was found return empty list
if not arch_ids:
- # XXX cprov 2006-09-08: returning and empty SelectResult to make
- # the callsites happy as bjorn suggested. However it would be
- # much clearer if we have something like SQLBase.empty() for this
- return BinaryPackageBuild.select("2=1")
+ return EmptyResultSet()
clauseTables = []
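The dependency check above reduces to "does any returned candidate satisfy the constraint"; a standalone sketch of that loop (check stands in for _checkDependencyVersion, candidates for the result of findDepCandidates):

    def is_dependency_satisfied(candidates, version, relation, check):
        # Accept as soon as one published candidate meets the version
        # constraint; an empty result set means the dependency is unmet.
        for candidate in candidates:
            if check(candidate.binarypackagerelease.version,
                     version, relation):
                return True
        return False
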
=== modified file 'lib/lp/soyuz/model/publishing.py'
--- lib/lp/soyuz/model/publishing.py 2010-07-15 14:20:49 +0000
+++ lib/lp/soyuz/model/publishing.py 2010-07-20 19:16:36 +0000
@@ -83,12 +83,6 @@
def publish(self, diskpool, log):
"""See IFilePublishing."""
- # DDEB publication for PPAs is temporarily disabled, see bug #399444.
- if (self.archive.is_ppa and
- self.libraryfilealiasfilename.endswith('.ddeb')):
- log.debug('Skipping DDEB disk publication.')
- return
-
# XXX cprov 2006-06-12 bug=49510: The encode should not be needed
# when retrieving data from DB.
source = self.sourcepackagename.encode('utf-8')
=== modified file 'lib/lp/soyuz/stories/ppa/xx-ppa-workflow.txt'
--- lib/lp/soyuz/stories/ppa/xx-ppa-workflow.txt 2010-06-16 21:49:38 +0000
+++ lib/lp/soyuz/stories/ppa/xx-ppa-workflow.txt 2010-07-20 19:16:36 +0000
@@ -402,25 +402,13 @@
'deb not_a_url' is not a complete and valid sources.list entry
-When the archive is private, the buildd secret must also be set, or an
-error is issued:
-
- >>> admin_browser.getControl(
- ... name="field.external_dependencies").value = ""
- >>> admin_browser.getControl(name="field.private").value = True
- >>> admin_browser.getControl(name="field.buildd_secret").value = ""
- >>> admin_browser.getControl("Save").click()
-
- >>> for error in get_feedback_messages(admin_browser.contents):
- ... print error
- There is 1 error.
- Required for private archives.
-
-Conversely, setting the buildd secret for non-private archives also generates
+Setting the buildd secret for non-private archives also generates
an error. Because the "commercial" flag is also currently set, removing
privacy will also trigger a validation error because the commercial flag can
only be set on private archives:
+ >>> admin_browser.getControl(
+ ... name="field.external_dependencies").value = ""
>>> admin_browser.getControl(name="field.private").value = False
>>> admin_browser.getControl(name="field.buildd_secret").value = "secret"
>>> admin_browser.getControl("Save").click()
=== modified file 'lib/lp/soyuz/tests/test_archive.py'
--- lib/lp/soyuz/tests/test_archive.py 2010-07-12 13:32:53 +0000
+++ lib/lp/soyuz/tests/test_archive.py 2010-07-20 19:16:36 +0000
@@ -4,9 +4,10 @@
"""Test Archive features."""
from datetime import date, datetime, timedelta
+import unittest
+
import pytz
-import unittest
-
+import transaction
from zope.component import getUtility
from zope.security.interfaces import Unauthorized
from zope.security.proxy import removeSecurityProxy
@@ -1099,7 +1100,141 @@
login("commercial-member@xxxxxxxxxxxxx")
self.setCommercial(self.archive, True)
self.assertTrue(self.archive.commercial)
-
+
+
+class TestFindDepCandidates(TestCaseWithFactory):
+ """Tests for Archive.findDepCandidates."""
+
+ layer = LaunchpadZopelessLayer
+
+ def setUp(self):
+ super(TestFindDepCandidates, self).setUp()
+ self.archive = self.factory.makeArchive()
+ self.publisher = SoyuzTestPublisher()
+ login('admin@xxxxxxxxxxxxx')
+ self.publisher.prepareBreezyAutotest()
+
+ def assertDep(self, arch_tag, name, expected, archive=None,
+ pocket=PackagePublishingPocket.RELEASE, component=None,
+ source_package_name='something-new'):
+ """Helper to check that findDepCandidates works.
+
+ Searches for the given dependency name in the given architecture and
+ archive, and compares it to the given expected value.
+ The archive defaults to self.archive.
+
+ Also commits, since findDepCandidates uses the slave store.
+ """
+ transaction.commit()
+
+ if component is None:
+ component = getUtility(IComponentSet)['main']
+ if archive is None:
+ archive = self.archive
+
+ self.assertEquals(
+ list(
+ archive.findDepCandidates(
+ self.publisher.distroseries[arch_tag], pocket, component,
+ source_package_name, name)),
+ expected)
+
+ def test_finds_candidate_in_same_archive(self):
+ # A published candidate in the same archive should be found.
+ bins = self.publisher.getPubBinaries(
+ binaryname='foo', archive=self.archive,
+ status=PackagePublishingStatus.PUBLISHED)
+ self.assertDep('i386', 'foo', [bins[0]])
+ self.assertDep('hppa', 'foo', [bins[1]])
+
+ def test_does_not_find_pending_publication(self):
+ # A pending candidate in the same archive should not be found.
+ bins = self.publisher.getPubBinaries(
+ binaryname='foo', archive=self.archive)
+ self.assertDep('i386', 'foo', [])
+
+ def test_ppa_searches_primary_archive(self):
+ # PPA searches implicitly look in the primary archive too.
+ self.assertEquals(self.archive.purpose, ArchivePurpose.PPA)
+ self.assertDep('i386', 'foo', [])
+
+ bins = self.publisher.getPubBinaries(
+ binaryname='foo', archive=self.archive.distribution.main_archive,
+ status=PackagePublishingStatus.PUBLISHED)
+
+ self.assertDep('i386', 'foo', [bins[0]])
+
+ def test_searches_dependencies(self):
+ # Candidates from archives on which the target explicitly depends
+ # should be found.
+ bins = self.publisher.getPubBinaries(
+ binaryname='foo', archive=self.archive,
+ status=PackagePublishingStatus.PUBLISHED)
+ other_archive = self.factory.makeArchive()
+ self.assertDep('i386', 'foo', [], archive=other_archive)
+
+ other_archive.addArchiveDependency(
+ self.archive, PackagePublishingPocket.RELEASE)
+ self.assertDep('i386', 'foo', [bins[0]], archive=other_archive)
+
+ def test_obeys_dependency_pockets(self):
+ # Only packages published in a pocket matching the dependency should
+ # be found.
+ release_bins = self.publisher.getPubBinaries(
+ binaryname='foo-release', archive=self.archive,
+ status=PackagePublishingStatus.PUBLISHED)
+ updates_bins = self.publisher.getPubBinaries(
+ binaryname='foo-updates', archive=self.archive,
+ status=PackagePublishingStatus.PUBLISHED,
+ pocket=PackagePublishingPocket.UPDATES)
+ proposed_bins = self.publisher.getPubBinaries(
+ binaryname='foo-proposed', archive=self.archive,
+ status=PackagePublishingStatus.PUBLISHED,
+ pocket=PackagePublishingPocket.PROPOSED)
+
+ # Temporarily turn our test PPA into a copy archive, so we can
+ # add non-RELEASE dependencies on it.
+ removeSecurityProxy(self.archive).purpose = ArchivePurpose.COPY
+
+ other_archive = self.factory.makeArchive()
+ other_archive.addArchiveDependency(
+ self.archive, PackagePublishingPocket.UPDATES)
+ self.assertDep(
+ 'i386', 'foo-release', [release_bins[0]], archive=other_archive)
+ self.assertDep(
+ 'i386', 'foo-updates', [updates_bins[0]], archive=other_archive)
+ self.assertDep('i386', 'foo-proposed', [], archive=other_archive)
+
+ other_archive.removeArchiveDependency(self.archive)
+ other_archive.addArchiveDependency(
+ self.archive, PackagePublishingPocket.PROPOSED)
+ self.assertDep(
+ 'i386', 'foo-proposed', [proposed_bins[0]], archive=other_archive)
+
+ def test_obeys_dependency_components(self):
+ # Only packages published in a component matching the dependency
+ # should be found.
+ primary = self.archive.distribution.main_archive
+ main_bins = self.publisher.getPubBinaries(
+ binaryname='foo-main', archive=primary, component='main',
+ status=PackagePublishingStatus.PUBLISHED)
+ universe_bins = self.publisher.getPubBinaries(
+ binaryname='foo-universe', archive=primary,
+ component='universe',
+ status=PackagePublishingStatus.PUBLISHED)
+
+ self.archive.addArchiveDependency(
+ primary, PackagePublishingPocket.RELEASE,
+ component=getUtility(IComponentSet)['main'])
+ self.assertDep('i386', 'foo-main', [main_bins[0]])
+ self.assertDep('i386', 'foo-universe', [])
+
+ self.archive.removeArchiveDependency(primary)
+ self.archive.addArchiveDependency(
+ primary, PackagePublishingPocket.RELEASE,
+ component=getUtility(IComponentSet)['universe'])
+ self.assertDep('i386', 'foo-main', [main_bins[0]])
+ self.assertDep('i386', 'foo-universe', [universe_bins[0]])
def test_suite():
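Taken together, these tests pin down the search order: the archive itself, then each explicit archive dependency filtered by its declared pocket and component, plus the primary archive implicitly for PPAs. In outline (simplified stand-in structures, not the Storm query the real method builds):

    from collections import namedtuple

    Pub = namedtuple('Pub', 'name status pocket component')

    def find_dep_candidates(archive, name):
        # `archive` is a dict with 'publications', 'dependencies'
        # (a list of (dep_archive, pocket, component) tuples),
        # 'is_ppa' and 'primary' -- stand-ins for the real models.
        searches = [(archive, None, None)]        # the archive itself
        searches.extend(archive['dependencies'])  # explicit dependencies
        if archive['is_ppa'] and archive['primary'] is not None:
            searches.append((archive['primary'], None, None))
        for dep, pocket, component in searches:
            for pub in dep['publications']:
                if pub.name != name or pub.status != 'PUBLISHED':
                    continue  # pending publications never count
                if pocket is not None and pub.pocket != pocket:
                    continue  # dependency restricted to one pocket
                if component is not None and pub.component != component:
                    continue  # dependency restricted to one component
                yield pub

    primary = {'publications': [Pub('foo', 'PUBLISHED', 'RELEASE', 'main')],
               'dependencies': [], 'is_ppa': False, 'primary': None}
    ppa = {'publications': [], 'dependencies': [],
           'is_ppa': True, 'primary': primary}
    print list(find_dep_candidates(ppa, 'foo'))  # found via the primary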
=== modified file 'lib/lp/soyuz/tests/test_binarypackagebuild.py'
--- lib/lp/soyuz/tests/test_binarypackagebuild.py 2010-06-21 07:26:51 +0000
+++ lib/lp/soyuz/tests/test_binarypackagebuild.py 2010-07-20 19:16:36 +0000
@@ -164,13 +164,13 @@
Return an `IBinaryPackageBuild` in MANUALDEPWAIT state and depending on a
binary that exists and is reachable.
"""
- test_publisher = SoyuzTestPublisher()
- test_publisher.prepareBreezyAutotest()
+ self.publisher = SoyuzTestPublisher()
+ self.publisher.prepareBreezyAutotest()
- depwait_source = test_publisher.getPubSource(
+ depwait_source = self.publisher.getPubSource(
sourcename='depwait-source')
- test_publisher.getPubBinaries(
+ self.publisher.getPubBinaries(
binaryname='dep-bin',
status=PackagePublishingStatus.PUBLISHED)
@@ -273,6 +273,41 @@
depwait_build.updateDependencies()
self.assertEquals(depwait_build.dependencies, '')
+ def testVersionedDependencies(self):
+ # `IBinaryPackageBuild.updateDependencies` supports versioned
+ # dependencies. A build will not be retried unless the candidate
+ # complies with the version restriction.
+ # In this case, dep-bin 666 is available. >> 666 isn't
+ # satisfied, but >= 666 is.
+ depwait_build = self._setupSimpleDepwaitContext()
+ self.layer.txn.commit()
+
+ depwait_build.dependencies = u'dep-bin (>> 666)'
+ depwait_build.updateDependencies()
+ self.assertEquals(depwait_build.dependencies, u'dep-bin (>> 666)')
+ depwait_build.dependencies = u'dep-bin (>= 666)'
+ depwait_build.updateDependencies()
+ self.assertEquals(depwait_build.dependencies, u'')
+
+ def testVersionedDependencyOnOldPublication(self):
+ # `IBinaryPackageBuild.updateDependencies` doesn't just consider
+ # the latest publication. There may be older publications which
+ # satisfy the version constraints (in other archives or pockets).
+ # In this case, dep-bin 666 and 999 are available, so both = 666
+ # and = 999 are satisfied.
+ depwait_build = self._setupSimpleDepwaitContext()
+ self.publisher.getPubBinaries(
+ binaryname='dep-bin', version='999',
+ status=PackagePublishingStatus.PUBLISHED)
+ self.layer.txn.commit()
+
+ depwait_build.dependencies = u'dep-bin (= 666)'
+ depwait_build.updateDependencies()
+ self.assertEquals(depwait_build.dependencies, u'')
+ depwait_build.dependencies = u'dep-bin (= 999)'
+ depwait_build.updateDependencies()
+ self.assertEquals(depwait_build.dependencies, u'')
+
class BaseTestCaseWithThreeBuilds(TestCaseWithFactory):
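The relations used here ('>>', '>=', '=') carry Debian semantics: strictly greater, greater or equal, exactly equal. The retry rule the tests describe is: a dependency is dropped as soon as any published version of the named binary satisfies its relation. A toy illustration, with plain integer comparison standing in for proper Debian version ordering (which real code would take from apt):

    def satisfies(version, relation, wanted):
        # Toy: ints stand in for Debian version comparison.
        result = cmp(int(version), int(wanted))
        return {'<<': result < 0, '<=': result <= 0, '=': result == 0,
                '>=': result >= 0, '>>': result > 0}[relation]

    def still_unsatisfied(dependencies, published_versions):
        # Keep only the dependencies no published version satisfies.
        remaining = []
        for name, relation, wanted in dependencies:
            candidates = published_versions.get(name, [])
            if not any(satisfies(v, relation, wanted)
                       for v in candidates):
                remaining.append((name, relation, wanted))
        return remaining

    # With dep-bin 666 and 999 both published, '= 666' and '= 999' are
    # satisfied, but '>> 999' would stay in the depwait list:
    deps = [('dep-bin', '=', '666'), ('dep-bin', '>>', '999')]
    print still_unsatisfied(deps, {'dep-bin': ['666', '999']})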
=== modified file 'lib/lp/soyuz/tests/test_publishing.py'
--- lib/lp/soyuz/tests/test_publishing.py 2010-07-15 14:19:40 +0000
+++ lib/lp/soyuz/tests/test_publishing.py 2010-07-20 19:16:36 +0000
@@ -267,7 +267,8 @@
pub_source=None,
version='666',
architecturespecific=False,
- builder=None):
+ builder=None,
+ component='main'):
"""Return a list of binary publishing records."""
if distroseries is None:
distroseries = self.distroseries
@@ -285,7 +286,8 @@
pub_source = self.getPubSource(
sourcename=sourcename, status=status, pocket=pocket,
archive=archive, distroseries=distroseries,
- version=version, architecturehintlist=architecturehintlist)
+ version=version, architecturehintlist=architecturehintlist,
+ component=component)
else:
archive = pub_source.archive
@@ -579,24 +581,6 @@
pool_path = "%s/main/f/foo/foo-bin_666_all.deb" % self.pool_dir
self.assertEqual(open(pool_path).read().strip(), 'Hello world')
- def test_publish_ddeb_for_ppas(self):
- # DDEB publications in PPAs result in a PUBLISHED publishing record
- # but the corresponding files are *not* dumped in the disk pool/.
- cprov = getUtility(IPersonSet).getByName('cprov')
- pub_binary = self.getPubBinaries(
- filecontent='Hello world', format=BinaryPackageFormat.DDEB,
- archive=cprov.archive)[0]
-
- # Publication happens in the database domain.
- pub_binary.publish(self.disk_pool, self.logger)
- self.assertEqual(
- PackagePublishingStatus.PUBLISHED,
- pub_binary.status)
-
- # But the DDEB isn't dumped to the repository pool/.
- pool_path = "%s/main/f/foo/foo-bin_666_all.ddeb" % self.pool_dir
- self.assertFalse(os.path.exists(pool_path))
-
def testPublishingOverwriteFileInPool(self):
"""Test if publishOne refuses to overwrite a file in pool.
=== modified file 'lib/lp/translations/browser/potemplate.py'
--- lib/lp/translations/browser/potemplate.py 2010-06-03 16:08:33 +0000
+++ lib/lp/translations/browser/potemplate.py 2010-07-20 19:16:36 +0000
@@ -806,6 +806,8 @@
productseries = None
label = "Translation templates"
page_title = "All templates"
+ can_edit = None
+ can_admin = None
def initialize(self, series, is_distroseries=True):
self.is_distroseries = is_distroseries
@@ -813,6 +815,10 @@
self.distroseries = series
else:
self.productseries = series
+ self.can_admin = check_permission(
+ 'launchpad.TranslationsAdmin', series)
+ self.can_edit = (
+ self.can_admin or check_permission('launchpad.Edit', series))
def iter_templates(self):
potemplateset = getUtility(IPOTemplateSet)
@@ -826,10 +832,3 @@
return "active-template"
else:
return "inactive-template"
-
- def isVisible(self, template):
- if (template.iscurrent or
- check_permission('launchpad.Edit', template)):
- return True
- else:
- return False
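The per-row isVisible() method goes away in favour of two flags computed once in initialize(), so rendering the table costs two permission checks instead of one per template row. Schematically (a self-contained sketch; the stub below stands in for Launchpad's check_permission helper):

    def check_permission(permission, context):
        # Always-deny stub standing in for the real helper.
        return False

    class TemplatesViewSketch:
        # Cache permission checks once per request, not per row.
        can_edit = None
        can_admin = None

        def initialize(self, series):
            self.can_admin = check_permission(
                'launchpad.TranslationsAdmin', series)
            self.can_edit = (
                self.can_admin
                or check_permission('launchpad.Edit', series))

        def row_visible(self, template_is_current):
            # The same test the page template now does inline:
            # "view.can_edit or template.iscurrent".
            return self.can_edit or template_is_current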
=== modified file 'lib/lp/translations/model/potemplate.py'
--- lib/lp/translations/model/potemplate.py 2010-07-16 14:41:10 +0000
+++ lib/lp/translations/model/potemplate.py 2010-07-20 19:16:36 +0000
@@ -461,7 +461,7 @@
"POFile.potemplate = %d AND "
"POFile.variant IS NULL" % self.id,
clauseTables=['POFile', 'Language'],
- distinct=True
+ distinct=True,
)
def getPOFileByPath(self, path):
=== modified file 'lib/lp/translations/scripts/validate_translations_file.py'
--- lib/lp/translations/scripts/validate_translations_file.py 2010-01-06 06:04:54 +0000
+++ lib/lp/translations/scripts/validate_translations_file.py 2010-07-20 19:16:36 +0000
@@ -15,8 +15,9 @@
from canonical.launchpad import scripts
from lp.translations.utilities.gettext_po_parser import POParser
+from lp.translations.utilities.mozilla_dtd_parser import DtdFile
from lp.translations.utilities.mozilla_xpi_importer import (
- DtdFile, MozillaZipImportParser)
+ MozillaZipImportParser)
from lp.translations.utilities.xpi_manifest import XpiManifest
=== modified file 'lib/lp/translations/stories/standalone/xx-series-templates.txt'
--- lib/lp/translations/stories/standalone/xx-series-templates.txt 2010-02-16 13:26:21 +0000
+++ lib/lp/translations/stories/standalone/xx-series-templates.txt 2010-07-20 19:16:36 +0000
@@ -38,9 +38,9 @@
>>> table = find_tag_by_id(anon_browser.contents, 'templates_table')
>>> print extract_text(table)
- Priority Source package Template name Length Languages Updated
- 100 evolution evolution-2.2 22 2 2005-05-06
- 0 evolution man 1 1 2006-08-14 ...
+ Priority Source package Template name Length Updated
+ 100 evolution evolution-2.2 22 2005-05-06
+ 0 evolution man 1 2006-08-14 ...
Logged-in users see a link to all the active translation templates
on a distribution series translation page.
@@ -67,10 +67,10 @@
>>> table = find_tag_by_id(admin_browser.contents, 'templates_table')
>>> print extract_text(table)
- Priority Source package ... Updated Actions
- 0 evolution ... 2007-01-05 Edit Upload Download Administer
+ Priority Source package ... Updated Actions
+ 0 evolution ... 2007-01-05 Edit Upload Download Administer
100 ...
- 0 pmount ... 2005-05-06 Edit Upload Download Administer
+ 0 pmount ... 2005-05-06 Edit Upload Download Administer
Translation group owners can administer all templates for their distribution,
including those that are currently disabled.
@@ -84,30 +84,38 @@
>>> utc_browser = setupDTCBrowser()
>>> utc_browser.open(
... 'http://translations.launchpad.dev/ubuntu/hoary/+templates')
- >>> utc_browser.getLink(url='ubuntu/hoary/+source/evolution/+pots/evolution-2.2/+edit').click()
+ >>> utc_browser.getLink(
+ ... url='ubuntu/hoary/+source/evolution/'
+ ... '+pots/evolution-2.2/+edit').click()
>>> print utc_browser.url
- http://translations.launchpad.dev/ubuntu/hoary/+source/evolution/+pots/evolution-2.2/+edit
+ http://.../ubuntu/hoary/+source/evolution/+pots/evolution-2.2/+edit
>>> utc_browser.open(
... 'http://translations.launchpad.dev/ubuntu/hoary/+templates')
- >>> utc_browser.getLink(url='/ubuntu/hoary/+source/evolution/+pots/evolution-2.2/+admin').click()
+ >>> utc_browser.getLink(
+ ... url='/ubuntu/hoary/+source/evolution/'
+ ... '+pots/evolution-2.2/+admin').click()
>>> print utc_browser.url
- http://translations.launchpad.dev/ubuntu/hoary/+source/evolution/+pots/evolution-2.2/+admin
+ http://.../ubuntu/hoary/+source/evolution/+pots/evolution-2.2/+admin
Trying to edit/administer disabled templates brings them to the
appropriate page.
>>> utc_browser.open(
... 'http://translations.launchpad.dev/ubuntu/hoary/+templates')
- >>> utc_browser.getLink(url='ubuntu/hoary/+source/evolution/+pots/disabled-template/+edit').click()
+ >>> utc_browser.getLink(
+ ... url='ubuntu/hoary/+source/evolution/'
+ ... '+pots/disabled-template/+edit').click()
>>> print utc_browser.url
- http://translations.launchpad.dev/ubuntu/hoary/+source/evolution/+pots/disabled-template/+edit
+ http://.../ubuntu/hoary/+source/evolution/+pots/disabled-template/+edit
>>> utc_browser.open(
... 'http://translations.launchpad.dev/ubuntu/hoary/+templates')
- >>> utc_browser.getLink(url='/ubuntu/hoary/+source/evolution/+pots/disabled-template/+admin').click()
+ >>> utc_browser.getLink(
+ ... url='/ubuntu/hoary/+source/evolution/'
+ ... '+pots/disabled-template/+admin').click()
>>> print utc_browser.url
- http://translations.launchpad.dev/ubuntu/hoary/+source/evolution/+pots/disabled-template/+admin
+ http://.../ubuntu/hoary/+source/evolution/+pots/disabled-template/+admin
Links to the templates
@@ -174,10 +182,10 @@
>>> table = find_tag_by_id(user_browser.contents, 'templates_table')
>>> print extract_text(table)
- Priority Template name Length Languages Updated Actions
- 0 at-the-top 0 0 ... Download
- 0 evolution-2.2 22 1 2005-08-25 Download
- 0 evolutio... 8 1 2006-12-13 Download
+ Priority Template name Length Updated Actions
+ 0 at-the-top 0 ... Download
+ 0 evolution-2.2 22 2005-08-25 Download
+ 0 evolutio... 8 2006-12-13 Download
If an administrator views this page, links to the templates admin page are
shown, too.
@@ -186,10 +194,10 @@
... 'http://translations.launchpad.dev/evolution/trunk/+templates')
>>> table = find_tag_by_id(admin_browser.contents, 'templates_table')
>>> print extract_text(table)
- Priority Template name ... Updated Actions
- 0 at-the-top ... ... Edit Upload Download Administer
- 0 evolution-2.2 ... 2005-08-25 Edit Upload Download Administer
- 0 evolutio... ... 2006-12-13 Edit Upload Download Administer
+ Priority Template name ... Updated Actions
+ 0 at-the-top ... ... Edit Upload Download Administer
+ 0 evolution-2.2 ... 2005-08-25 Edit Upload Download Administer
+ 0 evolutio... ... 2006-12-13 Edit Upload Download Administer
Links to the templates
=== modified file 'lib/lp/translations/templates/object-templates.pt'
--- lib/lp/translations/templates/object-templates.pt 2010-02-15 16:00:01 +0000
+++ lib/lp/translations/templates/object-templates.pt 2010-07-20 19:16:36 +0000
@@ -5,7 +5,7 @@
metal:use-macro="view/macro:page/main_only"
>
<body>
- <div metal:fill-slot="head_epilogue">
+ <tal:head_epilogue metal:fill-slot="head_epilogue">
<style type="text/css">
.inactive_links a{
background: none;
@@ -53,7 +53,7 @@
});
});
</script>
- </div>
+ </tal:head_epilogue>
<div metal:fill-slot="main">
<div class="translation-help-links">
<a href="https://help.launchpad.net/Translations"
@@ -79,7 +79,6 @@
class="sourcepackage_column">Source package</th>
<th class="template_column">Template name</th>
<th class="length_column">Length</th>
- <th class="languages_column">Languages</th>
<th class="lastupdate_column">Updated</th>
<th class="actions_column"
tal:condition="context/required:launchpad.AnyPerson">
@@ -89,8 +88,9 @@
<tbody>
<tal:templates repeat="template view/iter_templates">
<tr tal:define="
- inactive_css_class python:view.rowCSSClass(template)"
- tal:condition="python: view.isVisible(template)"
+ inactive_css_class python:view.rowCSSClass(template);
+ template_url template/fmt:url"
+ tal:condition="python: view.can_edit or template.iscurrent"
tal:attributes="
class string: template_row ${inactive_css_class}">
<td class="priority_column"
@@ -100,7 +100,7 @@
class="sourcepackage_column">Source package
</td>
<td class="template_column">
- <a tal:attributes="href template/fmt:url">
+ <a tal:attributes="href template_url">
<span tal:content="template/name">Template name</span>
</a>
<tal:inactive condition="not: template/iscurrent">
@@ -108,9 +108,7 @@
</tal:inactive>
</td>
<td class="length_column"
- tal:content="template/getPOTMsgSetsCount">1777</td>
- <td class="languages_column"
- tal:content="template/language_count">777</td>
+ tal:content="template/messagecount">1777</td>
<td class="lastupdate_column">
<span class="sortkey"
tal:condition="template/date_last_updated"
@@ -130,18 +128,21 @@
<td class="actions_column"
tal:condition="context/required:launchpad.AnyPerson">
<div class="template_links">
- <a tal:replace="
- structure template/menu:translations/edit/fmt:link
- " />
- <a tal:replace="
- structure template/menu:translations/upload/fmt:link
- " />
- <a tal:replace="
- structure template/menu:translations/download/fmt:link
- " />
- <a tal:replace="
- structure template/menu:translations/administer/fmt:link
- " />
+ <a class="sprite edit"
+ tal:condition="view/can_edit"
+ tal:attributes="
+ href string: ${template_url}/+edit">Edit</a>
+ <a class="sprite add"
+ tal:condition="view/can_edit"
+ tal:attributes="
+ href string: ${template_url}/+upload">Upload</a>
+ <a class="sprite download"
+ tal:attributes="
+ href string: ${template_url}/+export">Download</a>
+ <a class="sprite edit"
+ tal:condition="view/can_admin"
+ tal:attributes="
+ href string: ${template_url}/+admin">Administer</a>
</div>
</td>
</tr>
=== modified file 'lib/lp/translations/utilities/gettext_po_importer.py'
--- lib/lp/translations/utilities/gettext_po_importer.py 2009-07-17 00:26:05 +0000
+++ lib/lp/translations/utilities/gettext_po_importer.py 2010-07-20 19:16:36 +0000
@@ -1,19 +1,19 @@
-# Copyright 2009 Canonical Ltd. This software is licensed under the
+# Copyright 2009-2010 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
__metaclass__ = type
__all__ = [
- 'GettextPOImporter'
+ 'GettextPOImporter',
]
from zope.component import getUtility
from zope.interface import implements
+from lp.translations.interfaces.translationfileformat import (
+ TranslationFileFormat)
from lp.translations.interfaces.translationimporter import (
ITranslationFormatImporter)
-from lp.translations.interfaces.translationfileformat import (
- TranslationFileFormat)
from lp.translations.utilities.gettext_po_parser import (
POParser, POHeader)
from canonical.librarian.interfaces import ILibrarianClient
=== added file 'lib/lp/translations/utilities/mozilla_dtd_parser.py'
--- lib/lp/translations/utilities/mozilla_dtd_parser.py 1970-01-01 00:00:00 +0000
+++ lib/lp/translations/utilities/mozilla_dtd_parser.py 2010-07-20 19:16:36 +0000
@@ -0,0 +1,144 @@
+# Copyright 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Importer for DTD files as found in XPI archives."""
+
+__metaclass__ = type
+__all__ = [
+ 'DtdFile',
+ ]
+
+from old_xmlplus.parsers.xmlproc import dtdparser, xmldtd, utils
+
+from lp.translations.interfaces.translationimporter import (
+ TranslationFormatInvalidInputError,
+ TranslationFormatSyntaxError)
+from lp.translations.utilities.translation_common_format import (
+ TranslationMessageData)
+from lp.translations.interfaces.translations import TranslationConstants
+
+
+class MozillaDtdConsumer(xmldtd.WFCDTD):
+ """Mozilla DTD translatable message parser.
+
+ msgids are stored as entities. This class extracts them along
+ with their translations, comments, and source references.
+ """
+ def __init__(self, parser, filename, chrome_path, messages):
+ self.started = False
+ self.last_comment = None
+ self.chrome_path = chrome_path
+ self.messages = messages
+ self.filename = filename
+ xmldtd.WFCDTD.__init__(self, parser)
+
+ def dtd_start(self):
+ """See `xmldtd.WFCDTD`."""
+ self.started = True
+
+ def dtd_end(self):
+ """See `xmldtd.WFCDTD`."""
+ self.started = False
+
+ def handle_comment(self, contents):
+ """See `xmldtd.WFCDTD`."""
+ if not self.started:
+ return
+
+ if self.last_comment is not None:
+ self.last_comment += contents
+ elif len(contents) > 0:
+ self.last_comment = contents
+
+ if self.last_comment and not self.last_comment.endswith('\n'):
+ # Comments must always end with a newline.
+ self.last_comment += '\n'
+
+ def new_general_entity(self, name, value):
+ """See `xmldtd.WFCDTD`."""
+ if not self.started:
+ return
+
+ message = TranslationMessageData()
+ message.msgid_singular = name
+ # CarlosPerelloMarin 20070326: the xmldtd parser parses inline,
+ # so the content ends up on a single line and we have no way to
+ # show a line number in the source reference.
+ message.file_references_list = ["%s(%s)" % (self.filename, name)]
+ message.addTranslation(TranslationConstants.SINGULAR_FORM, value)
+ message.singular_text = value
+ message.context = self.chrome_path
+ message.source_comment = self.last_comment
+ self.messages.append(message)
+ self.started += 1
+ self.last_comment = None
+
+
+class DtdErrorHandler(utils.ErrorCounter):
+ """Error handler for the DTD parser."""
+ filename = None
+
+ def error(self, msg):
+ raise TranslationFormatSyntaxError(
+ filename=self.filename, message=msg)
+
+ def fatal(self, msg):
+ raise TranslationFormatInvalidInputError(
+ filename=self.filename, message=msg)
+
+
+class DummyDtdFile:
+ """"File" returned when DTD SYSTEM entity tries to include a file."""
+ done = False
+
+ def read(self, *args, **kwargs):
+ """Minimally satisfy attempt to read an included DTD file."""
+ if self.done:
+ return ''
+ else:
+ self.done = True
+ return '<!-- SYSTEM entities not supported. -->'
+
+ def close(self):
+ """Satisfy attempt to close file."""
+ pass
+
+
+class DtdInputSourceFactoryStub:
+ """Replace the class the DTD parser uses to include other DTD files."""
+
+ def create_input_source(self, sysid):
+ """Minimally satisfy attempt to open an included DTD file.
+
+ This is called when the DTD parser hits a SYSTEM entity.
+ """
+ return DummyDtdFile()
+
+
+class DtdFile:
+ """Class for reading translatable messages from a .dtd file.
+
+ It uses DTDParser which fills self.messages with parsed messages.
+ """
+ def __init__(self, filename, chrome_path, content):
+ self.messages = []
+ self.filename = filename
+ self.chrome_path = chrome_path
+
+ # .dtd files are supposed to use UTF-8 encoding; any other
+ # encoding violates the standard, so we reject the file.
+ try:
+ content = content.decode('utf-8')
+ except UnicodeDecodeError:
+ raise TranslationFormatInvalidInputError, (
+ 'Content is not valid UTF-8 text')
+
+ error_handler = DtdErrorHandler()
+ error_handler.filename = filename
+
+ parser = dtdparser.DTDParser()
+ parser.set_error_handler(error_handler)
+ parser.set_inputsource_factory(DtdInputSourceFactoryStub())
+ dtd = MozillaDtdConsumer(parser, filename, chrome_path, self.messages)
+ parser.set_dtd_consumer(dtd)
+ parser.parse_string(content)
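Since the class moves verbatim, existing callers only need the new import path. A usage sketch (made-up DTD content; this only runs inside a Launchpad tree, where old_xmlplus is available):

    from lp.translations.utilities.mozilla_dtd_parser import DtdFile

    content = ('<!-- Greeting shown on startup. -->\n'
               '<!ENTITY welcome.label "Welcome!">\n')
    dtd = DtdFile('en-US/test.dtd', 'browser/chrome/test', content)
    for message in dtd.messages:
        # Each entity becomes one TranslationMessageData.
        print message.msgid_singular, message.context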
=== modified file 'lib/lp/translations/utilities/mozilla_xpi_importer.py'
--- lib/lp/translations/utilities/mozilla_xpi_importer.py 2010-01-05 13:44:13 +0000
+++ lib/lp/translations/utilities/mozilla_xpi_importer.py 2010-07-20 19:16:36 +0000
@@ -4,7 +4,6 @@
__metaclass__ = type
__all__ = [
- 'DtdFile',
'MozillaXpiImporter',
'MozillaZipImportParser',
]
@@ -12,8 +11,6 @@
from cStringIO import StringIO
import textwrap
-from old_xmlplus.parsers.xmlproc import dtdparser, xmldtd, utils
-
from zope.component import getUtility
from zope.interface import implements
@@ -27,13 +24,13 @@
from lp.translations.utilities.translation_common_format import (
TranslationFileData,
TranslationMessageData)
+from lp.translations.utilities.mozilla_dtd_parser import DtdFile
from lp.translations.utilities.mozilla_zip import (
MozillaZipTraversal)
from lp.translations.utilities.xpi_header import XpiHeader
from canonical.librarian.interfaces import ILibrarianClient
-
def add_source_comment(message, comment):
"""Add the given comment inside message.source_comment."""
if message.source_comment:
@@ -160,130 +157,6 @@
self.messages.append(message)
-class MozillaDtdConsumer(xmldtd.WFCDTD):
- """Mozilla DTD translatable message parser.
-
- msgids are stored as entities. This class extracts it along
- with translations, comments and source references.
- """
- def __init__(self, parser, filename, chrome_path, messages):
- self.started = False
- self.last_comment = None
- self.chrome_path = chrome_path
- self.messages = messages
- self.filename = filename
- xmldtd.WFCDTD.__init__(self, parser)
-
- def dtd_start(self):
- """See `xmldtd.WFCDTD`."""
- self.started = True
-
- def dtd_end(self):
- """See `xmldtd.WFCDTD`."""
- self.started = False
-
- def handle_comment(self, contents):
- """See `xmldtd.WFCDTD`."""
- if not self.started:
- return
-
- if self.last_comment is not None:
- self.last_comment += contents
- elif len(contents) > 0:
- self.last_comment = contents
-
- if self.last_comment and not self.last_comment.endswith('\n'):
- # Comments must end always with a new line.
- self.last_comment += '\n'
-
- def new_general_entity(self, name, value):
- """See `xmldtd.WFCDTD`."""
- if not self.started:
- return
-
- message = TranslationMessageData()
- message.msgid_singular = name
- # CarlosPerelloMarin 20070326: xmldtd parser does an inline
- # parsing which means that the content is all in a single line so we
- # don't have a way to show the line number with the source reference.
- message.file_references_list = ["%s(%s)" % (self.filename, name)]
- message.addTranslation(TranslationConstants.SINGULAR_FORM, value)
- message.singular_text = value
- message.context = self.chrome_path
- message.source_comment = self.last_comment
- self.messages.append(message)
- self.started += 1
- self.last_comment = None
-
-
-class DtdErrorHandler(utils.ErrorCounter):
- """Error handler for the DTD parser."""
- filename = None
-
- def error(self, msg):
- raise TranslationFormatSyntaxError(
- filename=self.filename, message=msg)
-
- def fatal(self, msg):
- raise TranslationFormatInvalidInputError(
- filename=self.filename, message=msg)
-
-
-class DummyDtdFile:
- """"File" returned when DTD SYSTEM entity tries to include a file."""
- done = False
-
- def read(self, *args, **kwargs):
- """Minimally satisfy attempt to read an included DTD file."""
- if self.done:
- return ''
- else:
- self.done = True
- return '<!-- SYSTEM entities not supported. -->'
-
- def close(self):
- """Satisfy attempt to close file."""
- pass
-
-
-class DtdInputSourceFactoryStub:
- """Replace the class the DTD parser uses to include other DTD files."""
-
- def create_input_source(self, sysid):
- """Minimally satisfy attempt to open an included DTD file.
-
- This is called when the DTD parser hits a SYSTEM entity.
- """
- return DummyDtdFile()
-
-
-class DtdFile:
- """Class for reading translatable messages from a .dtd file.
-
- It uses DTDParser which fills self.messages with parsed messages.
- """
- def __init__(self, filename, chrome_path, content):
- self.messages = []
- self.filename = filename
- self.chrome_path = chrome_path
-
- # .dtd files are supposed to be using UTF-8 encoding, if the file is
- # using another encoding, it's against the standard so we reject it
- try:
- content = content.decode('utf-8')
- except UnicodeDecodeError:
- raise TranslationFormatInvalidInputError, (
- 'Content is not valid UTF-8 text')
-
- error_handler = DtdErrorHandler()
- error_handler.filename = filename
-
- parser = dtdparser.DTDParser()
- parser.set_error_handler(error_handler)
- parser.set_inputsource_factory(DtdInputSourceFactoryStub())
- dtd = MozillaDtdConsumer(parser, filename, chrome_path, self.messages)
- parser.set_dtd_consumer(dtd)
- parser.parse_string(content)
def valid_property_msgid(msgid):
=== modified file 'lib/lp/translations/utilities/tests/test_xpi_dtd_format.py'
--- lib/lp/translations/utilities/tests/test_xpi_dtd_format.py 2009-07-17 00:26:05 +0000
+++ lib/lp/translations/utilities/tests/test_xpi_dtd_format.py 2010-07-20 19:16:36 +0000
@@ -5,7 +5,7 @@
import unittest
-from lp.translations.utilities.mozilla_xpi_importer import DtdFile
+from lp.translations.utilities.mozilla_dtd_parser import DtdFile
from lp.translations.interfaces.translationimporter import (
TranslationFormatInvalidInputError)