[Merge] lp:~stub/launchpad/update-storm into lp:launchpad
Stuart Bishop has proposed merging lp:~stub/launchpad/update-storm into lp:launchpad.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
Related bugs:
#352965 Update Storm
https://bugs.launchpad.net/bugs/352965
#388798 further precache work
https://bugs.launchpad.net/bugs/388798
#392016 Use GenerationalCache implementation from Storm 0.15
https://bugs.launchpad.net/bugs/392016
#393625 update-pkgcache using too much memory on staging
https://bugs.launchpad.net/bugs/393625
#670906 In() casts str strings differently to ==
https://bugs.launchpad.net/bugs/670906
Code changes to migrate us to modern psycopg2, such as the one packaged in Lucid, plus delinting.
Rather than change the tests, I elected to cast to Unicode in the main code; fixing the tests might not be enough, as other call sites might still be sending str on untested code paths.
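To illustrate the pattern (a minimal sketch only - the helper name is hypothetical, while Account and EmailAddress are the Storm classes touched in the diff below): bug #670906 is about In() casting str values differently to ==, so coercing to unicode once at the boundary keeps every comparison consistent regardless of what callers send.

    def get_account_by_email(store, email):
        # Callers may pass str or unicode; normalise once so that ==
        # comparisons and is_in() both see the same unicode value.
        email = unicode(email).strip().lower()
        return store.find(
            Account,
            EmailAddress.account == Account.id,
            EmailAddress.email.lower() == email).one()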
--
https://code.launchpad.net/~stub/launchpad/update-storm/+merge/40264
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~stub/launchpad/update-storm into lp:launchpad.
=== modified file 'lib/canonical/launchpad/database/account.py'
--- lib/canonical/launchpad/database/account.py 2010-10-03 15:30:06 +0000
+++ lib/canonical/launchpad/database/account.py 2010-11-07 00:47:50 +0000
@@ -290,10 +290,12 @@
def getByEmail(self, email):
"""See `IAccountSet`."""
- conditions = [EmailAddress.account == Account.id,
- EmailAddress.email.lower() == email.lower().strip()]
store = IStore(Account)
- account = store.find(Account, *conditions).one()
+ account = store.find(
+ Account,
+ EmailAddress.account == Account.id,
+ EmailAddress.email.lower()
+ == unicode(email).strip().lower()).one()
if account is None:
raise LookupError(email)
return account
=== modified file 'lib/canonical/launchpad/database/stormsugar.py'
--- lib/canonical/launchpad/database/stormsugar.py 2010-08-20 20:31:18 +0000
+++ lib/canonical/launchpad/database/stormsugar.py 2010-11-07 00:47:50 +0000
@@ -14,16 +14,11 @@
__all__ = [
'ForeignKey',
'ObjectNotFound',
- 'StartsWith',
'Sugar',
'UnknownProperty',
]
-from storm.expr import (
- Like,
- SQLRaw,
- )
from storm.locals import (
Int,
Reference,
@@ -64,22 +59,9 @@
Reference.__init__(self, None, remote_key)
-class StartsWith(Like):
- """Allow Like matching but only at the beginning of a string.
-
- The string is properly escaped.
- """
- def __init__(self, expr, string):
- # Escape instances of !, _, and % so they don't interfere with the
- # underlying LIKE operation. Use ! as the escape character.
- string = string.replace("!", "!!") \
- .replace("_", "!_") \
- .replace("%", "!%")
- Like.__init__(self, expr, string+"%", escape=SQLRaw("'!'"))
-
-
# Use Storm.__metaclass__ because storm.properties.PropertyPublisherMeta isn't
# in an __all__.
+
class Sugary(Storm.__metaclass__):
"""Metaclass that adds support for ForeignKey."""
=== removed file 'lib/canonical/launchpad/database/tests/test_stormextensions.py'
--- lib/canonical/launchpad/database/tests/test_stormextensions.py 2010-08-20 20:31:18 +0000
+++ lib/canonical/launchpad/database/tests/test_stormextensions.py 1970-01-01 00:00:00 +0000
@@ -1,101 +0,0 @@
-# Copyright 2009 Canonical Ltd. This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-"""Tests for extensions in stormsugar, but not stormsugar proper."""
-
-__metaclass__ = type
-
-
-from unittest import TestLoader
-
-from storm.expr import Lower
-from zope.component import getUtility
-
-from canonical.launchpad.database.stormsugar import StartsWith
-from canonical.launchpad.webapp.interfaces import (
- IStoreSelector,
- MAIN_STORE,
- MASTER_FLAVOR,
- )
-from canonical.testing.layers import DatabaseFunctionalLayer
-from lp.registry.model.person import Person
-from lp.testing import TestCaseWithFactory
-
-
-class TestStormExpressions(TestCaseWithFactory):
-
- layer = DatabaseFunctionalLayer
-
- def setUp(self):
- TestCaseWithFactory.setUp(self)
- selector = getUtility(IStoreSelector)
- self.store = selector.get(MAIN_STORE, MASTER_FLAVOR)
-
- def test_StartsWith_SQLGeneration(self):
- from storm.databases.postgres import compile
-
- # Show that the SQL generated uses LIKE with the '!' as the escape
- # character.
- expr = StartsWith("name", "value")
- sql = compile(expr)
- self.assertEqual(sql, "? LIKE ? ESCAPE '!'")
-
- # Unlike Storm's Like, StartsWith does not accept a case_sensitive
- # flag.
- self.assertRaises(TypeError, StartsWith, "name", "value",
- case_sensitive=False)
-
- def test_StartsWithUse(self):
- """StartWith correctly performs searches."""
-
- person1 = self.factory.makePerson(name='aa', displayname="John Doe")
- person2 = self.factory.makePerson(name='bb', displayname="Johan Doe")
- person3 = self.factory.makePerson(name='cc', displayname="Joh%n Doe")
-
- # Successful search from the start of the name.
- expr = StartsWith(Person.displayname, 'John')
- results = self.store.find(Person, expr)
- self.assertEqual([person1], [p for p in results])
-
- # Searching for a missing pattern returns no result.
- expr = StartsWith(Person.displayname, 'John Roe')
- results = self.store.find(Person, expr)
- self.assertEqual([], [p for p in results])
-
-
- # Searching for a non-initial pattern returns no result.
- expr = StartsWith(Person.displayname, 'Roe')
- results = self.store.find(Person, expr)
- self.assertEqual([], [p for p in results])
-
- # Multiple matches are returned.
- expr = StartsWith(Person.displayname, 'Joh')
- results = self.store.find(Person, expr)
- results.order_by('name')
- self.assertEqual([person1, person2, person3], [p for p in results])
-
- # Wildcards are properly escaped. No need for quote_like or
- # equivalent.
- expr = StartsWith(Person.displayname, 'Joh%n')
- results = self.store.find(Person, expr)
- self.assertEqual([person3], [p for p in results])
-
- # Searches are case-sensitive.
- expr = StartsWith(Person.displayname, 'john')
- results = self.store.find(Person, expr)
- self.assertEqual([], [p for p in results])
-
- # Use of .lower allows case-insensitive searching.
- expr = StartsWith(Person.displayname.lower(), 'john')
- results = self.store.find(Person, expr)
- self.assertEqual([person1], [p for p in results])
-
- # Use of Lower allows case-insensitive searching.
- expr = StartsWith(Lower(Person.displayname), 'john')
- results = self.store.find(Person, expr)
- self.assertEqual([person1], [p for p in results])
-
-
-
-def test_suite():
- return TestLoader().loadTestsFromName(__name__)
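The StartsWith helper and its tests above are superseded by Storm's own Comparable.startswith(), which performs the same wildcard escaping and requires a unicode argument - hence the unicode() casts at call sites later in this diff. A minimal sketch, assuming Storm 0.15 or later:

    from storm.databases.postgres import compile

    from lp.registry.model.person import Person

    # startswith() escapes %, _ and ! itself, so no quote_like dance is
    # needed; it compiles to a LIKE with an ESCAPE clause, roughly
    # "lower(Person.displayname) LIKE ? ESCAPE ?".
    expr = Person.displayname.lower().startswith(u'john')
    print compile(expr)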
=== modified file 'lib/canonical/launchpad/vocabularies/dbobjects.py'
--- lib/canonical/launchpad/vocabularies/dbobjects.py 2010-09-28 03:01:37 +0000
+++ lib/canonical/launchpad/vocabularies/dbobjects.py 2010-11-07 00:47:50 +0000
@@ -159,7 +159,7 @@
def search(self, query):
"""Search for web bug trackers."""
- query = query.lower()
+ query = unicode(query).lower()
results = IStore(self._table).find(
self._table, And(
self._filter,
=== modified file 'lib/canonical/launchpad/webapp/pgsession.py'
--- lib/canonical/launchpad/webapp/pgsession.py 2010-08-20 20:31:18 +0000
+++ lib/canonical/launchpad/webapp/pgsession.py 2010-11-07 00:47:50 +0000
@@ -95,7 +95,7 @@
def __init__(self, session_data_container, client_id):
self.session_data_container = session_data_container
- self.client_id = client_id
+ self.client_id = unicode(client_id)
self.lastAccessTime = time.time()
# Update the last access time in the db if it is out of date
@@ -105,7 +105,7 @@
WHERE client_id = ?
AND last_accessed < CURRENT_TIMESTAMP - '%d seconds'::interval
""" % (table_name, session_data_container.resolution)
- self.store.execute(query, (client_id,), noresult=True)
+ self.store.execute(query, (self.client_id,), noresult=True)
def _ensureClientId(self):
if self._have_ensured_client_id:
@@ -170,7 +170,7 @@
def __init__(self, session_data, product_id):
self.session_data = session_data
- self.product_id = product_id
+ self.product_id = unicode(product_id)
self.table_name = (
session_data.session_data_container.session_pkg_data_table_name)
self._populate()
@@ -193,12 +193,14 @@
return self._data_cache[key]
def __setitem__(self, key, value):
+ key = unicode(key)
pickled_value = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
self.session_data._ensureClientId()
- self.store.execute("SELECT set_session_pkg_data(?, ?, ?, ?)",
- (self.session_data.client_id, self.product_id,
- key, pickled_value), noresult=True)
+ self.store.execute(
+ "SELECT set_session_pkg_data(?, ?, ?, ?)",
+ (self.session_data.client_id, self.product_id, key, pickled_value),
+ noresult=True)
# Store the value in the cache too
self._data_cache[key] = value
@@ -219,8 +221,10 @@
query = """
DELETE FROM %s WHERE client_id = ? AND product_id = ? AND key = ?
""" % self.table_name
- self.store.execute(query, (self.session_data.client_id,
- self.product_id, key), noresult=True)
+ self.store.execute(
+ query,
+ (self.session_data.client_id, self.product_id, unicode(key)),
+ noresult=True)
def keys(self):
return self._data_cache.keys()
=== modified file 'lib/canonical/launchpad/webapp/vocabulary.py'
--- lib/canonical/launchpad/webapp/vocabulary.py 2010-08-20 20:31:18 +0000
+++ lib/canonical/launchpad/webapp/vocabulary.py 2010-11-07 00:47:50 +0000
@@ -351,7 +351,7 @@
def search(self, query):
"""Return terms where query is a substring of the name."""
if query:
- clause = CONTAINSSTRING(self._table.q.name, query)
+ clause = CONTAINSSTRING(self._table.q.name, unicode(query))
if self._filter:
clause = AND(clause, self._filter)
return self._table.select(clause, orderBy=self._orderBy)
@@ -384,7 +384,7 @@
if not query:
return self.emptySelectResults()
- query = query.lower()
+ query = unicode(query).lower()
clause = CONTAINSSTRING(self._table.q.name, query)
if self._filter:
clause = AND(clause, self._filter)
=== modified file 'lib/lp/archivepublisher/ftparchive.py'
--- lib/lp/archivepublisher/ftparchive.py 2010-10-17 10:04:15 +0000
+++ lib/lp/archivepublisher/ftparchive.py 2010-11-07 00:47:50 +0000
@@ -8,7 +8,6 @@
from storm.expr import (
Desc,
- In,
Join,
)
from storm.store import EmptyResultSet
@@ -50,6 +49,7 @@
# XXX malcc 2006-09-20 : Move this somewhere useful. If generalised with
# timeout handling and stderr passthrough, could be a single method used for
# this and the similar requirement in test_on_merge.py.
+
def run_subprocess_with_logging(process_and_args, log, prefix):
"""Run a subprocess, gathering the output as it runs and logging it.
@@ -140,6 +140,7 @@
Generates file lists and configuration for apt-ftparchive, and kicks
off generation of the Sources and Releases files.
"""
+
def __init__(self, log, config, diskpool, distro, publisher):
self.log = log
self._config = config
@@ -188,7 +189,6 @@
#
# Empty Pocket Requests
#
-
def createEmptyPocketRequests(self, fullpublish=False):
"""Write out empty file lists etc for pockets.
@@ -247,7 +247,6 @@
#
# Override Generation
#
-
def getSourcesForOverrides(self, distroseries, pocket):
"""Fetch override information about all published sources.
@@ -288,6 +287,7 @@
PackagePublishingStatus.PUBLISHED)
suite = distroseries.getSuite(pocket)
+
def add_suite(result):
name, component, section = result
return (name, suite, component, section)
@@ -336,13 +336,14 @@
(BinaryPackageName.name, Component.name, Section.name,
BinaryPackagePublishingHistory.priority),
BinaryPackagePublishingHistory.archive == self.publisher.archive,
- In(BinaryPackagePublishingHistory.distroarchseriesID,
- architectures_ids),
+ BinaryPackagePublishingHistory.distroarchseriesID.is_in(
+ architectures_ids),
BinaryPackagePublishingHistory.pocket == pocket,
BinaryPackagePublishingHistory.status ==
PackagePublishingStatus.PUBLISHED)
suite = distroseries.getSuite(pocket)
+
def add_suite(result):
name, component, section, priority = result
return (name, suite, component, section, priority.title.lower())
@@ -433,12 +434,14 @@
# Process huge iterations (more than 200k records) in batches.
# See `PublishingTunableLoop`.
self.log.debug("Calculating source overrides")
+
def update_source_override(pub_details):
updateOverride(*pub_details)
process_in_batches(
source_publications, update_source_override, self.log)
self.log.debug("Calculating binary overrides")
+
def update_binary_override(pub_details):
updateOverride(*pub_details)
process_in_batches(
@@ -485,7 +488,7 @@
# Start to write the files out
ef = open(ef_override, "w")
- f = open(main_override , "w")
+ f = open(main_override, "w")
for package, priority, section in bin_overrides:
origin = "\t".join([package, "Origin", "Ubuntu"])
bugs = "\t".join([package, "Bugs",
@@ -538,7 +541,6 @@
#
# File List Generation
#
-
def getSourceFiles(self, distroseries, pocket):
"""Fetch publishing information about all published source files.
@@ -568,6 +570,7 @@
PackagePublishingStatus.PUBLISHED)
suite = distroseries.getSuite(pocket)
+
def add_suite(result):
name, filename, component = result
return (name, suite, filename, component)
@@ -605,6 +608,7 @@
PackagePublishingStatus.PUBLISHED)
suite = distroseries.getSuite(pocket)
+
def add_suite(result):
name, filename, component, architecturetag = result
architecture = 'binary-' + architecturetag
@@ -653,12 +657,14 @@
# Process huge iterations (more than 200K records) in batches.
# See `PublishingTunableLoop`.
self.log.debug("Calculating source filelist.")
+
def update_source_filelist(file_details):
updateFileList(*file_details)
process_in_batches(
sourcefiles, update_source_filelist, self.log)
self.log.debug("Calculating binary filelist.")
+
def update_binary_filelist(file_details):
updateFileList(*file_details)
process_in_batches(
@@ -673,7 +679,8 @@
series, pocket = (
self.distro.getDistroSeriesAndPocket(suite))
if (architecture != 'source' and
- not series.getDistroArchSeries(architecture[7:]).enabled):
+ not series.getDistroArchSeries(
+ architecture[7:]).enabled):
continue
self.writeFileList(architecture, file_names,
suite, component)
@@ -719,7 +726,6 @@
#
# Config Generation
#
-
def generateConfig(self, fullpublish=False):
"""Generate an APT FTPArchive configuration from the provided
config object and the paths we either know or have given to us.
=== modified file 'lib/lp/bugs/model/bug.py'
--- lib/lp/bugs/model/bug.py 2010-11-04 02:32:16 +0000
+++ lib/lp/bugs/model/bug.py 2010-11-07 00:47:50 +0000
@@ -48,7 +48,6 @@
And,
Count,
Func,
- In,
LeftJoin,
Max,
Not,
@@ -224,8 +223,8 @@
:return: A list of tuples, (tag name, open bug count).
"""
- open_statuses_condition = In(
- BugTask.status, sqlvalues(*UNRESOLVED_BUGTASK_STATUSES))
+ open_statuses_condition = BugTask.status.is_in(
+ UNRESOLVED_BUGTASK_STATUSES)
columns = [
BugTag.tag,
Count(),
@@ -422,7 +421,7 @@
"""See `IBug`."""
return Store.of(self).find(
Person,
- In(Person.id, self.user_ids_affected_with_dupes))
+ Person.id.is_in(self.user_ids_affected_with_dupes))
@property
def users_affected_count_with_dupes(self):
@@ -439,9 +438,9 @@
:param include_content: If True retrieve the content for the messages
too.
- :param include_parents: If True retrieve the object for parent messages
- too. If False the parent attribute will be *forced* to None to
- reduce database lookups.
+ :param include_parents: If True retrieve the object for parent
+ messages too. If False the parent attribute will be *forced* to
+ None to reduce database lookups.
"""
# Make all messages be 'in' the main bugtask.
inside = self.default_bugtask
@@ -451,16 +450,18 @@
to_messages = lambda rows: [row[0] for row in rows]
else:
to_messages = lambda rows: rows
+
def eager_load_owners(messages):
- # Because we may have multiple owners, we spend less time in storm
- # with very large bugs by not joining and instead querying a second
- # time. If this starts to show high db time, we can left outer join
- # instead.
+ # Because we may have multiple owners, we spend less time
+ # in storm with very large bugs by not joining and instead
+ # querying a second time. If this starts to show high db
+ # time, we can left outer join instead.
owner_ids = set(message.ownerID for message in messages)
owner_ids.discard(None)
if not owner_ids:
return
list(store.find(Person, Person.id.is_in(owner_ids)))
+
def eager_load_content(messages):
# To avoid the complexity of having multiple rows per
# message, or joining in the database (though perhaps in
@@ -480,11 +481,13 @@
cache = get_property_cache(message)
cache.text_contents = Message.chunks_text(
chunk_map[message.id])
+
def eager_load(rows, slice_info):
messages = to_messages(rows)
eager_load_owners(messages)
if include_content:
eager_load_content(messages)
+
def index_message(row, index):
# convert row to an IndexedMessage
if include_parents:
@@ -913,10 +916,13 @@
def getSubscribersForPerson(self, person):
"""See `IBug`."""
+
assert person is not None
+
def cache_unsubscribed(rows):
if not rows:
self._unsubscribed_cache.add(person)
+
def cache_subscriber(row):
_, subscriber, subscription = row
if subscription.bug_id == self.id:
@@ -1911,15 +1917,17 @@
def attachments(self):
"""See `IBug`.
- This property does eager loading of the index_messages so that the API
- which wants the message_link for the attachment can answer that without
- O(N^2) overhead. As such it is moderately expensive to call (it
- currently retrieves all messages before any attachments, and does this
- when attachments is evaluated, not when the resultset is processed).
+ This property does eager loading of the index_messages so that
+ the API which wants the message_link for the attachment can
+ answer that without O(N^2) overhead. As such it is moderately
+ expensive to call (it currently retrieves all messages before
+ any attachments, and does this when attachments is evaluated,
+ not when the resultset is processed).
"""
message_to_indexed = {}
for message in self._indexed_messages(include_parents=False):
message_to_indexed[message.id] = message
+
def set_indexed_message(row):
attachment = row[0]
# row[1] - the LibraryFileAlias is now in the storm cache and
@@ -2196,7 +2204,7 @@
if bug_numbers is None or len(bug_numbers) < 1:
return EmptyResultSet()
store = IStore(Bug)
- result_set = store.find(Bug, In(Bug.id, bug_numbers))
+ result_set = store.find(Bug, Bug.id.is_in(bug_numbers))
return result_set.order_by('id')
def dangerousGetAllBugs(self):
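The recurring In() to is_in() conversion is more than delinting: is_in() adapts each element using the column's own variable factory, so values such as UNRESOLVED_BUGTASK_STATUSES no longer need a manual sqlvalues() pass. A sketch of the compiled form (the class definition here is illustrative, not the real BugTask):

    from storm.databases.postgres import compile
    from storm.locals import Int, Storm

    class BugTask(Storm):
        __storm_table__ = 'BugTask'
        id = Int(primary=True)
        status = Int()

    # Compiles to "BugTask.status IN (?, ?, ?)", with each value bound
    # as a parameter instead of interpolated into the SQL text.
    print compile(BugTask.status.is_in([10, 15, 20]))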
=== modified file 'lib/lp/bugs/model/bugtask.py'
--- lib/lp/bugs/model/bugtask.py 2010-11-03 18:43:07 +0000
+++ lib/lp/bugs/model/bugtask.py 2010-11-07 00:47:50 +0000
@@ -37,7 +37,6 @@
And,
AutoTables,
Desc,
- In,
Join,
LeftJoin,
Or,
@@ -1322,6 +1321,7 @@
:seealso: get_bug_privacy_filter_with_decorator
"""
userid = user.id
+
def cache_user_can_view_bug(bugtask):
get_property_cache(bugtask.bug)._known_viewers = set([userid])
return bugtask
@@ -1495,13 +1495,14 @@
bug_ids = list(set(bugtask.bugID for bugtask in bugtasks))
bug_ids_with_specifications = set(IStore(SpecificationBug).find(
- SpecificationBug.bugID, In(SpecificationBug.bugID, bug_ids)))
+ SpecificationBug.bugID,
+ SpecificationBug.bugID.is_in(bug_ids)))
bug_ids_with_branches = set(IStore(BugBranch).find(
- BugBranch.bugID, In(BugBranch.bugID, bug_ids)))
+ BugBranch.bugID, BugBranch.bugID.is_in(bug_ids)))
# Cache all bugs at once to avoid one query per bugtask. We
# could rely on the Storm cache, but this is explicit.
- bugs = dict(IStore(Bug).find((Bug.id, Bug), In(Bug.id, bug_ids)))
+ bugs = dict(IStore(Bug).find((Bug.id, Bug), Bug.id.is_in(bug_ids)))
badge_properties = {}
for bugtask in bugtasks:
@@ -1744,7 +1745,8 @@
BugTask.productseries = StructuralSubscription.productseries
AND StructuralSubscription.subscriber = %(personid)s
UNION ALL
- SELECT BugTask.id FROM BugTask, StructuralSubscription, Product
+ SELECT BugTask.id
+ FROM BugTask, StructuralSubscription, Product
WHERE
BugTask.product = Product.id
AND Product.project = StructuralSubscription.project
@@ -1926,6 +1928,7 @@
if not decorators:
decorator = lambda x: x
else:
+
def decorator(obj):
for decor in decorators:
obj = decor(obj)
@@ -2221,6 +2224,7 @@
# This may need revisiting if e.g. searches on behalf of different
# users are combined.
decorators.append(decorator)
+
def decorator(row):
bugtask = row[0]
for decorator in decorators:
@@ -2288,7 +2292,7 @@
else:
store = search_results._store
milestones = store.find(
- Milestone, In(Milestone.id, milestone_ids))
+ Milestone, Milestone.id.is_in(milestone_ids))
return sorted(milestones, key=milestone_sort_key, reverse=True)
def createTask(self, bug, owner, product=None, productseries=None,
@@ -2854,8 +2858,9 @@
for subscription in subscriptions))
if recipients is not None:
- # We need to process subscriptions, so pull all the subscribes into
- # the cache, then update recipients with the subscriptions.
+ # We need to process subscriptions, so pull all the
+ # subscribers into the cache, then update recipients with
+ # the subscriptions.
subscribers = list(subscribers)
for subscription in subscriptions:
recipients.addStructuralSubscriber(
=== modified file 'lib/lp/bugs/model/bugwatch.py'
--- lib/lp/bugs/model/bugwatch.py 2010-09-29 19:14:41 +0000
+++ lib/lp/bugs/model/bugwatch.py 2010-11-07 00:47:50 +0000
@@ -29,7 +29,6 @@
from storm.base import Storm
from storm.expr import (
Desc,
- In,
Not,
)
from storm.locals import (
@@ -83,21 +82,22 @@
BUG_TRACKER_URL_FORMATS = {
- BugTrackerType.BUGZILLA: 'show_bug.cgi?id=%s',
- BugTrackerType.DEBBUGS: 'cgi-bin/bugreport.cgi?bug=%s',
+ BugTrackerType.BUGZILLA: 'show_bug.cgi?id=%s',
+ BugTrackerType.DEBBUGS: 'cgi-bin/bugreport.cgi?bug=%s',
BugTrackerType.GOOGLE_CODE: 'detail?id=%s',
- BugTrackerType.MANTIS: 'view.php?id=%s',
- BugTrackerType.ROUNDUP: 'issue%s',
- BugTrackerType.RT: 'Ticket/Display.html?id=%s',
+ BugTrackerType.MANTIS: 'view.php?id=%s',
+ BugTrackerType.ROUNDUP: 'issue%s',
+ BugTrackerType.RT: 'Ticket/Display.html?id=%s',
BugTrackerType.SOURCEFORGE: 'support/tracker.php?aid=%s',
- BugTrackerType.TRAC: 'ticket/%s',
- BugTrackerType.SAVANE: 'bugs/?%s',
- BugTrackerType.PHPPROJECT: 'bug.php?id=%s',
+ BugTrackerType.TRAC: 'ticket/%s',
+ BugTrackerType.SAVANE: 'bugs/?%s',
+ BugTrackerType.PHPPROJECT: 'bug.php?id=%s',
}
WATCH_RESCHEDULE_THRESHOLD = 0.6
+
def get_bug_watch_ids(references):
"""Yield bug watch IDs from any given iterator.
@@ -105,6 +105,7 @@
IBugWatch, and yields if it is an integer. Everything else is
discarded.
"""
+
for reference in references:
if IBugWatch.providedBy(reference):
yield reference.id
@@ -360,14 +361,11 @@
@property
def failed_activity(self):
- store = Store.of(self)
- success_status_ids = [
- status.value for status in BUG_WATCH_ACTIVITY_SUCCESS_STATUSES]
-
- return store.find(
+ return Store.of(self).find(
BugWatchActivity,
BugWatchActivity.bug_watch == self,
- Not(In(BugWatchActivity.result, success_status_ids))).order_by(
+ Not(BugWatchActivity.result.is_in(
+ BUG_WATCH_ACTIVITY_SUCCESS_STATUSES))).order_by(
Desc('activity_date'))
def setNextCheck(self, next_check):
@@ -398,7 +396,7 @@
self.title = 'A set of bug watches'
self.bugtracker_parse_functions = {
BugTrackerType.BUGZILLA: self.parseBugzillaURL,
- BugTrackerType.DEBBUGS: self.parseDebbugsURL,
+ BugTrackerType.DEBBUGS: self.parseDebbugsURL,
BugTrackerType.EMAILADDRESS: self.parseEmailAddressURL,
BugTrackerType.GOOGLE_CODE: self.parseGoogleCodeURL,
BugTrackerType.MANTIS: self.parseMantisURL,
@@ -408,14 +406,14 @@
BugTrackerType.SAVANE: self.parseSavaneURL,
BugTrackerType.SOURCEFORGE: self.parseSourceForgeLikeURL,
BugTrackerType.TRAC: self.parseTracURL,
- }
+ }
def get(self, watch_id):
"""See `IBugWatch`Set."""
try:
return BugWatch.get(watch_id)
except SQLObjectNotFound:
- raise NotFoundError, watch_id
+ raise NotFoundError(watch_id)
def search(self):
return BugWatch.select()
@@ -630,8 +628,7 @@
# Launchpad, so we return that one if the hostname matches.
savannah_tracker = getUtility(ILaunchpadCelebrities).savannah_tracker
savannah_hosts = [
- urlsplit(alias)[1] for alias in savannah_tracker.aliases
- ]
+ urlsplit(alias)[1] for alias in savannah_tracker.aliases]
savannah_hosts.append(urlsplit(savannah_tracker.baseurl)[1])
# The remote bug is actually a key in the query dict rather than
@@ -724,7 +721,7 @@
query = IStore(BugWatch).find(
BugWatch, BugWatch.remotebug == remote_bug)
if bug_watch_ids is not None:
- query = query.find(In(BugWatch.id, bug_watch_ids))
+ query = query.find(BugWatch.id.is_in(bug_watch_ids))
return query
def bulkSetError(self, references, last_error_type=None):
@@ -732,7 +729,7 @@
bug_watch_ids = set(get_bug_watch_ids(references))
if len(bug_watch_ids) > 0:
bug_watches_in_database = IStore(BugWatch).find(
- BugWatch, In(BugWatch.id, list(bug_watch_ids)))
+ BugWatch, BugWatch.id.is_in(bug_watch_ids))
bug_watches_in_database.set(
lastchecked=UTC_NOW,
last_error_type=last_error_type,
@@ -748,8 +745,7 @@
"INSERT INTO BugWatchActivity"
" (bug_watch, result, message, oops_id) "
"SELECT BugWatch.id, %s, %s, %s FROM BugWatch"
- " WHERE BugWatch.id IN %s"
- )
+ " WHERE BugWatch.id IN %s")
IStore(BugWatch).execute(
insert_activity_statement % sqlvalues(
result, message, oops_id, bug_watch_ids))
=== modified file 'lib/lp/code/model/branch.py'
--- lib/lp/code/model/branch.py 2010-11-04 19:59:02 +0000
+++ lib/lp/code/model/branch.py 2010-11-07 00:47:50 +0000
@@ -1075,12 +1075,12 @@
name = "date_trunc"
results = Store.of(self).find(
- (DateTrunc('day', Revision.revision_date), Count(Revision.id)),
+ (DateTrunc(u'day', Revision.revision_date), Count(Revision.id)),
Revision.id == BranchRevision.revision_id,
Revision.revision_date > since,
BranchRevision.branch == self)
results = results.group_by(
- DateTrunc('day', Revision.revision_date))
+ DateTrunc(u'day', Revision.revision_date))
return sorted(results)
@property
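The u'day' literals are the same str/unicode issue in another guise: arguments to a Func such as DateTrunc are bound as query parameters rather than inlined, so a modern psycopg2 cares about their type. A quick sketch:

    from storm.databases.postgres import compile
    from storm.expr import Func

    # The argument becomes a '?' placeholder in the generated SQL, so
    # the driver sees it as a text parameter and needs unicode.
    print compile(Func('date_trunc', u'day'))  # date_trunc(?)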
=== modified file 'lib/lp/hardwaredb/model/hwdb.py'
--- lib/lp/hardwaredb/model/hwdb.py 2010-09-14 15:32:53 +0000
+++ lib/lp/hardwaredb/model/hwdb.py 2010-11-07 00:47:50 +0000
@@ -45,7 +45,6 @@
Alias,
And,
Count,
- In,
Not,
Or,
Select,
@@ -386,7 +385,7 @@
columns=[HWSubmissionDevice.submissionID],
tables=device_tables, where=And(*device_clauses))
- clauses.append(In(HWSubmission.id, submission_ids))
+ clauses.append(HWSubmission.id.is_in(submission_ids))
submissions_with_device = Select(
columns=[target_column], tables=tables, where=And(*clauses),
distinct=True)
@@ -448,11 +447,11 @@
tables.append(Bug)
if bug_ids is not None and bug_ids is not []:
- clauses.append(In(Bug.id, bug_ids))
+ clauses.append(Bug.id.is_in(bug_ids))
if bug_tags is not None and bug_tags is not []:
clauses.extend([
- Bug.id == BugTag.bugID, In(BugTag.tag, bug_tags)])
+ Bug.id == BugTag.bugID, BugTag.tag.is_in(bug_tags)])
tables.append(BugTag)
# If we OR-combine the search for bug owners, subscribers
@@ -460,6 +459,9 @@
# So let's run the queries separately and join the results
# on Python level.
+ # This would be quicker still if we did it as a single query
+ # using UNION.
+
owner_query = Select(
columns=[HWSubmission.ownerID], tables=tables,
where=And(*(clauses + [Bug.ownerID == HWSubmission.ownerID])))
@@ -493,7 +495,8 @@
if len(user_ids) == 0:
result = store.find(Person, False)
else:
- result = store.find(Person, In(Person.id, list(user_ids)))
+ user_ids = [row[0] for row in user_ids]
+ result = store.find(Person, Person.id.is_in(user_ids))
result.order_by(Person.displayname)
return result
@@ -517,14 +520,14 @@
HWSubmissionDevice.submission == HWSubmission.id,
HWSubmissionDevice.device_driver_link == HWDeviceDriverLink.id,
HWDeviceDriverLink.device == HWDevice.id,
- HWDevice.bus_vendor == HWVendorID.id
- ]
+ HWDevice.bus_vendor == HWVendorID.id]
if bug_ids is not None and bug_ids is not []:
- clauses.append(In(Bug.id, bug_ids))
+ clauses.append(Bug.id.is_in(bug_ids))
if bug_tags is not None and bug_tags is not []:
- clauses.extend([Bug.id == BugTag.bugID, In(BugTag.tag, bug_tags)])
+ clauses.extend(
+ [Bug.id == BugTag.bugID, BugTag.tag.is_in(bug_tags)])
clauses.append(_userCanAccessSubmissionStormClause(user))
@@ -545,8 +548,7 @@
query = Select(
columns=[
Person.name, HWVendorID.bus,
- HWVendorID.vendor_id_for_bus, HWDevice.bus_product_id
- ],
+ HWVendorID.vendor_id_for_bus, HWDevice.bus_product_id],
tables=tables, where=And(*clauses), distinct=True,
order_by=[HWVendorID.bus, HWVendorID.vendor_id_for_bus,
HWDevice.bus_product_id, Person.name])
@@ -631,6 +633,7 @@
HWBus.SCSI: scsi_product,
}
+
def isValidVendorID(bus, id):
"""Check that the string id is a valid vendor ID for this bus.
@@ -1271,8 +1274,7 @@
HWVendorID.vendor_id_for_bus == vendor_id,
HWDevice.bus_vendor == HWVendorID.id,
HWDeviceDriverLink.device == HWDevice.id,
- HWDevice.bus_product_id == product_id
- ])
+ HWDevice.bus_product_id == product_id])
if driver_name is None and package_name is None:
where_clauses.append(HWDeviceDriverLink.driver == None)
@@ -1294,6 +1296,7 @@
return tables, where_clauses
+
def make_distro_target_clause(distro_target):
"""Create a where expression and a table list to limit results to a
distro target.
@@ -1324,6 +1327,7 @@
'IDistroSeries or IDistroArchSeries')
return ([], [])
+
def _userCanAccessSubmissionStormClause(user):
"""Limit results of HWSubmission queries to rows the user can access.
"""
=== modified file 'lib/lp/registry/browser/distributionsourcepackage.py'
--- lib/lp/registry/browser/distributionsourcepackage.py 2010-10-18 15:47:18 +0000
+++ lib/lp/registry/browser/distributionsourcepackage.py 2010-11-07 00:47:50 +0000
@@ -258,7 +258,8 @@
if not_empty(spr.changelog_entry)])
unique_bugs = extract_bug_numbers(the_changelog)
self._bug_data = list(
- getUtility(IBugSet).getByNumbers(unique_bugs.keys()))
+ getUtility(IBugSet).getByNumbers(
+ [int(key) for key in unique_bugs.keys()]))
# Preload email/person data only if user is logged on. In the opposite
# case the emails in the changelog will be obfuscated anyway and thus
# cause no database lookups.
=== modified file 'lib/lp/registry/doc/vocabularies.txt'
--- lib/lp/registry/doc/vocabularies.txt 2010-10-03 15:30:06 +0000
+++ lib/lp/registry/doc/vocabularies.txt 2010-11-07 00:47:50 +0000
@@ -189,7 +189,7 @@
`name`
>>> distroseries_vocabulary = get_naked_vocab(
- ... None,"DistroSeries")
+ ... None, "DistroSeries")
>>> for term in distroseries_vocabulary:
... print "%30s %s" % (term.token, term.title)
ubuntu/breezy-autotest Ubuntu: Breezy Badger Autotest
=== modified file 'lib/lp/registry/model/distribution.py'
--- lib/lp/registry/model/distribution.py 2010-11-02 20:10:56 +0000
+++ lib/lp/registry/model/distribution.py 2010-11-07 00:47:50 +0000
@@ -20,7 +20,6 @@
from sqlobject.sqlbuilder import SQLConstant
from storm.locals import (
Desc,
- In,
Int,
Join,
Or,
@@ -1220,8 +1219,7 @@
SourcePackageRelease.sourcepackagename == SourcePackageName.id,
DistributionSourcePackageCache.sourcepackagename ==
SourcePackageName.id,
- In(
- DistributionSourcePackageCache.archiveID,
+ DistributionSourcePackageCache.archiveID.is_in(
self.all_distro_archive_ids))
def searchBinaryPackages(self, package_name, exact_match=False):
@@ -1232,7 +1230,8 @@
if exact_match:
find_spec = self._binaryPackageSearchClause + (
- BinaryPackageRelease.binarypackagename == BinaryPackageName.id,
+ BinaryPackageRelease.binarypackagename
+ == BinaryPackageName.id,
)
match_clause = (BinaryPackageName.name == package_name,)
else:
@@ -1241,8 +1240,7 @@
# DistributionSourcePackageCache records.
find_spec = (
DistributionSourcePackageCache.distribution == self,
- In(
- DistributionSourcePackageCache.archiveID,
+ DistributionSourcePackageCache.archiveID.is_in(
self.all_distro_archive_ids))
match_clause = (
DistributionSourcePackageCache.binpkgnames.like(
@@ -1256,7 +1254,7 @@
def searchBinaryPackagesFTI(self, package_name):
"""See `IDistribution`."""
search_vector_column = DistroSeriesPackageCache.fti
- query_function = FTQ(package_name)
+ query_function = FTQ(unicode(package_name))
rank = RANK(search_vector_column, query_function)
extra_clauses = (
=== modified file 'lib/lp/registry/model/distributionsourcepackage.py'
--- lib/lp/registry/model/distributionsourcepackage.py 2010-10-24 21:00:11 +0000
+++ lib/lp/registry/model/distributionsourcepackage.py 2010-11-07 00:47:50 +0000
@@ -19,9 +19,7 @@
And,
Count,
Desc,
- In,
Join,
- Lower,
Max,
Sum,
)
@@ -428,7 +426,7 @@
(SourcePackageRelease, SourcePackagePublishingHistory),
SourcePackagePublishingHistory.distroseries == DistroSeries.id,
DistroSeries.distribution == self.distribution,
- In(SourcePackagePublishingHistory.archiveID,
+ SourcePackagePublishingHistory.archiveID.is_in(
self.distribution.all_distro_archive_ids),
SourcePackagePublishingHistory.sourcepackagerelease ==
SourcePackageRelease.id,
@@ -542,7 +540,7 @@
# Get all persons whose email addresses are in the list.
result_set = store.using(*origin).find(
(EmailAddress, Person),
- In(Lower(EmailAddress.email), email_addresses))
+ EmailAddress.email.lower().is_in(email_addresses))
return result_set
@classmethod
=== modified file 'lib/lp/registry/model/person.py'
--- lib/lp/registry/model/person.py 2010-11-04 19:59:02 +0000
+++ lib/lp/registry/model/person.py 2010-11-07 00:47:50 +0000
@@ -53,10 +53,8 @@
And,
Desc,
Exists,
- In,
Join,
LeftJoin,
- Lower,
Min,
Not,
Or,
@@ -115,7 +113,6 @@
OAuthAccessToken,
OAuthRequestToken,
)
-from canonical.launchpad.database.stormsugar import StartsWith
from canonical.launchpad.event.interfaces import (
IJoinTeamEvent,
ITeamInvitationEvent,
@@ -180,10 +177,7 @@
IllegalRelatedBugTasksParams,
)
from lp.bugs.model.bugtarget import HasBugsBase
-from lp.bugs.model.bugtask import (
- BugTask,
- get_related_bugtasks_search_params,
- )
+from lp.bugs.model.bugtask import get_related_bugtasks_search_params
from lp.code.model.hasbranches import (
HasBranchesMixin,
HasMergeProposalsMixin,
@@ -2845,11 +2839,10 @@
join = store.using(
EmailAddress,
LeftJoin(Account, EmailAddress.accountID == Account.id))
- email, account = (
- join.find(
+ email, account = join.find(
(EmailAddress, Account),
- Lower(EmailAddress.email) == Lower(email_address)).one()
- or (None, None))
+ EmailAddress.email.lower() ==
+ unicode(email_address).lower()).one() or (None, None)
identifier = store.find(
OpenIdIdentifier, identifier=openid_identifier).one()
@@ -3151,7 +3144,7 @@
Not(Person.teamowner == None),
Person.merged == None,
EmailAddress.person == Person.id,
- StartsWith(Lower(EmailAddress.email), text))
+ EmailAddress.email.lower().startswith(unicode(text)))
return team_email_query
def _teamNameQuery(self, text):
@@ -3174,9 +3167,7 @@
return EmptyResultSet()
orderBy = Person._sortingColumnsForSetOperations
- text = text.lower()
- inactive_statuses = tuple(
- status.value for status in INACTIVE_ACCOUNT_STATUSES)
+ text = unicode(text).lower()
# Teams may not have email addresses, so we need to either use a LEFT
# OUTER JOIN or do a UNION between four queries. Using a UNION makes
# it a lot faster than with a LEFT OUTER JOIN.
@@ -3185,8 +3176,8 @@
Person.merged == None,
EmailAddress.person == Person.id,
Person.account == Account.id,
- Not(In(Account.status, inactive_statuses)),
- StartsWith(Lower(EmailAddress.email), text))
+ Not(Account.status.is_in(INACTIVE_ACCOUNT_STATUSES)),
+ EmailAddress.email.lower().startswith(text))
store = IStore(Person)
@@ -3203,7 +3194,7 @@
Person.teamowner == None,
Person.merged == None,
Person.account == Account.id,
- Not(In(Account.status, inactive_statuses)),
+ Not(Account.status.is_in(INACTIVE_ACCOUNT_STATUSES)),
SQL("Person.fti @@ ftq(?)", (text, ))
)
@@ -3223,10 +3214,8 @@
must_have_email=False, created_after=None, created_before=None):
"""See `IPersonSet`."""
orderBy = Person._sortingColumnsForSetOperations
- text = text.lower()
+ text = unicode(text).lower()
store = IStore(Person)
- inactive_statuses = tuple(
- status.value for status in INACTIVE_ACCOUNT_STATUSES)
base_query = And(
Person.teamowner == None,
Person.merged == None)
@@ -3238,7 +3227,7 @@
base_query = And(
base_query,
Person.account == Account.id,
- Not(In(Account.status, inactive_statuses)))
+ Not(Account.status.is_in(INACTIVE_ACCOUNT_STATUSES)))
email_clause_tables = clause_tables + ['EmailAddress']
if must_have_email:
clause_tables = email_clause_tables
@@ -3265,7 +3254,7 @@
email_query = And(
base_query,
EmailAddress.person == Person.id,
- StartsWith(Lower(EmailAddress.email), text))
+ EmailAddress.email.lower().startswith(unicode(text)))
name_query = And(
base_query,
@@ -3278,7 +3267,7 @@
def findTeam(self, text=""):
"""See `IPersonSet`."""
orderBy = Person._sortingColumnsForSetOperations
- text = text.lower()
+ text = unicode(text).lower()
# Teams may not have email addresses, so we need to either use a LEFT
# OUTER JOIN or do a UNION between two queries. Using a UNION makes
# it a lot faster than with a LEFT OUTER JOIN.
@@ -3299,18 +3288,11 @@
def getByEmail(self, email):
"""See `IPersonSet`."""
- # We lookup the EmailAddress in the auth store so we can
- # lookup a Person by EmailAddress in the same transaction
- # that the Person or EmailAddress was created. This is not
- # optimal for production as it requires two database lookups,
- # but is required by much of the test suite.
- conditions = (Lower(EmailAddress.email) == email.lower().strip())
- email_address = IStore(EmailAddress).find(
- EmailAddress, conditions).one()
- if email_address is None:
- return None
- else:
- return IStore(Person).get(Person, email_address.personID)
+ email = unicode(email).strip().lower()
+ return IStore(Person).find(
+ Person,
+ Person.id == EmailAddress.personID,
+ EmailAddress.email.lower() == email).one()
def latest_teams(self, limit=5):
"""See `IPersonSet`."""
=== modified file 'lib/lp/registry/model/pillar.py'
--- lib/lp/registry/model/pillar.py 2010-09-03 06:36:45 +0000
+++ lib/lp/registry/model/pillar.py 2010-11-07 00:47:50 +0000
@@ -85,6 +85,7 @@
def __contains__(self, name):
"""See `IPillarNameSet`."""
store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
+ name = unicode(name)
result = store.execute("""
SELECT TRUE
FROM PillarName
@@ -127,6 +128,7 @@
"""
if ignore_inactive:
query += " AND active IS TRUE"
+ name = unicode(name)
result = store.execute(query, [name, name])
row = result.get_one()
if row is None:
@@ -177,7 +179,7 @@
Distribution.fti @@ ftq(%(text)s) OR
lower(Distribution.title) = lower(%(text)s)
)
- ''' % sqlvalues(text=text))
+ ''' % sqlvalues(text=unicode(text)))
store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
columns = [
PillarName, OtherPillarName, Product, ProjectGroup, Distribution]
=== modified file 'lib/lp/registry/model/projectgroup.py'
--- lib/lp/registry/model/projectgroup.py 2010-11-02 20:10:56 +0000
+++ lib/lp/registry/model/projectgroup.py 2010-11-07 00:47:50 +0000
@@ -20,7 +20,6 @@
)
from storm.expr import (
And,
- In,
SQL,
)
from storm.locals import Int
@@ -175,7 +174,7 @@
def getConfigurableProducts(self):
return [product for product in self.products
if check_permission('launchpad.Edit', product)]
-
+
@property
def drivers(self):
"""See `IHasDrivers`."""
@@ -331,9 +330,9 @@
"""See `IHasBugs`."""
if not self.products:
return []
- product_ids = sqlvalues(*self.products)
+ product_ids = [product.id for product in self.products]
return get_bug_tags_open_count(
- In(BugTask.productID, product_ids), user)
+ BugTask.productID.is_in(product_ids), user)
def _getBugTaskContextClause(self):
"""See `HasBugsBase`."""
=== modified file 'lib/lp/registry/model/sourcepackage.py'
--- lib/lp/registry/model/sourcepackage.py 2010-10-29 10:09:04 +0000
+++ lib/lp/registry/model/sourcepackage.py 2010-11-07 00:47:50 +0000
@@ -17,7 +17,6 @@
from storm.locals import (
And,
Desc,
- In,
Select,
SQL,
Store,
@@ -380,12 +379,12 @@
SourcePackageRelease.id,
SourcePackageRelease.sourcepackagename ==
self.sourcepackagename,
- In(SourcePackagePublishingHistory.archiveID,
+ SourcePackagePublishingHistory.archiveID.is_in(
self.distribution.all_distro_archive_ids)))
return IStore(SourcePackageRelease).find(
SourcePackageRelease,
- In(SourcePackageRelease.id, subselect)).order_by(Desc(
+ SourcePackageRelease.id.is_in(subselect)).order_by(Desc(
SQL("debversion_sort_key(SourcePackageRelease.version)")))
@property
=== modified file 'lib/lp/registry/model/sourcepackagename.py'
--- lib/lp/registry/model/sourcepackagename.py 2010-10-04 20:46:55 +0000
+++ lib/lp/registry/model/sourcepackagename.py 2010-11-07 00:47:50 +0000
@@ -62,6 +62,7 @@
def __getitem__(self, name):
"""See canonical.launchpad.interfaces.ISourcePackageNameSet."""
+ name = unicode(name)
try:
return SourcePackageName.byName(name)
except SQLObjectNotFound:
=== modified file 'lib/lp/registry/vocabularies.py'
--- lib/lp/registry/vocabularies.py 2010-10-20 03:13:10 +0000
+++ lib/lp/registry/vocabularies.py 2010-11-07 00:47:50 +0000
@@ -70,7 +70,6 @@
Desc,
Join,
LeftJoin,
- Lower,
Not,
Or,
SQL,
@@ -95,7 +94,6 @@
sqlvalues,
)
from canonical.launchpad.database.emailaddress import EmailAddress
-from canonical.launchpad.database.stormsugar import StartsWith
from canonical.launchpad.helpers import shortlist
from canonical.launchpad.interfaces.emailaddress import EmailAddressStatus
from canonical.launchpad.interfaces.launchpad import ILaunchpadCelebrities
@@ -187,13 +185,11 @@
If the token contains an '@', treat it like an email. Otherwise,
treat it like a name.
"""
+ token = unicode(token)
if "@" in token:
# This looks like an email token, so let's do an object
# lookup based on that.
- # We retrieve the email address via the main store, so
- # we can easily traverse to email.person to retrieve the
- # result from the main Store as expected by our call sites.
- email = IStore(Person).find(
+ email = IStore(EmailAddress).find(
EmailAddress,
EmailAddress.email.lower() == token.strip().lower()).one()
if email is None:
@@ -256,7 +252,7 @@
if query is None or an empty string.
"""
if query:
- query = query.lower()
+ query = unicode(query).lower()
like_query = "'%%' || %s || '%%'" % quote_like(query)
fti_query = quote(query)
sql = "active = 't' AND (name LIKE %s OR fti @@ ftq(%s))" % (
@@ -301,7 +297,7 @@
if query is None or an empty string.
"""
if query:
- query = query.lower()
+ query = unicode(query).lower()
like_query = "'%%' || %s || '%%'" % quote_like(query)
fti_query = quote(query)
sql = "active = 't' AND (name LIKE %s OR fti @@ ftq(%s))" % (
@@ -378,7 +374,7 @@
if not text:
return self.emptySelectResults()
- return self._select(text.lower())
+ return self._select(unicode(text).lower())
class PersonAccountToMergeVocabulary(
@@ -411,7 +407,7 @@
if not text:
return self.emptySelectResults()
- text = text.lower()
+ text = unicode(text).lower()
return self._select(text)
@@ -641,7 +637,7 @@
else:
return self.emptySelectResults()
- text = text.lower()
+ text = unicode(text).lower()
return self._doSearch(text=text)
def searchForTerms(self, query=None):
@@ -686,7 +682,7 @@
email_storm_query = self.store.find(
EmailAddress.personID,
- StartsWith(Lower(EmailAddress.email), text))
+ EmailAddress.email.lower().startswith(text))
email_subquery = Alias(email_storm_query._get_select(),
'EmailAddress')
tables += [
@@ -1011,7 +1007,7 @@
if not query:
return self.emptySelectResults()
- query = query.lower()
+ query = unicode(query).lower()
objs = self._table.select(
AND(
Milestone.q.id == ProductRelease.q.milestoneID,
@@ -1066,7 +1062,7 @@
if not query:
return self.emptySelectResults()
- query = query.lower().strip('/')
+ query = unicode(query).lower().strip('/')
# If there is a slash splitting the product and productseries
# names, they must both match. If there is no slash, we don't
# know whether it is matching the product or the productseries
@@ -1409,7 +1405,7 @@
if not query:
return self.emptySelectResults()
- query = query.lower()
+ query = unicode(query).lower()
objs = self._table.select(
AND(
Distribution.q.id == DistroSeries.q.distributionID,
=== modified file 'lib/lp/scripts/garbo.py'
--- lib/lp/scripts/garbo.py 2010-10-17 22:51:50 +0000
+++ lib/lp/scripts/garbo.py 2010-11-07 00:47:50 +0000
@@ -18,7 +18,6 @@
from psycopg2 import IntegrityError
import pytz
from storm.locals import (
- In,
Max,
Min,
Select,
@@ -203,7 +202,8 @@
class CodeImportEventPruner(TunableLoop):
"""Prune `CodeImportEvent`s that are more than a month old.
- Events that happened more than 30 days ago are really of no interest to us.
+ Events that happened more than 30 days ago are really of no
+ interest to us.
"""
maximum_chunk_size = 10000
@@ -547,7 +547,7 @@
ids_to_remove = list(self._to_remove()[:chunk_size])
num_removed = IMasterStore(BugNotification).find(
BugNotification,
- In(BugNotification.id, ids_to_remove)).remove()
+ BugNotification.id.is_in(ids_to_remove)).remove()
transaction.commit()
self.log.debug("Removed %d rows" % num_removed)
@@ -579,7 +579,7 @@
# constraint is ON DELETE CASCADE.
IMasterStore(Job).find(
Job,
- In(Job.id, ids_to_remove)).remove()
+ Job.id.is_in(ids_to_remove)).remove()
else:
self._is_done = True
transaction.commit()
@@ -719,7 +719,7 @@
chunk_size = int(chunk_size)
ids_to_remove = list(self._to_remove()[:chunk_size])
self.store.find(
- BugAttachment, In(BugAttachment.id, ids_to_remove)).remove()
+ BugAttachment, BugAttachment.id.is_in(ids_to_remove)).remove()
transaction.commit()
=== modified file 'lib/lp/scripts/utilities/sanitizedb.py'
--- lib/lp/scripts/utilities/sanitizedb.py 2010-08-30 06:46:39 +0000
+++ lib/lp/scripts/utilities/sanitizedb.py 2010-11-07 00:47:50 +0000
@@ -380,7 +380,7 @@
EmailAddress.status == EmailAddressStatus.NEW,
EmailAddress.status == EmailAddressStatus.OLD,
EmailAddress.email.lower().like(
- '%@example.com', case_sensitive=True))).remove()
+ u'%@example.com', case_sensitive=True))).remove()
self.store.flush()
self.logger.info(
"Removed %d invalid, unvalidated and old email addresses.", count)
=== modified file 'lib/lp/services/database/bulk.py'
--- lib/lp/services/database/bulk.py 2010-08-20 20:31:18 +0000
+++ lib/lp/services/database/bulk.py 2010-11-07 00:47:50 +0000
@@ -12,7 +12,6 @@
from collections import defaultdict
from storm.base import Storm
-from storm.expr import In
from storm.info import get_cls_info
from storm.store import Store
from zope.security.proxy import removeSecurityProxy
@@ -55,7 +54,7 @@
primary_key_column_getter = primary_key_column.__get__
for store, objects in collate(objects, Store.of):
primary_keys = map(primary_key_column_getter, objects)
- condition = In(primary_key_column, primary_keys)
+ condition = primary_key_column.is_in(primary_keys)
yield store.find(object_type, condition)
=== modified file 'lib/lp/services/worlddata/model/language.py'
--- lib/lp/services/worlddata/model/language.py 2010-10-03 15:30:06 +0000
+++ lib/lp/services/worlddata/model/language.py 2010-11-07 00:47:50 +0000
@@ -243,10 +243,11 @@
def search(self, text):
"""See `ILanguageSet`."""
if text:
+ text = unicode(text).lower()
results = ISlaveStore(Language).find(
Language, Or(
- CONTAINSSTRING(Language.code.lower(), text.lower()),
- CONTAINSSTRING(Language.englishname.lower(), text.lower())
+ CONTAINSSTRING(Language.code.lower(), text),
+ CONTAINSSTRING(Language.englishname.lower(), text)
)).order_by(Language.englishname)
else:
results = None
=== modified file 'lib/lp/soyuz/doc/gina.txt'
--- lib/lp/soyuz/doc/gina.txt 2010-10-20 13:33:24 +0000
+++ lib/lp/soyuz/doc/gina.txt 2010-11-07 00:47:50 +0000
@@ -439,7 +439,7 @@
2 being uploaded by mdz and 2 by doko).
>>> from sqlobject import LIKE
- >>> p = Person.selectOne(LIKE(Person.q.name, "cjwatson%"))
+ >>> p = Person.selectOne(LIKE(Person.q.name, u"cjwatson%"))
>>> print p.name
cjwatson
>>> print Person.select().count() - orig_person_count
=== modified file 'lib/lp/soyuz/doc/packageset.txt'
--- lib/lp/soyuz/doc/packageset.txt 2010-10-09 16:36:22 +0000
+++ lib/lp/soyuz/doc/packageset.txt 2010-11-07 00:47:50 +0000
@@ -1119,7 +1119,7 @@
note that non-existent package sets (e.g. 'not-there') are simply ignored.
>>> to_be_added = (
- ... 'gnome', 'x-win', 'universe', 'multiverse', 'not-there')
+ ... u'gnome', u'x-win', u'universe', u'multiverse', u'not-there')
>>> umbrella_ps.addSubsets(to_be_added)
>>> print_data(umbrella_ps.setsIncluded(direct_inclusion=True))
4 -> mozilla
@@ -1131,7 +1131,7 @@
Package subsets can be removed in a similar fashion. Non-existent sets
or sets which are not (direct) subsets are ignored again.
- >>> to_be_removed = ('umbrella', 'universe', 'multiverse', 'not-mine')
+ >>> to_be_removed = (u'umbrella', u'universe', u'multiverse', u'not-mine')
>>> umbrella_ps.removeSubsets(to_be_removed)
>>> print_data(umbrella_ps.setsIncluded(direct_inclusion=True))
4 -> mozilla
=== modified file 'lib/lp/soyuz/model/archivepermission.py'
--- lib/lp/soyuz/model/archivepermission.py 2010-08-23 17:16:35 +0000
+++ lib/lp/soyuz/model/archivepermission.py 2010-11-07 00:47:50 +0000
@@ -14,10 +14,7 @@
BoolCol,
ForeignKey,
)
-from storm.expr import (
- In,
- SQL,
- )
+from storm.expr import SQL
from storm.locals import (
Int,
Reference,
@@ -116,7 +113,7 @@
elif self.permission == ArchivePermissionType.QUEUE_ADMIN:
alsoProvides(self, IArchiveQueueAdmin)
else:
- raise AssertionError, (
+ raise AssertionError(
"Unknown permission type %s" % self.permission)
@property
@@ -163,8 +160,7 @@
ArchivePermission.permission = %s AND
ArchivePermission.person = TeamParticipation.team AND
TeamParticipation.person = %s
- """ % sqlvalues(archive, permission, person)
- ]
+ """ % sqlvalues(archive, permission, person)]
prejoins = []
@@ -248,8 +244,7 @@
clauses = ["""
ArchivePermission.archive = %s AND
ArchivePermission.permission = %s
- """ % sqlvalues(archive, ArchivePermissionType.UPLOAD)
- ]
+ """ % sqlvalues(archive, ArchivePermissionType.UPLOAD)]
if component is not None:
component = self._nameToComponent(component)
@@ -386,7 +381,8 @@
AND ap.packageset IS NOT NULL
'''
query = SQL(query, (person.id, archive.id))
- return store.find(ArchivePermission, In(ArchivePermission.id, query))
+ return store.find(
+ ArchivePermission, ArchivePermission.id.is_in(query))
def uploadersForPackageset(
self, archive, packageset, direct_permissions=True):
@@ -405,7 +401,8 @@
'''
query += " AND ap.archive = ?"
query = SQL(query, (packageset.id, archive.id))
- return store.find(ArchivePermission, In(ArchivePermission.id, query))
+ return store.find(
+ ArchivePermission, ArchivePermission.id.is_in(query))
def newPackagesetUploader(
self, archive, person, packageset, explicit=False):
@@ -424,7 +421,8 @@
'''
query = SQL(query, (person.id, packageset.id, archive.id))
permissions = list(
- store.find(ArchivePermission, In(ArchivePermission.id, query)))
+ store.find(
+ ArchivePermission, ArchivePermission.id.is_in(query)))
if len(permissions) > 0:
# Found permissions in the database, does the 'explicit' flag
# have the requested value?
@@ -496,7 +494,8 @@
'''
query = SQL(
query, (person.id, sourcepackagename.id, archive.id))
- return store.find(ArchivePermission, In(ArchivePermission.id, query))
+ return store.find(
+ ArchivePermission, ArchivePermission.id.is_in(query))
def packagesetsForSource(
self, archive, sourcepackagename, direct_permissions=True):
=== modified file 'lib/lp/soyuz/model/binarypackagebuild.py'
--- lib/lp/soyuz/model/binarypackagebuild.py 2010-10-06 11:46:51 +0000
+++ lib/lp/soyuz/model/binarypackagebuild.py 2010-11-07 00:47:50 +0000
@@ -14,7 +14,6 @@
from sqlobject import SQLObjectNotFound
from storm.expr import (
Desc,
- In,
Join,
LeftJoin,
)
@@ -821,7 +820,7 @@
store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
return store.find(
BinaryPackageBuild,
- In(BinaryPackageBuild.distro_arch_series_id, archseries_ids),
+ BinaryPackageBuild.distro_arch_series_id.is_in(archseries_ids),
BinaryPackageBuild.package_build == PackageBuild.id,
PackageBuild.build_farm_job == BuildFarmJob.id,
BuildFarmJob.status == BuildStatus.NEEDSBUILD)
@@ -1177,7 +1176,7 @@
result_set = store.using(*origin).find(
(SourcePackageRelease, LibraryFileAlias, SourcePackageName,
LibraryFileContent, Builder, PackageBuild, BuildFarmJob),
- In(BinaryPackageBuild.id, build_ids))
+ BinaryPackageBuild.id.is_in(build_ids))
# Force query execution so that the ancillary data gets fetched
# and added to StupidCache.
=== modified file 'lib/lp/soyuz/model/binarypackagename.py'
--- lib/lp/soyuz/model/binarypackagename.py 2010-10-03 15:30:06 +0000
+++ lib/lp/soyuz/model/binarypackagename.py 2010-11-07 00:47:50 +0000
@@ -13,7 +13,6 @@
# SQLObject/SQLBase
from sqlobject import (
- CONTAINSSTRING,
SQLObjectNotFound,
StringCol,
)
@@ -26,6 +25,7 @@
SQLBase,
sqlvalues,
)
+from canonical.launchpad.interfaces.lpstorm import IStore
from canonical.launchpad.webapp.vocabulary import (
BatchedCountableIterator,
NamedSQLObjectHugeVocabulary,
@@ -66,14 +66,16 @@
def findByName(self, name):
"""Find binarypackagenames by name or part of it."""
- return BinaryPackageName.select(
- CONTAINSSTRING(BinaryPackageName.q.name, name))
+ return IStore(BinaryPackageName).find(
+ BinaryPackageName,
+ BinaryPackageName.name.contains_string(unicode(name)))
def queryByName(self, name):
- return BinaryPackageName.selectOneBy(name=name)
+ return IStore(BinaryPackageName).find(
+ BinaryPackageName, name=unicode(name)).one()
def new(self, name):
- return BinaryPackageName(name=name)
+ return BinaryPackageName(name=unicode(name))
def ensure(self, name):
"""Ensure that the given BinaryPackageName exists, creating it
@@ -81,6 +83,7 @@
Returns the BinaryPackageName
"""
+ name = unicode(name)
try:
return self[name]
except NotFoundError:
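contains_string() is Storm's replacement for SQLObject's CONTAINSSTRING: like startswith(), it escapes LIKE wildcards itself and expects a unicode argument. A minimal usage sketch (the package name is illustrative):

    from canonical.launchpad.interfaces.lpstorm import IStore

    result = IStore(BinaryPackageName).find(
        BinaryPackageName,
        BinaryPackageName.name.contains_string(u'gcc'))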
=== modified file 'lib/lp/soyuz/model/distroarchseriesbinarypackage.py'
--- lib/lp/soyuz/model/distroarchseriesbinarypackage.py 2010-10-03 15:30:06 +0000
+++ lib/lp/soyuz/model/distroarchseriesbinarypackage.py 2010-11-07 00:47:50 +0000
@@ -11,10 +11,7 @@
'DistroArchSeriesBinaryPackage',
]
-from storm.locals import (
- Desc,
- In,
- )
+from storm.locals import Desc
from zope.interface import implements
from canonical.database.sqlbase import sqlvalues
@@ -180,7 +177,7 @@
orderBy='-datecreated',
limit=1,
distinct=True,
- clauseTables=['BinaryPackagePublishingHistory',])
+ clauseTables=['BinaryPackagePublishingHistory'])
# Listify to limit the SQL queries to one only.
results = list(releases)
@@ -198,12 +195,10 @@
BinaryPackageRelease.binarypackagename == self.binarypackagename,
BinaryPackagePublishingHistory.distroarchseries ==
self.distroarchseries,
- In(
- BinaryPackagePublishingHistory.archiveID,
+ BinaryPackagePublishingHistory.archiveID.is_in(
self.distribution.all_distro_archive_ids),
BinaryPackagePublishingHistory.binarypackagereleaseID ==
- BinaryPackageRelease.id
- ).config(distinct=True).order_by(
+ BinaryPackageRelease.id).config(distinct=True).order_by(
Desc(BinaryPackagePublishingHistory.datecreated))
@property
@@ -246,4 +241,3 @@
return None
else:
return src_pkg_release.sourcepackage
-
=== modified file 'lib/lp/soyuz/model/packagediff.py'
--- lib/lp/soyuz/model/packagediff.py 2010-08-24 12:05:25 +0000
+++ lib/lp/soyuz/model/packagediff.py 2010-11-07 00:47:50 +0000
@@ -14,10 +14,7 @@
import tempfile
from sqlobject import ForeignKey
-from storm.expr import (
- Desc,
- In,
- )
+from storm.expr import Desc
from storm.store import EmptyResultSet
from zope.component import getUtility
from zope.interface import implements
@@ -135,7 +132,8 @@
ancestry_identifier = "%s (in %s)" % (
self.from_source.version,
ancestry_archive.distribution.name.capitalize())
- return 'diff from %s to %s' % (ancestry_identifier, self.to_source.version)
+ return 'diff from %s to %s' % (
+ ancestry_identifier, self.to_source.version)
@property
def private(self):
@@ -280,7 +278,8 @@
return EmptyResultSet()
store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
spr_ids = [spr.id for spr in sprs]
- result = store.find(PackageDiff, In(PackageDiff.to_sourceID, spr_ids))
+ result = store.find(
+ PackageDiff, PackageDiff.to_sourceID.is_in(spr_ids))
result.order_by(PackageDiff.to_sourceID,
Desc(PackageDiff.date_requested))
return result
=== modified file 'lib/lp/soyuz/model/packageset.py'
--- lib/lp/soyuz/model/packageset.py 2010-08-20 20:31:18 +0000
+++ lib/lp/soyuz/model/packageset.py 2010-11-07 00:47:50 +0000
@@ -6,10 +6,7 @@
import pytz
from storm.exceptions import IntegrityError
-from storm.expr import (
- In,
- SQL,
- )
+from storm.expr import SQL
from storm.locals import (
DateTime,
Int,
@@ -164,7 +161,7 @@
store = IStore(Packageset)
source_names = SQL(source_name_query, (self.id,))
result_set = store.find(
- SourcePackageName, In(SourcePackageName.id, source_names))
+ SourcePackageName, SourcePackageName.id.is_in(source_names))
return _order_result_set(result_set)
def getSourcesIncluded(self, direct_inclusion=False):
@@ -191,7 +188,7 @@
params = (self.id,)
store = IStore(Packageset)
predecessors = SQL(query, params)
- result_set = store.find(Packageset, In(Packageset.id, predecessors))
+ result_set = store.find(Packageset, Packageset.id.is_in(predecessors))
return _order_result_set(result_set)
def setsIncluded(self, direct_inclusion=False):
@@ -213,7 +210,7 @@
params = (self.id,)
store = IStore(Packageset)
successors = SQL(query, params)
- result_set = store.find(Packageset, In(Packageset.id, successors))
+ result_set = store.find(Packageset, Packageset.id.is_in(successors))
return _order_result_set(result_set)
def sourcesSharedBy(self, other_package_set, direct_inclusion=False):
@@ -240,7 +237,7 @@
store = IStore(Packageset)
source_names = SQL(query, (self.id, other_package_set.id))
result_set = store.find(
- SourcePackageName, In(SourcePackageName.id, source_names))
+ SourcePackageName, SourcePackageName.id.is_in(source_names))
return _order_result_set(result_set)
def getSourcesSharedBy(self, other_package_set, direct_inclusion=False):
@@ -253,13 +250,13 @@
if direct_inclusion == False:
query = '''
SELECT pss_this.sourcepackagename
- FROM packagesetsources pss_this,
+ FROM packagesetsources pss_this,
flatpackagesetinclusion fpsi_this
WHERE pss_this.packageset = fpsi_this.child
AND fpsi_this.parent = ?
EXCEPT
SELECT pss_other.sourcepackagename
- FROM packagesetsources pss_other,
+ FROM packagesetsources pss_other,
flatpackagesetinclusion fpsi_other
WHERE pss_other.packageset = fpsi_other.child
AND fpsi_other.parent = ?
@@ -276,7 +273,7 @@
store = IStore(Packageset)
source_names = SQL(query, (self.id, other_package_set.id))
result_set = store.find(
- SourcePackageName, In(SourcePackageName.id, source_names))
+ SourcePackageName, SourcePackageName.id.is_in(source_names))
return _order_result_set(result_set)
def getSourcesNotSharedBy(
@@ -295,25 +292,27 @@
def addSources(self, names):
"""See `IPackageset`."""
- clauses = (SourcePackageName, In(SourcePackageName.name, names))
+ if isinstance(names, basestring):
+ names = [unicode(names)]
+ clauses = (SourcePackageName, SourcePackageName.name.is_in(names))
self._api_add_or_remove(clauses, self._addSourcePackageNames)
def removeSources(self, names):
"""See `IPackageset`."""
- clauses = (SourcePackageName, In(SourcePackageName.name, names))
+ clauses = (SourcePackageName, SourcePackageName.name.is_in(names))
self._api_add_or_remove(clauses, self._removeSourcePackageNames)
def addSubsets(self, names):
"""See `IPackageset`."""
clauses = (
- Packageset, In(Packageset.name, names),
+ Packageset, Packageset.name.is_in(names),
Packageset.distroseries == self.distroseries)
self._api_add_or_remove(clauses, self._addDirectSuccessors)
def removeSubsets(self, names):
"""See `IPackageset`."""
clauses = (
- Packageset, In(Packageset.name, names),
+ Packageset, Packageset.name.is_in(names),
Packageset.distroseries == self.distroseries)
self._api_add_or_remove(clauses, self._removeDirectSuccessors)
@@ -381,7 +380,7 @@
if not isinstance(name, unicode):
name = unicode(name, 'utf-8')
- ubuntu = getUtility(IDistributionSet).getByName('ubuntu')
+ ubuntu = getUtility(IDistributionSet).getByName(u'ubuntu')
extra_args = []
if distroseries is not None:
# If the user just passed a distro series name, look it up.
@@ -439,7 +438,7 @@
'''
store = IStore(Packageset)
psets = SQL(query, (sourcepackagename.id,))
- clauses = [In(Packageset.id, psets)]
+ clauses = [Packageset.id.is_in(psets)]
if distroseries:
clauses.append(Packageset.distroseries == distroseries)
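Note for reviewers: two things worth calling out in packageset.py. First,
is_in() takes a raw SQL() subselect as well as a Python sequence, so the
subquery-based finders only change spelling. Second, addSources() now coerces
its argument to unicode, since modern psycopg2 adapts str and unicode
differently (bug #670906). A minimal sketch of the coercion, assuming callers
may pass one name or a sequence (the helper name is illustrative, not part of
this branch):

    def ensure_unicode_names(names):
        """Return a list of unicode names from one string or a sequence."""
        if isinstance(names, basestring):
            names = [names]
        return [unicode(name) for name in names]

    print ensure_unicode_names('vim')              # [u'vim']
    print ensure_unicode_names(['vim', u'emacs'])  # [u'vim', u'emacs']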
=== modified file 'lib/lp/soyuz/model/publishing.py'
--- lib/lp/soyuz/model/publishing.py 2010-11-03 06:04:16 +0000
+++ lib/lp/soyuz/model/publishing.py 2010-11-07 00:47:50 +0000
@@ -30,7 +30,6 @@
)
from storm.expr import (
Desc,
- In,
LeftJoin,
Sum,
)
@@ -115,6 +114,7 @@
# XXX cprov 2006-08-18: move it away, perhaps archivepublisher/pool.py
+
def makePoolPath(source_name, component_name):
"""Return the pool path for a given source name and component name."""
from lp.archivepublisher.diskpool import poolify
@@ -136,7 +136,8 @@
sha1 = filealias.content.sha1
path = diskpool.pathFor(component, source, filename)
- action = diskpool.addFile(component, source, filename, sha1, filealias)
+ action = diskpool.addFile(
+ component, source, filename, sha1, filealias)
if action == diskpool.results.FILE_ADDED:
log.debug("Added %s from library" % path)
elif action == diskpool.results.SYMLINK_ADDED:
@@ -294,7 +295,7 @@
for pub_file in self.files:
pub_file.publish(diskpool, log)
except PoolFileOverwriteError, e:
- message = "PoolFileOverwriteError: %s, skipping." % e
+ message = "PoolFileOverwriteError: %s, skipping." % e
properties = [('error-explanation', message)]
request = ScriptRequest(properties)
error_utility = ErrorReportingUtility()
@@ -636,22 +637,19 @@
def meta_sourcepackage(self):
"""see `ISourcePackagePublishingHistory`."""
return self.distroseries.getSourcePackage(
- self.sourcepackagerelease.sourcepackagename
- )
+ self.sourcepackagerelease.sourcepackagename)
@property
def meta_sourcepackagerelease(self):
"""see `ISourcePackagePublishingHistory`."""
return self.distroseries.distribution.getSourcePackageRelease(
- self.sourcepackagerelease
- )
+ self.sourcepackagerelease)
@property
def meta_distroseriessourcepackagerelease(self):
"""see `ISourcePackagePublishingHistory`."""
return self.distroseries.getSourcePackageRelease(
- self.sourcepackagerelease
- )
+ self.sourcepackagerelease)
@property
def meta_supersededby(self):
@@ -659,8 +657,7 @@
if not self.supersededby:
return None
return self.distroseries.distribution.getSourcePackageRelease(
- self.supersededby
- )
+ self.supersededby)
@property
def source_package_name(self):
@@ -779,8 +776,7 @@
distroseries,
self.component,
self.section,
- pocket
- )
+ pocket)
def getStatusSummaryForBuilds(self):
"""See `ISourcePackagePublishingHistory`."""
@@ -1007,7 +1003,8 @@
by new overrides from superseding itself.
"""
available_architectures = [
- das.id for das in self.distroarchseries.distroseries.architectures]
+ das.id for das in
+ self.distroarchseries.distroseries.architectures]
return IMasterStore(BinaryPackagePublishingHistory).find(
BinaryPackagePublishingHistory,
BinaryPackagePublishingHistory.status.is_in(
@@ -1371,8 +1368,7 @@
DistroArchSeries.distroseriesID,
SourcePackagePublishingHistory.sourcepackagereleaseID ==
BinaryPackageBuild.source_package_release_id,
- In(SourcePackagePublishingHistory.id, source_publication_ids)
- )
+ SourcePackagePublishingHistory.id.is_in(source_publication_ids))
# First, we'll find the builds that were built in the same
# archive context as the published sources.
@@ -1487,16 +1483,14 @@
SourcePackagePublishingHistory.pocket,
BinaryPackagePublishingHistory.archiveID ==
SourcePackagePublishingHistory.archiveID,
- In(SourcePackagePublishingHistory.id, source_publication_ids)
- ]
+ SourcePackagePublishingHistory.id.is_in(source_publication_ids)]
# If the call-site requested to join only on binaries published
# with an active publishing status then we need to further restrict
# the join.
if active_binaries_only:
- join.append(
- In(BinaryPackagePublishingHistory.status,
- [enum.value for enum in active_publishing_status]))
+ join.append(BinaryPackagePublishingHistory.status.is_in(
+ active_publishing_status))
return join
@@ -1520,11 +1514,9 @@
one_or_more_source_publications)
store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
- published_builds = store.find((
- SourcePackagePublishingHistory,
- BinaryPackageBuild,
- DistroArchSeries
- ),
+ published_builds = store.find(
+ (SourcePackagePublishingHistory, BinaryPackageBuild,
+ DistroArchSeries),
self._getSourceBinaryJoinForSources(
source_publication_ids, active_binaries_only=False),
BinaryPackagePublishingHistory.datepublished != None,
@@ -1568,7 +1560,7 @@
BinaryPackageRelease.id,
BinaryPackagePublishingHistory.archiveID ==
SourcePackagePublishingHistory.archiveID,
- In(SourcePackagePublishingHistory.id, source_publication_ids))
+ SourcePackagePublishingHistory.id.is_in(source_publication_ids))
return binary_result.order_by(LibraryFileAlias.id)
@@ -1585,7 +1577,7 @@
LibraryFileAlias.id == SourcePackageReleaseFile.libraryfileID,
SourcePackageReleaseFile.sourcepackagerelease ==
SourcePackagePublishingHistory.sourcepackagereleaseID,
- In(SourcePackagePublishingHistory.id, source_publication_ids))
+ SourcePackagePublishingHistory.id.is_in(source_publication_ids))
binary_result = self.getBinaryFilesForSources(
one_or_more_source_publications)
@@ -1639,7 +1631,7 @@
LibraryFileAlias, LibraryFileContent),
SourcePackagePublishingHistory.sourcepackagereleaseID ==
PackageDiff.to_sourceID,
- In(SourcePackagePublishingHistory.id, source_publication_ids))
+ SourcePackagePublishingHistory.id.is_in(source_publication_ids))
result_set.order_by(
SourcePackagePublishingHistory.id,
@@ -1677,7 +1669,7 @@
SourcePackageRelease.id,
SourcePackageRelease.id ==
SourcePackagePublishingHistory.sourcepackagereleaseID,
- In(SourcePackagePublishingHistory.id, source_publication_ids))
+ SourcePackagePublishingHistory.id.is_in(source_publication_ids))
result_set.order_by(SourcePackagePublishingHistory.id)
return result_set
@@ -1750,8 +1742,7 @@
if unpublished_builds:
augmented_summary = {
'status': BuildSetStatus.FULLYBUILT_PENDING,
- 'builds': unpublished_builds
- }
+ 'builds': unpublished_builds}
return augmented_summary
def requestDeletion(self, sources, removed_by, removal_comment=None):
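Note for reviewers: in publishing.py the old code had to unpack enum.value
before handing the list to In(); with the updated Storm, the enum column
adapts the items itself, so status.is_in(active_publishing_status) takes the
items directly. A pure-Python sketch of the call-site change (DBItem here is
a stand-in for the real enumeration item, not the lazr class):

    class DBItem:
        """Stand-in for an enumeration item carrying a database value."""
        def __init__(self, value):
            self.value = value

    active_publishing_status = [DBItem(1), DBItem(2)]

    # Before: values had to be extracted by hand for In().
    values = [enum.value for enum in active_publishing_status]
    # After: the enum-aware column variable does the adaptation, so the
    # clause is simply column.is_in(active_publishing_status).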
=== modified file 'lib/lp/soyuz/model/queue.py'
--- lib/lp/soyuz/model/queue.py 2010-10-22 04:12:39 +0000
+++ lib/lp/soyuz/model/queue.py 2010-11-07 00:47:50 +0000
@@ -26,7 +26,6 @@
)
from storm.locals import (
Desc,
- In,
Join,
)
from storm.store import Store
@@ -103,7 +102,6 @@
# of the archivepublisher which cause circular import errors if they
# are placed here.
-
def debug(logger, msg):
"""Shorthand debug notation for publish() methods."""
if logger is not None:
@@ -675,23 +673,20 @@
"""See `IPackageUpload`."""
return PackageUploadSource(
packageupload=self,
- sourcepackagerelease=spr.id
- )
+ sourcepackagerelease=spr.id)
def addBuild(self, build):
"""See `IPackageUpload`."""
return PackageUploadBuild(
packageupload=self,
- build=build.id
- )
+ build=build.id)
def addCustom(self, library_file, custom_type):
"""See `IPackageUpload`."""
return PackageUploadCustom(
packageupload=self,
libraryfilealias=library_file.id,
- customformat=custom_type
- )
+ customformat=custom_type)
def isPPA(self):
"""See `IPackageUpload`."""
@@ -767,12 +762,12 @@
# uploads.
for build in self.builds:
for bpr in build.build.binarypackages:
- files.extend(
- [(bpf.libraryfile.filename,'','') for bpf in bpr.files])
+ files.extend([
+ (bpf.libraryfile.filename, '', '') for bpf in bpr.files])
if self.customfiles:
files.extend(
- [(file.libraryfilealias.filename,'','')
+ [(file.libraryfilealias.filename, '', '')
for file in self.customfiles])
return files
@@ -1115,7 +1110,7 @@
# There can be no recipients if none of the emails are registered
# in LP.
if not recipients:
- debug(self.logger,"No recipients on email, not sending.")
+ debug(self.logger, "No recipients on email, not sending.")
return
# Make the content of the actual changes file available to the
@@ -1227,7 +1222,7 @@
:attach_changes: A flag governing whether the original changesfile
content shall be attached to the email.
"""
- extra_headers = { 'X-Katie' : 'Launchpad actually' }
+ extra_headers = {'X-Katie': 'Launchpad actually'}
# XXX cprov 20071212: ideally we only need to check archive.purpose,
# however the current code in uploadprocessor.py (around line 259)
@@ -1403,8 +1398,7 @@
packageupload = ForeignKey(
dbName='packageupload',
- foreignKey='PackageUpload'
- )
+ foreignKey='PackageUpload')
build = ForeignKey(dbName='build', foreignKey='BinaryPackageBuild')
@@ -1440,6 +1434,7 @@
# At this point (uploads are already processed) sections are
# guaranteed to exist in the DB. We don't care if sections are
# not official.
+ pass
def publish(self, logger=None):
"""See `IPackageUploadBuild`."""
@@ -1499,8 +1494,7 @@
component=binary.component,
section=binary.section,
priority=binary.priority,
- pocket=self.packageupload.pocket
- )
+ pocket=self.packageupload.pocket)
published_binaries.append(bpph)
return published_binaries
@@ -1514,13 +1508,11 @@
packageupload = ForeignKey(
dbName='packageupload',
- foreignKey='PackageUpload'
- )
+ foreignKey='PackageUpload')
sourcepackagerelease = ForeignKey(
dbName='sourcepackagerelease',
- foreignKey='SourcePackageRelease'
- )
+ foreignKey='SourcePackageRelease')
def getSourceAncestry(self):
"""See `IPackageUploadSource`."""
@@ -1627,6 +1619,7 @@
# At this point (uploads are already processed) sections are
# guaranteed to exist in the DB. We don't care if sections are
# not official.
+ pass
def publish(self, logger=None):
"""See `IPackageUploadSource`."""
@@ -1644,8 +1637,7 @@
distroseries=self.packageupload.distroseries,
component=self.sourcepackagerelease.component,
section=self.sourcepackagerelease.section,
- pocket=self.packageupload.pocket
- )
+ pocket=self.packageupload.pocket)
class PackageUploadCustom(SQLBase):
@@ -1656,8 +1648,7 @@
packageupload = ForeignKey(
dbName='packageupload',
- foreignKey='PackageUpload'
- )
+ foreignKey='PackageUpload')
customformat = EnumCol(dbName='customformat', unique=False,
notNull=True, schema=PackageUploadCustomFormat)
@@ -1903,11 +1894,11 @@
# method can be removed and call sites updated to use this one.
store = Store.of(distroseries)
- def dbitem_values_tuple(item_or_list):
+ def dbitem_tuple(item_or_list):
if not isinstance(item_or_list, list):
- return (item_or_list.value,)
+ return (item_or_list,)
else:
- return tuple(item.value for item in item_or_list)
+ return tuple(item_or_list)
timestamp_query_clause = ()
if created_since_date is not None:
@@ -1916,34 +1907,31 @@
status_query_clause = ()
if status is not None:
- status = dbitem_values_tuple(status)
- status_query_clause = (
- In(PackageUpload.status, status),)
+ status = dbitem_tuple(status)
+ status_query_clause = (PackageUpload.status.is_in(status),)
archives = distroseries.distribution.getArchiveIDList(archive)
- archive_query_clause = (
- In(PackageUpload.archiveID, archives),)
+ archive_query_clause = (PackageUpload.archiveID.is_in(archives),)
pocket_query_clause = ()
if pocket is not None:
- pocket = dbitem_values_tuple(pocket)
- pocket_query_clause = (
- In(PackageUpload.pocket, pocket),)
+ pocket = dbitem_tuple(pocket)
+ pocket_query_clause = (PackageUpload.pocket.is_in(pocket),)
custom_type_query_clause = ()
if custom_type is not None:
- custom_type = dbitem_values_tuple(custom_type)
+ custom_type = dbitem_tuple(custom_type)
custom_type_query_clause = (
PackageUpload.id == PackageUploadCustom.packageuploadID,
- In(PackageUploadCustom.customformat, custom_type))
+ PackageUploadCustom.customformat.is_in(custom_type))
return store.find(
PackageUpload,
PackageUpload.distroseries == distroseries,
*(status_query_clause + archive_query_clause +
pocket_query_clause + timestamp_query_clause +
- custom_type_query_clause)
- ).order_by(Desc(PackageUpload.id)).config(distinct=True)
+ custom_type_query_clause)).order_by(
+ Desc(PackageUpload.id)).config(distinct=True)
def getBuildByBuildIDs(self, build_ids):
"""See `IPackageUploadSet`."""
=== modified file 'lib/lp/soyuz/scripts/initialise_distroseries.py'
--- lib/lp/soyuz/scripts/initialise_distroseries.py 2010-10-18 04:19:10 +0000
+++ lib/lp/soyuz/scripts/initialise_distroseries.py 2010-11-07 00:47:50 +0000
@@ -65,7 +65,7 @@
self.distroseries = distroseries
self.parent = self.distroseries.parent_series
self.arches = arches
- self.packagesets = packagesets
+ self.packagesets = [unicode(packageset) for packageset in packagesets]
self.rebuild = rebuild
self._store = IMasterStore(DistroSeries)
=== modified file 'lib/lp/translations/model/pofile.py'
--- lib/lp/translations/model/pofile.py 2010-10-29 10:17:14 +0000
+++ lib/lp/translations/model/pofile.py 2010-11-07 00:47:50 +0000
@@ -27,7 +27,6 @@
And,
Coalesce,
Exists,
- In,
Join,
LeftJoin,
Not,
@@ -1572,7 +1571,7 @@
TranslationTemplateItem.potemplateID == POFile.potemplateID,
POTMsgSet.id == TranslationTemplateItem.potmsgsetID,
POTMsgSet.msgid_singular == POMsgID.id,
- In(POMsgID.msgid, POTMsgSet.credits_message_ids)]
+ POMsgID.msgid.is_in(POTMsgSet.credits_message_ids)]
if untranslated:
message_select = Select(
True,
=== modified file 'lib/lp/translations/model/potemplate.py'
--- lib/lp/translations/model/potemplate.py 2010-10-29 05:58:51 +0000
+++ lib/lp/translations/model/potemplate.py 2010-11-07 00:47:50 +0000
@@ -34,7 +34,6 @@
And,
Count,
Desc,
- In,
Join,
LeftJoin,
Or,
@@ -467,7 +466,7 @@
result = store.using(POTMsgSet, origin1, origin2).find(
POTMsgSet,
TranslationTemplateItem.potemplate == self,
- In(POMsgID.msgid, POTMsgSet.credits_message_ids))
+ POMsgID.msgid.is_in(POTMsgSet.credits_message_ids))
# Filter these candidates because is_translation_credit checks for
# more conditions than the special msgids.
for potmsgset in result:
=== modified file 'lib/lp/translations/model/translationimportqueue.py'
--- lib/lp/translations/model/translationimportqueue.py 2010-10-29 10:09:04 +0000
+++ lib/lp/translations/model/translationimportqueue.py 2010-11-07 00:47:50 +0000
@@ -28,7 +28,6 @@
)
from storm.expr import (
And,
- Like,
Or,
)
from storm.locals import (
@@ -1363,7 +1362,7 @@
deletion_clauses.append(And(
TranslationImportQueueEntry.distroseries_id != None,
TranslationImportQueueEntry.date_status_changed < blocked_cutoff,
- Like(TranslationImportQueueEntry.path, '%.po')))
+ TranslationImportQueueEntry.path.like(u'%.po')))
entries = store.find(
TranslationImportQueueEntry, Or(*deletion_clauses))
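Note for reviewers: the same method-form migration applies to LIKE. The
Like() expression class gives way to Comparable.like(), and the pattern is
passed as unicode for the psycopg2 reasons above. A minimal sketch with a
hypothetical Entry class:

    from storm.expr import Like
    from storm.locals import Int, Storm, Unicode


    class Entry(Storm):
        __storm_table__ = "entry"
        id = Int(primary=True)
        path = Unicode()


    # Expression-class form, as the old code had it.
    old_expr = Like(Entry.path, u'%.po')
    # Method form, as rewritten in this branch.
    new_expr = Entry.path.like(u'%.po')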
=== modified file 'lib/lp/translations/scripts/migrate_variants.py'
--- lib/lp/translations/scripts/migrate_variants.py 2010-08-20 20:31:18 +0000
+++ lib/lp/translations/scripts/migrate_variants.py 2010-11-07 00:47:50 +0000
@@ -8,7 +8,6 @@
import logging
-from storm.expr import In
from zope.component import getUtility
from zope.interface import implements
@@ -70,15 +69,16 @@
self.start_at = None
else:
if self.title == 'TranslationMessage':
- results = self.store.find(TranslationMessage,
- In(TranslationMessage.id, object_ids))
- results.set(TranslationMessage.language==self.language,
- variant=None)
+ results = self.store.find(
+ TranslationMessage,
+ TranslationMessage.id.is_in(object_ids))
+ results.set(
+ TranslationMessage.language==self.language, variant=None)
else:
- results = self.store.find(POFile,
- In(POFile.id, object_ids))
- results.set(POFile.language==self.language,
- variant=None)
+ results = self.store.find(
+ POFile, POFile.id.is_in(object_ids))
+ results.set(
+ POFile.language==self.language, variant=None)
self.transaction.commit()
self.transaction.begin()
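Note for reviewers: migrate_variants.py also leans on ResultSet.set()
accepting column == value expressions alongside plain keyword arguments,
which is why the language assignment stays positional. A minimal sketch
against an in-memory SQLite store (the POFileRow class and table are
illustrative only):

    from storm.locals import Int, Store, Storm, Unicode, create_database


    class POFileRow(Storm):
        __storm_table__ = "pofile"
        id = Int(primary=True)
        language = Int()
        variant = Unicode()


    store = Store(create_database("sqlite:"))
    store.execute("CREATE TABLE pofile "
                  "(id INTEGER PRIMARY KEY, language INTEGER, variant TEXT)")
    row = POFileRow()
    row.language = 1
    row.variant = u'old'
    store.add(row)
    store.flush()  # assigns row.id from the database

    # Update matched rows in one statement: the positional argument is an
    # equality expression, the keyword argument a plain column assignment.
    result = store.find(POFileRow, POFileRow.id.is_in([row.id]))
    result.set(POFileRow.language == 2, variant=None)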
=== modified file 'setup.py'
--- setup.py 2010-10-26 03:50:55 +0000
+++ setup.py 2010-11-07 00:47:50 +0000
@@ -55,6 +55,7 @@
'numpy',
'oauth',
'paramiko',
+ 'psycopg2',
'python-memcached',
'pyasn1',
'pydkim',
=== modified file 'versions.cfg'
--- versions.cfg 2010-11-04 00:51:34 +0000
+++ versions.cfg 2010-11-07 00:47:50 +0000
@@ -49,6 +49,7 @@
paramiko = 1.7.4
Paste = 1.7.2
PasteDeploy = 1.3.3
+psycopg2 = 2.2.2
pyasn1 = 0.0.9a
pycrypto = 2.0.1
pydkim = 0.3-mbp-r7