zeitgeist team mailing list archive
-
zeitgeist team
-
Mailing list archive
-
Message #02527
[Merge] lp:~seif/zeitgeist/add_cache_for_get_events into lp:zeitgeist
Seif Lotfy has proposed merging lp:~seif/zeitgeist/add_cache_for_get_events into lp:zeitgeist.
Requested reviews:
Zeitgeist Framework Team (zeitgeist)
I added a cache to reduce the number of database calls made by get_events.
Basically, I separate the already-cached ids from the ids handed to get_events,
then I query the SQL database only for the uncached events, sort them all together, and voilà.
The speed improvement is very nice:
old DEBUG:zeitgeist.engine:Got 34 events in 0.001895s
new DEBUG:zeitgeist.engine:Got 34 events in 0.000248s
--
https://code.launchpad.net/~seif/zeitgeist/add_cache_for_get_events/+merge/42327
Your team Zeitgeist Framework Team is requested to review the proposed merge of lp:~seif/zeitgeist/add_cache_for_get_events into lp:zeitgeist.
=== modified file '_zeitgeist/engine/main.py'
--- _zeitgeist/engine/main.py 2010-11-29 09:09:13 +0000
+++ _zeitgeist/engine/main.py 2010-12-01 00:31:58 +0000
@@ -118,6 +118,7 @@
self._manifestation = TableLookup(cursor, "manifestation")
self._mimetype = TableLookup(cursor, "mimetype")
self._actor = TableLookup(cursor, "actor")
+ self._event_cache = {}
@property
def extensions(self):
@@ -166,10 +167,14 @@
if not ids:
return []
- rows = self._cursor.execute("""
- SELECT * FROM event_view
- WHERE id IN (%s)
- """ % ",".join("%d" % id for id in ids)).fetchall()
+ # Split ids into cached and uncached
+ uncached_ids = []
+ cached_ids = []
+ for id in ids:
+ if self._event_cache.has_key(id):
+ cached_ids.append(id)
+ else:
+ uncached_ids.append(id)
id_hash = defaultdict(list)
for n, id in enumerate(ids):
@@ -183,6 +188,22 @@
# deleted
events = {}
sorted_events = [None]*len(ids)
+
+ for id in cached_ids:
+ event = self._event_cache[id]
+ if event:
+ event = self.extensions.apply_get_hooks(event, sender)
+ if event is not None:
+ for n in id_hash[event.id]:
+ # insert the event into all necessary spots (LP: #673916)
+ sorted_events[n] = event
+
+ # Get uncached events
+ rows = self._cursor.execute("""
+ SELECT * FROM event_view
+ WHERE id IN (%s)
+ """ % ",".join("%d" % id for id in uncached_ids)).fetchall()
+
for row in rows:
# Assumption: all rows of a same event for its different
# subjects are in consecutive order.
@@ -194,12 +215,14 @@
events[event.id] = event
else:
event = events[event.id]
+ self._event_cache[event.id] = event
event.append_subject(self._get_subject_from_row(row))
event = self.extensions.apply_get_hooks(event, sender)
if event is not None:
for n in id_hash[event.id]:
# insert the event into all necessary spots (LP: #673916)
sorted_events[n] = event
+
log.debug("Got %d events in %fs" % (len(sorted_events), time.time()-t))
return sorted_events
@@ -603,6 +626,11 @@
""" % ",".join(str(int(_id)) for _id in ids))
timestamps = self._cursor.fetchone()
+ # Remove events from cache
+ for id in ids:
+ if self._event_cache.has_key(id):
+ del self._event_cache[id]
+
# Make sure that we actually found some events with these ids...
# We can't do all(timestamps) here because the timestamps may be 0
if timestamps and timestamps[0] is not None and timestamps[1] is not None:
@@ -612,7 +640,6 @@
log.debug("Deleted %s" % map(int, ids))
self.extensions.apply_post_delete(ids, sender)
-
return timestamps
else:
log.debug("Tried to delete non-existing event(s): %s" % map(int, ids))