[Merge] lp:~cjwatson/python-oops-datedir-repo/py3 into lp:python-oops-datedir-repo
Colin Watson has proposed merging lp:~cjwatson/python-oops-datedir-repo/py3 into lp:python-oops-datedir-repo.
Commit message:
Add Python 3 support.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/python-oops-datedir-repo/py3/+merge/341299
I had to take quite a bit of care around serialisation (unsurprisingly).
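Roughly, the pattern throughout the serializers is: read bytes, compare against bytes literals, and only turn things into text deliberately. A minimal stand-alone sketch of that pattern (read_headers and the sample report below are illustrative only, not code from this branch):

    import bz2
    from io import BytesIO

    try:
        from email.parser import BytesParser  # Python 3
    except ImportError:
        # Python 2: bytes == str, so the plain Parser is good enough.
        from email.parser import Parser as BytesParser

    def read_headers(fp):
        """Illustrative only: bytes-first reading of an rfc822-style report."""
        content = fp.read()
        if not content:
            raise IOError("Empty OOPS Report")
        # bzip2 magic number: a bytes-vs-bytes comparison on both Pythons.
        if content[:3] == b"BZh":
            content = bz2.decompress(content)
        msg = BytesParser().parse(BytesIO(content), headersonly=True)
        # Header values come back as text; the body is left untouched.
        return msg.get('Oops-Id'), msg.get_payload()

    report = b"Oops-Id: OOPS-A0001\n\ntraceback-text"
    print(read_headers(BytesIO(report)))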
--
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~cjwatson/python-oops-datedir-repo/py3 into lp:python-oops-datedir-repo.
=== modified file '.bzrignore'
--- .bzrignore 2011-12-20 05:25:36 +0000
+++ .bzrignore 2018-03-12 12:11:36 +0000
@@ -1,3 +1,4 @@
+__pycache__
./eggs/*
./.installed.cfg
./develop-eggs
=== modified file 'NEWS'
--- NEWS 2015-12-01 15:20:19 +0000
+++ NEWS 2018-03-12 12:11:36 +0000
@@ -6,6 +6,9 @@
Next
----
+* Fix test failure with recent versions of bson. (Colin Watson)
+* Add Python 3 support. (Colin Watson)
+
0.0.23
------
=== modified file 'oops_datedir_repo/__init__.py'
--- oops_datedir_repo/__init__.py 2015-12-01 15:20:19 +0000
+++ oops_datedir_repo/__init__.py 2018-03-12 12:11:36 +0000
@@ -14,6 +14,8 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# GNU Lesser General Public License version 3 (see the file LICENSE).
+from __future__ import absolute_import, print_function
+
# same format as sys.version_info: "A tuple containing the five components of
# the version number: major, minor, micro, releaselevel, and serial. All
# values except releaselevel are integers; the release level is 'alpha',
=== modified file 'oops_datedir_repo/anybson.py'
--- oops_datedir_repo/anybson.py 2012-02-10 19:24:56 +0000
+++ oops_datedir_repo/anybson.py 2018-03-12 12:11:36 +0000
@@ -13,6 +13,8 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# GNU Lesser General Public License version 3 (see the file LICENSE).
+from __future__ import absolute_import, print_function
+
__all__ = [
'dumps',
'loads',
=== modified file 'oops_datedir_repo/bsondump.py'
--- oops_datedir_repo/bsondump.py 2012-02-10 19:24:56 +0000
+++ oops_datedir_repo/bsondump.py 2018-03-12 12:11:36 +0000
@@ -22,10 +22,12 @@
usage: bsondump FILE
"""
+from __future__ import absolute_import, print_function
+
from pprint import pprint
import sys
-import anybson as bson
+from oops_datedir_repo import anybson as bson
def main(argv=None):
=== modified file 'oops_datedir_repo/prune.py'
--- oops_datedir_repo/prune.py 2012-09-26 06:57:23 +0000
+++ oops_datedir_repo/prune.py 2018-03-12 12:11:36 +0000
@@ -19,6 +19,8 @@
Currently only has support for the Launchpad bug tracker.
"""
+from __future__ import absolute_import, print_function
+
__metaclass__ = type
import datetime
=== modified file 'oops_datedir_repo/repository.py'
--- oops_datedir_repo/repository.py 2015-12-01 11:26:52 +0000
+++ oops_datedir_repo/repository.py 2018-03-12 12:11:36 +0000
@@ -16,6 +16,8 @@
"""The primary interface to oopses stored on disk - the DateDirRepo."""
+from __future__ import absolute_import, print_function
+
__metaclass__ = type
__all__ = [
@@ -31,9 +33,11 @@
from pytz import utc
-import anybson as bson
-import serializer
-import serializer_bson
+from oops_datedir_repo import (
+ anybson as bson,
+ serializer,
+ serializer_bson,
+ )
class DateDirRepo:
@@ -173,7 +177,7 @@
if prune:
os.unlink(candidate)
continue
- with file(candidate, 'rb') as report_file:
+ with open(candidate, 'rb') as report_file:
try:
report = serializer.read(report_file)
except IOError as e:
@@ -284,7 +288,7 @@
os.unlink(candidate)
deleted += 1
continue
- with file(candidate, 'rb') as report_file:
+ with open(candidate, 'rb') as report_file:
report = serializer.read(report_file)
report_time = report.get('time', None)
if (report_time is None or
=== modified file 'oops_datedir_repo/serializer.py'
--- oops_datedir_repo/serializer.py 2012-03-01 21:07:07 +0000
+++ oops_datedir_repo/serializer.py 2018-03-12 12:11:36 +0000
@@ -29,12 +29,14 @@
"""
+from __future__ import absolute_import, print_function
+
__all__ = [
'read',
]
import bz2
-from StringIO import StringIO
+from io import BytesIO
from oops_datedir_repo import (
anybson as bson,
@@ -46,7 +48,8 @@
def read(fp):
"""Deserialize an OOPS from a bson or rfc822 message.
- The whole file is read regardless of the OOPS format.
+ The whole file is read regardless of the OOPS format. It should be
+ opened in binary mode.
:raises IOError: If the file has no content.
"""
@@ -55,9 +58,9 @@
if len(content) == 0:
# This OOPS has no content
raise IOError("Empty OOPS Report")
- if content[0:3] == "BZh":
+ if content[0:3] == b"BZh":
content = bz2.decompress(content)
try:
- return serializer_bson.read(StringIO(content))
- except (KeyError, bson.InvalidBSON):
- return serializer_rfc822.read(StringIO(content))
+ return serializer_bson.read(BytesIO(content))
+ except (KeyError, ValueError, IndexError, bson.InvalidBSON):
+ return serializer_rfc822.read(BytesIO(content))
=== modified file 'oops_datedir_repo/serializer_bson.py'
--- oops_datedir_repo/serializer_bson.py 2012-02-10 19:24:56 +0000
+++ oops_datedir_repo/serializer_bson.py 2018-03-12 12:11:36 +0000
@@ -41,6 +41,8 @@
"""
+from __future__ import absolute_import, print_function
+
__all__ = [
'dumps',
'read',
@@ -49,7 +51,7 @@
__metaclass__ = type
-import anybson as bson
+from oops_datedir_repo import anybson as bson
def read(fp):
=== modified file 'oops_datedir_repo/serializer_rfc822.py'
--- oops_datedir_repo/serializer_rfc822.py 2011-11-16 03:44:36 +0000
+++ oops_datedir_repo/serializer_rfc822.py 2018-03-12 12:11:36 +0000
@@ -45,6 +45,8 @@
"""
+from __future__ import absolute_import, print_function
+
__all__ = [
'read',
'write',
@@ -52,45 +54,52 @@
__metaclass__ = type
-import datetime
+try:
+ from email.parser import BytesParser
+except ImportError:
+ # On Python 2, email.parser.Parser will do well enough, since
+ # bytes == str.
+ from email.parser import Parser as BytesParser
import logging
-import rfc822
import re
import urllib
import iso8601
+import six
+from six.moves import intern
+from six.moves.urllib_parse import (
+ quote,
+ unquote,
+ )
def read(fp):
"""Deserialize an OOPS from an RFC822 format message."""
- msg = rfc822.Message(fp)
- id = msg.getheader('oops-id')
- exc_type = msg.getheader('exception-type')
- exc_value = msg.getheader('exception-value')
- datestr = msg.getheader('date')
+ msg = BytesParser().parse(fp, headersonly=True)
+ id = msg.get('oops-id')
+ exc_type = msg.get('exception-type')
+ exc_value = msg.get('exception-value')
+ datestr = msg.get('date')
if datestr is not None:
- date = iso8601.parse_date(msg.getheader('date'))
+ date = iso8601.parse_date(msg.get('date'))
else:
date = None
- topic = msg.getheader('topic')
+ topic = msg.get('topic')
if topic is None:
- topic = msg.getheader('page-id')
- username = msg.getheader('user')
- url = msg.getheader('url')
+ topic = msg.get('page-id')
+ username = msg.get('user')
+ url = msg.get('url')
try:
- duration = float(msg.getheader('duration', '-1'))
+ duration = float(msg.get('duration', '-1'))
except ValueError:
duration = float(-1)
- informational = msg.getheader('informational')
- branch_nick = msg.getheader('branch')
- revno = msg.getheader('revision')
- reporter = msg.getheader('oops-reporter')
+ informational = msg.get('informational')
+ branch_nick = msg.get('branch')
+ revno = msg.get('revision')
+ reporter = msg.get('oops-reporter')
- # Explicitly use an iterator so we can process the file
- # sequentially. In most instances the iterator will actually
- # be the file object passed in because file objects should
- # support iteration.
- lines = iter(msg.fp)
+ # Explicitly use an iterator so we can process the file sequentially.
+ lines = iter(msg.get_payload().splitlines(True))
statement_pat = re.compile(r'^(\d+)-(\d+)(?:@([\w-]+))?\s+(.*)')
@@ -119,7 +128,7 @@
[int(start), int(end), db_id, statement])
elif is_req_var(line):
key, value = line.split('=', 1)
- req_vars.append([urllib.unquote(key), urllib.unquote(value)])
+ req_vars.append([unquote(key), unquote(value)])
elif is_traceback(line):
break
req_vars = dict(req_vars)
@@ -139,35 +148,39 @@
def _normalise_whitespace(s):
- """Normalise the whitespace in a string to spaces"""
+ """Normalise the whitespace in a bytestring to spaces."""
if s is None:
return None # (used by the cast to %s to get 'None')
- return ' '.join(s.split())
+ return b' '.join(s.split())
def _safestr(obj):
- if isinstance(obj, unicode):
+ if isinstance(obj, six.text_type):
return obj.replace('\\', '\\\\').encode('ASCII',
'backslashreplace')
# A call to str(obj) could raise anything at all.
# We'll ignore these errors, and print something
# useful instead, but also log the error.
# We disable the pylint warning for the blank except.
- try:
- value = str(obj)
- except:
- logging.getLogger('oops_datedir_repo.serializer_rfc822').exception(
- 'Error while getting a str '
- 'representation of an object')
- value = '<unprintable %s object>' % (
- str(type(obj).__name__))
- # Some str() calls return unicode objects.
- if isinstance(value, unicode):
- return _safestr(value)
+ if isinstance(obj, six.binary_type):
+ value = obj
+ else:
+ try:
+ value = str(obj)
+ except:
+ logging.getLogger('oops_datedir_repo.serializer_rfc822').exception(
+ 'Error while getting a str '
+ 'representation of an object')
+ value = '<unprintable %s object>' % (
+ str(type(obj).__name__))
+ # Some str() calls return unicode objects.
+ if isinstance(value, six.text_type):
+ return _safestr(value)
# encode non-ASCII characters
- value = value.replace('\\', '\\\\')
- value = re.sub(r'[\x80-\xff]',
- lambda match: '\\x%02x' % ord(match.group(0)), value)
+ value = value.replace(b'\\', b'\\\\')
+ value = re.sub(
+ br'[\x80-\xff]',
+ lambda match: ('\\x%02x' % ord(match.group(0))).encode('UTF-8'), value)
return value
@@ -179,12 +192,13 @@
return
value = _safestr(report[key])
value = _normalise_whitespace(value)
- chunks.append('%s: %s\n' % (label, value))
+ chunks.append(label.encode('UTF-8') + b': ' + value + b'\n')
header('Oops-Id', 'id', optional=False)
header('Exception-Type', 'type')
header('Exception-Value', 'value')
if 'time' in report:
- chunks.append('Date: %s\n' % report['time'].isoformat())
+ chunks.append(
+ ('Date: %s\n' % report['time'].isoformat()).encode('UTF-8'))
header('Page-Id', 'topic')
header('Branch', 'branch_nick')
header('Revision', 'revno')
@@ -193,7 +207,7 @@
header('Duration', 'duration')
header('Informational', 'informational')
header('Oops-Reporter', 'reporter')
- chunks.append('\n')
+ chunks.append(b'\n')
safe_chars = ';/\\?:@&+$, ()*!'
if 'req_vars' in report:
try:
@@ -201,17 +215,19 @@
except AttributeError:
items = report['req_vars']
for key, value in items:
- chunks.append('%s=%s\n' % (
- urllib.quote(_safestr(key), safe_chars),
- urllib.quote(_safestr(value), safe_chars)))
- chunks.append('\n')
+ chunk = '%s=%s\n' % (
+ quote(_safestr(key), safe_chars),
+ quote(_safestr(value), safe_chars))
+ chunks.append(chunk.encode('UTF-8'))
+ chunks.append(b'\n')
if 'timeline' in report:
for row in report['timeline']:
(start, end, category, statement) = row[:4]
- chunks.append('%05d-%05d@%s %s\n' % (
- start, end, _safestr(category),
- _safestr(_normalise_whitespace(statement))))
- chunks.append('\n')
+ chunks.append(
+ ('%05d-%05d@' % (start, end)).encode('UTF-8') +
+ _safestr(category) + b' ' +
+ _normalise_whitespace(_safestr(statement)) + b'\n')
+ chunks.append(b'\n')
if 'tb_text' in report:
chunks.append(_safestr(report['tb_text']))
return chunks
=== modified file 'oops_datedir_repo/tests/__init__.py'
--- oops_datedir_repo/tests/__init__.py 2012-09-26 06:35:34 +0000
+++ oops_datedir_repo/tests/__init__.py 2018-03-12 12:11:36 +0000
@@ -15,6 +15,8 @@
"""Tests for oops_datedir_repo."""
+from __future__ import absolute_import, print_function
+
from unittest import TestLoader
=== modified file 'oops_datedir_repo/tests/test_repository.py'
--- oops_datedir_repo/tests/test_repository.py 2015-12-01 12:00:09 +0000
+++ oops_datedir_repo/tests/test_repository.py 2018-03-12 12:11:36 +0000
@@ -15,6 +15,8 @@
"""Tests for the date-directory based repository."""
+from __future__ import absolute_import, print_function
+
__metaclass__ = type
import datetime
@@ -28,6 +30,7 @@
TempDir,
)
from pytz import utc
+import six
import testtools
from testtools.matchers import (
Equals,
@@ -76,7 +79,7 @@
def test_publish_permissions_hashnames(self):
repo = DateDirRepo(self.useFixture(TempDir()).path, stash_path=True)
report = {'id': 'OOPS-91T1'}
- now = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
# Set up default file creation mode to rwx------ as some restrictive
# servers do.
@@ -102,7 +105,7 @@
def test_publish_via_hash(self):
repo = DateDirRepo(self.useFixture(TempDir()).path)
- now = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
# Note the presence of 'id' in the report: this is included in the hash
# calculation (because there is no reason not to - we don't promise
# that reports only differing by id will be assigned the same id even
@@ -128,7 +131,7 @@
def test_multiple_hash_publications(self):
# The initial datedir hash code could only publish one oops a day.
repo = DateDirRepo(self.useFixture(TempDir()).path)
- now = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
report = {'time': now}
repo.publish(report, now)
report2 = {'time': now, 'foo': 'bar'}
@@ -138,7 +141,7 @@
# oops_amqp wants to publish to a DateDirRepo but already has an id
# that the user has been told about.
repo = DateDirRepo(self.useFixture(TempDir()).path, inherit_id=True)
- now = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
report = {'time': now, 'id': '45'}
self.assertEqual(['45'], repo.publish(dict(report), now))
# And to be sure, check the file on disk.
@@ -151,7 +154,7 @@
# The id reuse and file allocation strategies should be separate.
repo = DateDirRepo(self.useFixture(TempDir()).path,
inherit_id=True, stash_path=True)
- now = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
report = {'time': now, 'id': '45'}
published_report = dict(report)
self.assertEqual(['45'], repo.publish(published_report, now))
@@ -168,7 +171,7 @@
# too tightly bound to disk publishing.
repo = DateDirRepo(self.useFixture(TempDir()).path, stash_path=True,
inherit_id=True)
- now = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
report = {'time': now, 'id': '45'}
expected_disk_report = dict(report)
self.assertEqual(['45'], repo.publish(report, now))
@@ -183,7 +186,7 @@
# If an OOPS being republished is not republished, it is preserved on
# disk.
repo = DateDirRepo(self.useFixture(TempDir()).path)
- now = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
report = {'time': now}
repo.publish(report, now)
dir = repo.root + '/2006-04-01/'
@@ -254,7 +257,7 @@
# A .tmp file more than 24 hours old is probably never going to get
# renamed into place, so we just unlink it.
repo = DateDirRepo(self.useFixture(TempDir()).path)
- now = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
report = {'time': now}
repo.publish(report, now)
dir = repo.root + '/2006-04-01/'
@@ -274,7 +277,7 @@
# are unlikely to ever get fleshed out when more than 24 hours old,
# so we prune them.
repo = DateDirRepo(self.useFixture(TempDir()).path)
- now = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
report = {'time': now}
repo.publish(report, now)
dir = repo.root + '/2006-04-01/'
@@ -306,7 +309,7 @@
def test_get_config_value(self):
# Config values can be asked for from the repository.
repo = DateDirRepo(self.useFixture(TempDir()).path)
- pruned = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ pruned = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
repo.set_config('pruned-until', pruned)
# Fresh instance, no memory tricks.
repo = DateDirRepo(repo.root)
@@ -315,7 +318,7 @@
def test_set_config_value(self):
# Config values are just keys in a bson document.
repo = DateDirRepo(self.useFixture(TempDir()).path)
- pruned = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ pruned = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
repo.set_config('pruned-until', pruned)
with open(repo.root + '/metadata/config.bson', 'rb') as config_file:
from_bson = bson.loads(config_file.read())
@@ -345,14 +348,14 @@
def test_prune_unreferenced_no_oopses(self):
# This shouldn't crash.
repo = DateDirRepo(self.useFixture(TempDir()).path, inherit_id=True)
- now = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
old = now - datetime.timedelta(weeks=1)
repo.prune_unreferenced(old, now, [])
def test_prune_unreferenced_no_references(self):
# When there are no references, everything specified is zerged.
repo = DateDirRepo(self.useFixture(TempDir()).path, inherit_id=True)
- now = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
old = now - datetime.timedelta(weeks=1)
report = {'time': now - datetime.timedelta(hours=5)}
repo.publish(report, report['time'])
@@ -363,7 +366,7 @@
# Pruning only affects stuff in the datedirs selected by the dates.
repo = DateDirRepo(
self.useFixture(TempDir()).path, inherit_id=True, stash_path=True)
- now = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
old = now - datetime.timedelta(weeks=1)
before = {'time': old - datetime.timedelta(minutes=1)}
after = {'time': now + datetime.timedelta(minutes=1)}
@@ -376,7 +379,7 @@
def test_prune_referenced_inside_dates_kept(self):
repo = DateDirRepo(
self.useFixture(TempDir()).path, inherit_id=True, stash_path=True)
- now = datetime.datetime(2006, 04, 01, 00, 30, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 30, 0, tzinfo=utc)
old = now - datetime.timedelta(weeks=1)
report = {'id': 'foo', 'time': now - datetime.timedelta(minutes=1)}
repo.publish(report, report['time'])
@@ -387,7 +390,7 @@
# If a report has a wonky or missing time, pruning treats it as being
# timed on midnight of the datedir day it is on.
repo = DateDirRepo(self.useFixture(TempDir()).path, stash_path=True)
- now = datetime.datetime(2006, 04, 01, 00, 01, 00, tzinfo=utc)
+ now = datetime.datetime(2006, 4, 1, 0, 1, 0, tzinfo=utc)
old = now - datetime.timedelta(minutes=2)
badtime = {'time': now - datetime.timedelta(weeks=2)}
missingtime = {}
@@ -420,7 +423,11 @@
if name.startswith('OOPS-') and name.endswith('.tmp'):
oops_tmp.append(f)
return f
- self.useFixture(MonkeyPatch('__builtin__.open', open_intercept))
+ if six.PY3:
+ open_name = 'builtins.open'
+ else:
+ open_name = '__builtin__.open'
+ self.useFixture(MonkeyPatch(open_name, open_intercept))
repo = DateDirRepo(self.useFixture(TempDir()).path)
repo.publish({'id': '1'})
=== modified file 'oops_datedir_repo/tests/test_serializer.py'
--- oops_datedir_repo/tests/test_serializer.py 2012-03-01 20:42:48 +0000
+++ oops_datedir_repo/tests/test_serializer.py 2018-03-12 12:11:36 +0000
@@ -15,11 +15,13 @@
"""Tests for the generic serialization support."""
+from __future__ import absolute_import, print_function
+
__metaclass__ = type
import bz2
import datetime
-import StringIO
+from io import BytesIO
from pytz import utc
import testtools
@@ -57,23 +59,23 @@
expected_dict['revno'] = None
def test_read_detect_rfc822(self):
- source_file = StringIO.StringIO()
+ source_file = BytesIO()
write(dict(self.source_dict), source_file)
source_file.seek(0)
self.assertEqual(self.expected_dict, read(source_file))
def test_read_detect_bson(self):
- source_file = StringIO.StringIO()
+ source_file = BytesIO()
source_file.write(dumps(dict(self.source_dict)))
source_file.seek(0)
self.assertEqual(self.expected_dict, read(source_file))
def test_read_detect_bz2(self):
- source_file = StringIO.StringIO()
+ source_file = BytesIO()
source_file.write(bz2.compress(dumps(dict(self.source_dict))))
source_file.seek(0)
self.assertEqual(self.expected_dict, read(source_file))
def test_ioerror_on_empty_oops(self):
- source_file = StringIO.StringIO()
+ source_file = BytesIO()
self.assertRaises(IOError, read, source_file)
=== modified file 'oops_datedir_repo/tests/test_serializer_bson.py'
--- oops_datedir_repo/tests/test_serializer_bson.py 2012-02-10 19:24:56 +0000
+++ oops_datedir_repo/tests/test_serializer_bson.py 2018-03-12 12:11:36 +0000
@@ -16,10 +16,12 @@
"""Tests for bson based serialization."""
+from __future__ import absolute_import, print_function
+
__metaclass__ = type
import datetime
-import StringIO
+from io import BytesIO
from pytz import utc
import testtools
@@ -54,7 +56,7 @@
[5, 10, 'store_b', 'SELECT 2'],
]
}
- source_file = StringIO.StringIO(bson.dumps(source_dict))
+ source_file = BytesIO(bson.dumps(source_dict))
expected_dict = dict(source_dict)
# Unsupplied but filled on read
expected_dict['branch_nick'] = None
@@ -69,7 +71,7 @@
source_dict = {
'id': 'OOPS-A0001',
}
- source_file = StringIO.StringIO(bson.dumps(source_dict))
+ source_file = BytesIO(bson.dumps(source_dict))
report = read(source_file)
self.assertEqual(report['id'], 'OOPS-A0001')
self.assertEqual(report['type'], None)
@@ -93,7 +95,7 @@
'id': 'OOPS-A0001',
'type': 'NotFound',
'value': 'error message',
- 'time': datetime.datetime(2005, 04, 01, 00, 00, 00, tzinfo=utc),
+ 'time': datetime.datetime(2005, 4, 1, 0, 0, 0, tzinfo=utc),
'topic': 'IFoo:+foo-template',
'tb_text': 'traceback-text',
'username': 'Sample User',
=== modified file 'oops_datedir_repo/tests/test_serializer_rfc822.py'
--- oops_datedir_repo/tests/test_serializer_rfc822.py 2011-11-16 03:44:36 +0000
+++ oops_datedir_repo/tests/test_serializer_rfc822.py 2018-03-12 12:11:36 +0000
@@ -15,10 +15,12 @@
"""Tests for the legacy rfc822 based [de]serializer."""
+from __future__ import absolute_import, print_function
+
__metaclass__ = type
import datetime
-import StringIO
+from io import BytesIO
from textwrap import dedent
from pytz import utc
@@ -35,7 +37,7 @@
def test_read(self):
"""Test ErrorReport.read()."""
- fp = StringIO.StringIO(dedent("""\
+ fp = BytesIO(dedent("""\
Oops-Id: OOPS-A0001
Exception-Type: NotFound
Exception-Value: error message
@@ -52,7 +54,7 @@
00001-00005@store_a SELECT 1
00005-00010@store_b SELECT 2
- traceback-text"""))
+ traceback-text""").encode('UTF-8'))
report = read(fp)
self.assertEqual(report['id'], 'OOPS-A0001')
self.assertEqual(report['type'], 'NotFound')
@@ -76,7 +78,7 @@
def test_read_blankline_req_vars(self):
"""Test ErrorReport.read() for old logs with a blankline between
reqvars."""
- fp = StringIO.StringIO(dedent("""\
+ fp = BytesIO(dedent("""\
Oops-Id: OOPS-A0001
Exception-Type: NotFound
Exception-Value: error message
@@ -95,7 +97,7 @@
00005-00010@store_b SELECT 2
traceback-text
- foo/bar"""))
+ foo/bar""").encode('UTF-8'))
report = read(fp)
self.assertEqual(report['id'], 'OOPS-A0001')
self.assertEqual({
@@ -112,7 +114,7 @@
def test_read_no_store_id(self):
"""Test ErrorReport.read() for old logs with no store_id."""
- fp = StringIO.StringIO(dedent("""\
+ fp = BytesIO(dedent("""\
Oops-Id: OOPS-A0001
Exception-Type: NotFound
Exception-Value: error message
@@ -129,7 +131,7 @@
00001-00005 SELECT 1
00005-00010 SELECT 2
- traceback-text"""))
+ traceback-text""").encode('UTF-8'))
report = read(fp)
self.assertEqual(report['id'], 'OOPS-A0001')
self.assertEqual(report['type'], 'NotFound')
@@ -152,7 +154,7 @@
def test_read_branch_nick_revno(self):
"""Test ErrorReport.read()."""
- fp = StringIO.StringIO(dedent("""\
+ fp = BytesIO(dedent("""\
Oops-Id: OOPS-A0001
Exception-Type: NotFound
Exception-Value: error message
@@ -170,57 +172,57 @@
00001-00005@store_a SELECT 1
00005-00010@store_b SELECT 2
- traceback-text"""))
+ traceback-text""").encode('UTF-8'))
report = read(fp)
self.assertEqual(report['branch_nick'], 'mybranch')
self.assertEqual(report['revno'], '45')
def test_read_duration_as_string(self):
"""Test ErrorReport.read()."""
- fp = StringIO.StringIO(dedent("""\
+ fp = BytesIO(dedent("""\
Oops-Id: OOPS-A0001
Duration: foo/bar
- """))
+ """).encode('UTF-8'))
report = read(fp)
self.assertEqual(report['duration'], -1)
def test_read_reporter(self):
"""Test ErrorReport.read()."""
- fp = StringIO.StringIO(dedent("""\
+ fp = BytesIO(dedent("""\
Oops-Id: OOPS-A0001
Oops-Reporter: foo/bar
- """))
+ """).encode('UTF-8'))
report = read(fp)
self.assertEqual(report['reporter'], 'foo/bar')
def test_read_pageid_to_topic(self):
"""Test ErrorReport.read()."""
- fp = StringIO.StringIO(dedent("""\
+ fp = BytesIO(dedent("""\
Oops-Id: OOPS-A0001
Page-Id: IFoo:+foo-template
- """))
+ """).encode('UTF-8'))
report = read(fp)
self.assertEqual(report['topic'], 'IFoo:+foo-template')
def test_read_informational_read(self):
"""Test ErrorReport.read()."""
- fp = StringIO.StringIO(dedent("""\
+ fp = BytesIO(dedent("""\
Oops-Id: OOPS-A0001
Informational: True
- """))
+ """).encode('UTF-8'))
report = read(fp)
self.assertEqual('True', report['informational'])
def test_read_no_informational_no_key(self):
"""Test ErrorReport.read()."""
- fp = StringIO.StringIO(dedent("""\
+ fp = BytesIO(dedent("""\
Oops-Id: OOPS-A0001
- """))
+ """).encode('UTF-8'))
report = read(fp)
self.assertFalse('informational' in report)
@@ -228,9 +230,9 @@
# If we get a crazy-small oops, we can read it sensibly. Because there
# is existing legacy code, all keys are filled in with None, [] or {}
# rather than being empty.
- fp = StringIO.StringIO(dedent("""\
+ fp = BytesIO(dedent("""\
Oops-Id: OOPS-A0001
- """))
+ """).encode('UTF-8'))
report = read(fp)
self.assertEqual(report['id'], 'OOPS-A0001')
self.assertEqual(report['type'], None)
@@ -250,12 +252,12 @@
class TestSerializing(testtools.TestCase):
def test_write_file(self):
- output = StringIO.StringIO()
+ output = BytesIO()
report = {
'id': 'OOPS-A0001',
'type': 'NotFound',
'value': 'error message',
- 'time': datetime.datetime(2005, 04, 01, 00, 00, 00, tzinfo=utc),
+ 'time': datetime.datetime(2005, 4, 1, 0, 0, 0, tzinfo=utc),
'topic': 'IFoo:+foo-template',
'tb_text': 'traceback-text',
'username': 'Sample User',
@@ -271,7 +273,7 @@
'revno': '45',
}
write(report, output)
- self.assertEqual(output.getvalue(), dedent("""\
+ self.assertEqual(output.getvalue().decode('UTF-8'), dedent("""\
Oops-Id: OOPS-A0001
Exception-Type: NotFound
Exception-Value: error message
@@ -298,7 +300,7 @@
'id': 'OOPS-A0001',
'type': 'NotFound',
'value': 'error message',
- 'time': datetime.datetime(2005, 04, 01, 00, 00, 00, tzinfo=utc),
+ 'time': datetime.datetime(2005, 4, 1, 0, 0, 0, tzinfo=utc),
'topic': 'IFoo:+foo-template',
'tb_text': 'traceback-text',
'username': 'Sample User',
@@ -314,26 +316,26 @@
'revno': '45',
}
self.assertEqual([
- "Oops-Id: OOPS-A0001\n",
- "Exception-Type: NotFound\n",
- "Exception-Value: error message\n",
- "Date: 2005-04-01T00:00:00+00:00\n",
- "Page-Id: IFoo:+foo-template\n",
- "Branch: mybranch\n",
- "Revision: 45\n",
- "User: Sample User\n",
- "URL: http://localhost:9000/foo\n",
- "Duration: 42\n",
- "Informational: False\n",
- "\n",
- "HTTP_USER_AGENT=Mozilla/5.0\n",
- "HTTP_REFERER=http://localhost:9000/\n",
- "name%3Dfoo=hello%0Aworld\n",
- "\n",
- "00001-00005@store_a SELECT 1\n",
- "00005-00010@store_b SELECT 2\n",
- "\n",
- "traceback-text",
+ b"Oops-Id: OOPS-A0001\n",
+ b"Exception-Type: NotFound\n",
+ b"Exception-Value: error message\n",
+ b"Date: 2005-04-01T00:00:00+00:00\n",
+ b"Page-Id: IFoo:+foo-template\n",
+ b"Branch: mybranch\n",
+ b"Revision: 45\n",
+ b"User: Sample User\n",
+ b"URL: http://localhost:9000/foo\n",
+ b"Duration: 42\n",
+ b"Informational: False\n",
+ b"\n",
+ b"HTTP_USER_AGENT=Mozilla/5.0\n",
+ b"HTTP_REFERER=http://localhost:9000/\n",
+ b"name%3Dfoo=hello%0Aworld\n",
+ b"\n",
+ b"00001-00005@store_a SELECT 1\n",
+ b"00005-00010@store_b SELECT 2\n",
+ b"\n",
+ b"traceback-text",
],
to_chunks(report))
@@ -342,16 +344,16 @@
# sensibly.
report = {'id': 'OOPS-1234'}
self.assertEqual([
- "Oops-Id: OOPS-1234\n",
- "\n"
+ b"Oops-Id: OOPS-1234\n",
+ b"\n"
], to_chunks(report))
def test_reporter(self):
report = {'reporter': 'foo', 'id': 'bar'}
self.assertEqual([
- "Oops-Id: bar\n",
- "Oops-Reporter: foo\n",
- "\n",
+ b"Oops-Id: bar\n",
+ b"Oops-Reporter: foo\n",
+ b"\n",
], to_chunks(report))
def test_bad_strings(self):
@@ -360,13 +362,13 @@
# passed through an escape process.
report = {'id': u'\xeafoo'}
self.assertEqual([
- "Oops-Id: \\xeafoo\n",
- "\n",
+ b"Oops-Id: \\xeafoo\n",
+ b"\n",
], to_chunks(report))
report = {'id': '\xeafoo'}
self.assertEqual([
- "Oops-Id: \\xeafoo\n",
- "\n",
+ b"Oops-Id: \\xeafoo\n",
+ b"\n",
], to_chunks(report))
def test_write_reqvars_dict(self):
@@ -377,12 +379,12 @@
'id': 'OOPS-1234',
}
self.assertEqual([
- "Oops-Id: OOPS-1234\n",
- "\n",
- "HTTP_REFERER=http://localhost:9000/\n",
- "HTTP_USER_AGENT=Mozilla/5.0\n",
- "name%3Dfoo=hello%0Aworld\n",
- "\n",
+ b"Oops-Id: OOPS-1234\n",
+ b"\n",
+ b"HTTP_REFERER=http://localhost:9000/\n",
+ b"HTTP_USER_AGENT=Mozilla/5.0\n",
+ b"name%3Dfoo=hello%0Aworld\n",
+ b"\n",
], to_chunks(report))
def test_to_chunks_enhanced_timeline(self):
@@ -396,9 +398,9 @@
]
}
self.assertEqual([
- "Oops-Id: OOPS-1234\n",
- "\n",
- "00000-00001@foo bar\n",
- "\n",
+ b"Oops-Id: OOPS-1234\n",
+ b"\n",
+ b"00000-00001@foo bar\n",
+ b"\n",
], to_chunks(report))
=== modified file 'setup.py'
--- setup.py 2015-12-01 15:20:19 +0000
+++ setup.py 2018-03-12 12:11:36 +0000
@@ -19,7 +19,8 @@
from distutils.core import setup
import os.path
-description = file(os.path.join(os.path.dirname(__file__), 'README'), 'rb').read()
+with open(os.path.join(os.path.dirname(__file__), 'README')) as f:
+ description = f.read()
setup(name="oops_datedir_repo",
version="0.0.24",
@@ -36,6 +37,8 @@
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 3',
],
install_requires = [
'bson',
@@ -43,6 +46,7 @@
'launchpadlib', # Needed for pruning - perhaps should be optional.
'oops>=0.0.11',
'pytz',
+ 'six',
],
extras_require = dict(
test=[
=== modified file 'versions.cfg'
--- versions.cfg 2013-03-12 14:37:56 +0000
+++ versions.cfg 2018-03-12 12:11:36 +0000
@@ -18,6 +18,7 @@
pytz = 2011n
setuptools = 0.6c11
simplejson = 2.1.3
+six = 1.11.0
testtools = 0.9.14
wadllib = 1.2.0
wsgi-intercept = 0.4