@miebach
Created January 27, 2014 00:42
diff -r 961ac99baa29 couchdb/client.py
--- a/couchdb/client.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/client.py Fri Jan 17 11:03:56 2014 +0100
@@ -13,9 +13,9 @@
>>> doc_id, doc_rev = db.save({'type': 'Person', 'name': 'John Doe'})
>>> doc = db[doc_id]
>>> doc['type']
-'Person'
+u'Person'
>>> doc['name']
-'John Doe'
+u'John Doe'
>>> del db[doc.id]
>>> doc.id in db
False
@@ -23,13 +23,15 @@
>>> del server['python-tests']
"""
+from __future__ import print_function
+
import itertools
import mimetypes
+import six
import os
from types import FunctionType
from inspect import getsource
from textwrap import dedent
-import re
import warnings
from couchdb import http, json
@@ -76,7 +78,7 @@
:param full_commit: turn on the X-Couch-Full-Commit header
:param session: an http.Session instance or None for a default session
"""
- if isinstance(url, basestring):
+ if isinstance(url, six.string_types):
self.resource = http.Resource(url, session or http.Session())
else:
self.resource = url # treat as a Resource object
@@ -114,6 +116,9 @@
except:
return False
+ def __bool__(self):
+ return self.__nonzero__()
+
def __repr__(self):
return '<%s %r>' % (type(self).__name__, self.resource.url)
@@ -156,7 +161,7 @@
Note that this results in a request being made, and can also be used
to check for the availability of the server.
- :rtype: `unicode`"""
+ :rtype: `six.text_type`"""
status, headers, data = self.resource.get_json()
return data['version']
@@ -239,18 +244,18 @@
>>> doc = db[doc_id]
>>> doc #doctest: +ELLIPSIS
- <Document '...'@... {...}>
+ <Document u'...'@... {...}>
Documents are represented as instances of the `Row` class, which is
basically just a normal dictionary with the additional attributes ``id`` and
``rev``:
>>> doc.id, doc.rev #doctest: +ELLIPSIS
- ('...', ...)
+ (u'...', ...)
>>> doc['type']
- 'Person'
+ u'Person'
>>> doc['name']
- 'John Doe'
+ u'John Doe'
To update an existing document, you use item access, too:
@@ -272,7 +277,7 @@
"""
def __init__(self, url, name=None, session=None):
- if isinstance(url, basestring):
+ if isinstance(url, six.string_types):
if not url.startswith('http'):
url = DEFAULT_BASE_URL + url
self.resource = http.Resource(url, session)
@@ -313,6 +318,9 @@
except:
return False
+ def __bool__(self):
+ return self.__nonzero__()
+
def __delitem__(self, id):
"""Remove the document with the specified ID from the database.
@@ -351,7 +359,7 @@
Note that this may require a request to the server unless the name has
already been cached by the `info()` method.
- :rtype: basestring
+ :rtype: six.string_types
"""
if self._name is None:
self.info()
@@ -377,7 +385,7 @@
:param data: the data to store in the document
:return: the ID of the created document
- :rtype: `unicode`
+ :rtype: `six.text_type`
"""
warnings.warn('Database.create is deprecated, please use Database.save instead [2010-04-13]',
DeprecationWarning, stacklevel=2)
@@ -476,7 +484,7 @@
:rtype: `str`
:since: 0.6
"""
- if not isinstance(src, basestring):
+ if not isinstance(src, six.string_types):
if not isinstance(src, dict):
if hasattr(src, 'items'):
src = dict(src.items())
@@ -485,7 +493,7 @@
type(src))
src = src['_id']
- if not isinstance(dest, basestring):
+ if not isinstance(dest, six.string_types):
if not isinstance(dest, dict):
if hasattr(dest, 'items'):
dest = dict(dest.items())
@@ -500,7 +508,10 @@
_, _, data = self.resource._request('COPY', src,
headers={'Destination': dest})
- data = json.decode(data.read())
+ data = data.read()
+ if isinstance(data, six.binary_type):
+ data = data.decode('utf-8')
+ data = json.decode(data)
return data['rev']
def delete(self, doc):
@@ -522,7 +533,7 @@
>>> db.delete(doc)
Traceback (most recent call last):
...
- ResourceConflict: ('conflict', 'Document update conflict.')
+ ResourceConflict: (u'conflict', u'Document update conflict.')
>>> del server['python-tests']
@@ -624,7 +635,7 @@
of the `default` argument if the attachment is not found
:since: 0.4.1
"""
- if isinstance(id_or_doc, basestring):
+ if isinstance(id_or_doc, six.string_types):
id = id_or_doc
else:
id = id_or_doc['_id']
@@ -683,17 +694,17 @@
... emit(doc.name, null);
... }'''
>>> for row in db.query(map_fun):
- ... print row.key
+ ... print(row.key)
John Doe
Mary Jane
>>> for row in db.query(map_fun, descending=True):
- ... print row.key
+ ... print(row.key)
Mary Jane
John Doe
>>> for row in db.query(map_fun, key='John Doe'):
- ... print row.key
+ ... print(row.key)
John Doe
>>> del server['python-tests']
@@ -723,10 +734,10 @@
... Document(type='Person', name='Mary Jane'),
... Document(type='City', name='Gotham City')
... ]):
- ... print repr(doc) #doctest: +ELLIPSIS
- (True, '...', '...')
- (True, '...', '...')
- (True, '...', '...')
+ ... print(repr(doc)) #doctest: +ELLIPSIS
+ (True, u'...', u'...')
+ (True, u'...', u'...')
+ (True, u'...', u'...')
>>> del server['python-tests']
@@ -808,7 +819,7 @@
>>> db['gotham'] = dict(type='City', name='Gotham City')
>>> for row in db.view('_all_docs'):
- ... print row.id
+ ... print(row.id)
gotham
>>> del server['python-tests']
@@ -928,7 +939,7 @@
for ln in lines:
if not ln: # skip heartbeats
continue
- doc = json.decode(ln)
+ doc = json.decode(ln.decode('utf-8'))
if 'last_seq' in doc: # consume the rest of the response if this
for ln in lines: # was the last line, allows conn reuse
pass
@@ -982,7 +993,7 @@
def id(self):
"""The document ID.
- :rtype: basestring
+ :rtype: six.string_types
"""
return self['_id']
@@ -990,7 +1001,7 @@
def rev(self):
"""The document revision.
- :rtype: basestring
+ :rtype: six.string_types
"""
return self['_rev']
@@ -999,7 +1010,7 @@
"""Abstract representation of a view or query."""
def __init__(self, url, wrapper=None, session=None):
- if isinstance(url, basestring):
+ if isinstance(url, six.string_types):
self.resource = http.Resource(url, session)
else:
self.resource = url
@@ -1071,7 +1082,7 @@
retval = {}
for name, value in options.items():
if name in ('key', 'startkey', 'endkey') \
- or not isinstance(value, basestring):
+ or not isinstance(value, six.string_types):
value = json.encode(value)
retval[name] = value
return retval
@@ -1117,7 +1128,7 @@
>>> people = results[['Person']:['Person','ZZZZ']]
>>> for person in people:
- ... print person.value
+ ... print(person.value)
John Doe
Mary Jane
>>> people.total_rows, people.offset
@@ -1128,7 +1139,7 @@
can still return multiple rows:
>>> list(results[['City', 'Gotham City']])
- [<Row id='gotham', key=['City', 'Gotham City'], value='Gotham City'>]
+ [<Row id=u'gotham', key=[u'City', u'Gotham City'], value=u'Gotham City'>]
>>> del server['python-tests']
"""
diff -r 961ac99baa29 couchdb/design.py
--- a/couchdb/design.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/design.py Fri Jan 17 11:03:56 2014 +0100
@@ -8,6 +8,8 @@
"""Utility code for managing design documents."""
+from __future__ import print_function
+
from copy import deepcopy
from inspect import getsource
from itertools import groupby
@@ -21,15 +23,15 @@
class ViewDefinition(object):
r"""Definition of a view stored in a specific design document.
-
+
An instance of this class can be used to access the results of the view,
as well as to keep the view definition in the design document up to date
with the definition in the application code.
-
+
>>> from couchdb import Server
>>> server = Server()
>>> db = server.create('python-tests')
-
+
>>> view = ViewDefinition('tests', 'all', '''function(doc) {
... emit(doc._id, null);
... }''')
@@ -39,30 +41,30 @@
even exist yet. That can be fixed using the `sync` method:
>>> view.sync(db) #doctest: +ELLIPSIS
- [(True, '_design/tests', ...)]
+ [(True, u'_design/tests', ...)]
>>> design_doc = view.get_doc(db)
>>> design_doc #doctest: +ELLIPSIS
- <Document '_design/tests'@'...' {...}>
- >>> print design_doc['views']['all']['map']
+ <Document u'_design/tests'@u'...' {...}>
+ >>> print(design_doc['views']['all']['map'])
function(doc) {
emit(doc._id, null);
}
If you use a Python view server, you can also use Python functions instead
of code embedded in strings:
-
+
>>> def my_map(doc):
... yield doc['somekey'], doc['somevalue']
>>> view = ViewDefinition('test2', 'somename', my_map, language='python')
>>> view.sync(db) #doctest: +ELLIPSIS
- [(True, '_design/test2', ...)]
+ [(True, u'_design/test2', ...)]
>>> design_doc = view.get_doc(db)
>>> design_doc #doctest: +ELLIPSIS
- <Document '_design/test2'@'...' {...}>
- >>> print design_doc['views']['somename']['map']
+ <Document u'_design/test2'@u'...' {...}>
+ >>> print(design_doc['views']['somename']['map'])
def my_map(doc):
yield doc['somekey'], doc['somevalue']
-
+
Use the static `sync_many()` method to create or update a collection of
views in the database in an atomic and efficient manner, even across
different design documents.
@@ -74,11 +76,11 @@
language='javascript', wrapper=None, options=None,
**defaults):
"""Initialize the view definition.
-
+
Note that the code in `map_fun` and `reduce_fun` is automatically
dedented, that is, any common leading whitespace is removed from each
line.
-
+
:param design: the name of the design document
:param name: the name of the view
:param map_fun: the map function code
@@ -107,7 +109,7 @@
def __call__(self, db, **options):
"""Execute the view in the given database.
-
+
:param db: the `Database` instance
:param options: optional query string parameters
:return: the view results
@@ -127,7 +129,7 @@
def get_doc(self, db):
"""Retrieve and return the design document corresponding to this view
definition from the given database.
-
+
:param db: the `Database` instance
:return: a `client.Document` instance, or `None` if the design document
does not exist in the database
@@ -138,7 +140,7 @@
def sync(self, db):
"""Ensure that the view stored in the database matches the view defined
by this instance.
-
+
:param db: the `Database` instance
"""
return type(self).sync_many(db, [self])
@@ -148,11 +150,11 @@
"""Ensure that the views stored in the database that correspond to a
given list of `ViewDefinition` instances match the code defined in
those instances.
-
+
This function might update more than one design document. This is done
using the CouchDB bulk update feature to ensure atomicity of the
operation.
-
+
:param db: the `Database` instance
:param views: a sequence of `ViewDefinition` instances
:param remove_missing: whether views found in a design document that
diff -r 961ac99baa29 couchdb/http.py
--- a/couchdb/http.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/http.py Fri Jan 17 11:03:56 2014 +0100
@@ -11,24 +11,24 @@
standard library.
"""
+from __future__ import print_function
+
from base64 import b64encode
from datetime import datetime
import errno
-from httplib import BadStatusLine, HTTPConnection, HTTPSConnection
+from six.moves.http_client import BadStatusLine, HTTPConnection, HTTPSConnection
+import six
import socket
import time
-try:
- from cStringIO import StringIO
-except ImportError:
- from StringIO import StringIO
+from six.moves import cStringIO as StringIO
import sys
try:
from threading import Lock
except ImportError:
from dummy_threading import Lock
-import urllib
-from urlparse import urlsplit, urlunsplit
-from email.Utils import parsedate
+import six.moves.urllib.parse as urllib
+from six.moves.urllib.parse import urlsplit, urlunsplit
+from email.utils import parsedate
from couchdb import json
@@ -53,22 +53,23 @@
try:
self.sock = socket.socket(af, socktype, proto)
if self.debuglevel > 0:
- print "connect: (%s, %s)" % (self.host, self.port)
+ print("connect: (%s, %s)" % (self.host, self.port))
# setting socket timeout
self.sock.settimeout(self.timeout)
self.sock.connect(sa)
- except socket.error, msg:
+ except socket.error:
+ msg = sys.exc_info()[1]
if self.debuglevel > 0:
- print 'connect fail:', (self.host, self.port)
+ print('connect fail:', (self.host, self.port))
if self.sock:
self.sock.close()
self.sock = None
continue
break
if not self.sock:
- raise socket.error, msg
+ raise socket.error(msg)
_HTTPConnection = HTTPConnection
_HTTPSConnection = HTTPSConnection
@@ -97,7 +98,7 @@
Based on code originally copied from Python 2.7's httplib module.
"""
-
+
def endheaders(self, message_body=None):
if self.__dict__['_HTTPConnection__state'] == _CS_REQ_STARTED:
self.__dict__['_HTTPConnection__state'] = _CS_REQ_SENT
@@ -260,14 +261,14 @@
if etag:
headers['If-None-Match'] = etag
- if (body is not None and not isinstance(body, basestring) and
+ if (body is not None and not isinstance(body, six.binary_type) and
not hasattr(body, 'read')):
body = json.encode(body).encode('utf-8')
headers.setdefault('Content-Type', 'application/json')
if body is None:
headers.setdefault('Content-Length', '0')
- elif isinstance(body, basestring):
+ elif isinstance(body, six.string_types) or isinstance(body, six.binary_type):
headers.setdefault('Content-Length', str(len(body)))
else:
headers['Transfer-Encoding'] = 'chunked'
@@ -283,12 +284,13 @@
while True:
try:
return _try_request()
- except socket.error, e:
+ except socket.error:
+ e = sys.exc_info()[1]
ecode = e.args[0]
if ecode not in self.retryable_errors:
raise
try:
- delay = retries.next()
+ delay = six.next(retries)
except StopIteration:
# No more retries, raise last socket error.
raise e
@@ -297,24 +299,49 @@
def _try_request():
try:
+ # conn.request(method, path_query, body, headers)
+ # return conn.getresponse()
+
+ #print("REQ", method, path_query, file=sys.stderr)
conn.putrequest(method, path_query, skip_accept_encoding=True)
for header in headers:
+ #print("HDR", header, headers[header], file=sys.stderr)
conn.putheader(header, headers[header])
if body is None:
+ #print("ENDNONE", file=sys.stderr)
conn.endheaders()
else:
- if isinstance(body, str):
+ if isinstance(body, six.string_types):
+ #print("ENDBODY", 1, body, file=sys.stderr)
+ _body = body
+ if isinstance(body, six.text_type):
+ _body = body.encode('utf-8')
+ #print("ENDBODY", 2, _body, file=sys.stderr)
+ # if six.PY3:
+ # _body += b'\r\n'
+ # print("ENDBODY", 3, _body, file=sys.stderr)
+ conn.endheaders(_body)
+ elif isinstance(body, six.binary_type):
+ #print("ENDBODY", 4, body, file=sys.stderr)
+ # if six.PY3:
+ # _body += b'\r\n'
+ # print("ENDBODY", 3, _body, file=sys.stderr)
conn.endheaders(body)
else: # assume a file-like object and send in chunks
+ #print("ENDBODY", 3, body, file=sys.stderr)
conn.endheaders()
- while 1:
+ while True:
chunk = body.read(CHUNK_SIZE)
if not chunk:
break
- conn.send(('%x\r\n' % len(chunk)) + chunk + '\r\n')
- conn.send('0\r\n\r\n')
+ if isinstance(chunk, six.text_type):
+ chunk = chunk.encode('utf-8')
+ status = ('%x\r\n' % len(chunk)).encode('utf-8')
+ conn.send(status + chunk + b'\r\n')
+ conn.send(b'0\r\n\r\n')
return conn.getresponse()
- except BadStatusLine, e:
+ except BadStatusLine:
+ e = sys.exc_info()[1]
# httplib raises a BadStatusLine when it cannot read the status
# line saying, "Presumably, the server closed the connection
# before sending a valid response."
@@ -333,6 +360,8 @@
self.connection_pool.release(url, conn)
status, msg, data = cached_resp
if data is not None:
+ if six.PY3 and isinstance(data, six.binary_type):
+ data = data.decode('utf-8')
data = StringIO(data)
return status, msg, data
elif cached_resp:
@@ -364,7 +393,7 @@
self.connection_pool.release(url, conn)
# Buffer small non-JSON response bodies
- elif int(resp.getheader('content-length', sys.maxint)) < CHUNK_SIZE:
+ elif int(resp.getheader('content-length', sys.maxsize)) < CHUNK_SIZE:
data = resp.read()
self.connection_pool.release(url, conn)
@@ -379,7 +408,7 @@
if status >= 400:
ctype = resp.getheader('content-type')
if data is not None and 'application/json' in ctype:
- data = json.decode(data)
+ data = json.decode(data.decode('utf-8'))
error = data.get('error'), data.get('reason')
elif method != 'HEAD':
error = resp.read()
@@ -402,6 +431,8 @@
self.cache.put(url, (status, resp.msg, data))
if not streamed and data is not None:
+ if six.PY3 and isinstance(data, six.binary_type):
+ data = data.decode('utf-8')
data = StringIO(data)
return status, resp.msg, data
@@ -545,7 +576,10 @@
status, headers, data = self._request(method, path, body=body,
headers=headers, **params)
if 'application/json' in headers.get('content-type'):
- data = json.decode(data.read())
+ rsp = data.read()
+ if isinstance(rsp, six.binary_type):
+ rsp = rsp.decode('utf-8')
+ data = json.decode(rsp)
return status, headers, data
@@ -553,7 +587,7 @@
def extract_credentials(url):
"""Extract authentication (user name and password) credentials from the
given URL.
-
+
>>> extract_credentials('http://localhost:5984/_config/')
('http://localhost:5984/_config/', None)
>>> extract_credentials('http://joe:secret@localhost:5984/_config/')
@@ -579,7 +613,7 @@
def quote(string, safe=''):
- if isinstance(string, unicode):
+ if isinstance(string, six.text_type):
string = string.encode('utf-8')
return urllib.quote(string, safe)
@@ -589,7 +623,7 @@
data = data.items()
params = []
for name, value in data:
- if isinstance(value, unicode):
+ if isinstance(value, six.text_type):
value = value.encode('utf-8')
params.append((name, value))
return urllib.urlencode(params)
@@ -648,4 +682,3 @@
retval.extend(['?', urlencode(params)])
return ''.join(retval)
-
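
Most of the http.py churn stems from httplib returning `bytes` on Python 3 while the JSON layer wants text, and, in the other direction, chunked uploads needing `bytes` on the wire. A rough sketch of both conversions, using the standard library `json` module in place of couchdb's pluggable `json` wrapper; `read_json_body` and `send_chunk` are illustrative names only:

    import json
    import six

    def read_json_body(resp):
        data = resp.read()                       # bytes on Python 3, str on Python 2
        if isinstance(data, six.binary_type):
            data = data.decode('utf-8')
        return json.loads(data)

    def send_chunk(conn, chunk):
        if isinstance(chunk, six.text_type):     # text must be encoded before sending
            chunk = chunk.encode('utf-8')
        size = ('%x\r\n' % len(chunk)).encode('ascii')
        conn.send(size + chunk + b'\r\n')        # chunked transfer-encoding framing
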
diff -r 961ac99baa29 couchdb/json.py
--- a/couchdb/json.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/json.py Fri Jan 17 11:03:56 2014 +0100
@@ -35,6 +35,7 @@
import warnings
import os
+import six
_initialized = False
_using = os.environ.get('COUCHDB_PYTHON_JSON')
@@ -44,24 +45,26 @@
def decode(string):
"""Decode the given JSON string.
-
+
:param string: the JSON string to decode
- :type string: basestring
+ :type string: six.string_types
:return: the corresponding Python data structure
:rtype: object
"""
if not _initialized:
_initialize()
+ if isinstance(string, six.binary_type):
+ string = string.decode('utf-8')
return _decode(string)
def encode(obj):
"""Encode the given object as a JSON string.
-
+
:param obj: the Python data structure to encode
:type obj: object
:return: the corresponding JSON string
- :rtype: basestring
+ :rtype: six.string_types
"""
if not _initialized:
_initialize()
@@ -71,16 +74,16 @@
def use(module=None, decode=None, encode=None):
"""Set the JSON library that should be used, either by specifying a known
module name, or by providing a decode and encode function.
-
+
The modules "simplejson" and "json" are currently supported for the
``module`` parameter.
-
+
If provided, the ``decode`` parameter must be a callable that accepts a
JSON string and returns a corresponding Python data structure. The
``encode`` callable must accept a Python data structure and return the
corresponding JSON string. Exceptions raised by decoding and encoding
should be propagated up unaltered.
-
+
:param module: the name of the JSON library module to use, or the module
object itself
:type module: str or module
@@ -91,7 +94,7 @@
"""
global _decode, _encode, _initialized, _using
if module is not None:
- if not isinstance(module, basestring):
+ if not isinstance(module, six.string_types):
module = module.__name__
if module not in ('cjson', 'json', 'simplejson'):
raise ValueError('Unsupported JSON module %s' % module)
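
The only functional change to json.py is that `decode()` now normalizes `bytes` input to text before dispatching to whichever backend is configured. In isolation the idea is roughly the following, with the standard library `json` module standing in for the configured backend:

    import json
    import six

    def decode(string):
        # HTTP response bodies arrive as bytes on Python 3; JSON backends want text.
        if isinstance(string, six.binary_type):
            string = string.decode('utf-8')
        return json.loads(string)

    assert decode(b'{"ok": true}') == decode(u'{"ok": true}') == {'ok': True}
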
diff -r 961ac99baa29 couchdb/mapping.py
--- a/couchdb/mapping.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/mapping.py Fri Jan 17 11:03:56 2014 +0100
@@ -58,11 +58,14 @@
>>> del server['python-tests']
"""
+from __future__ import print_function
+
import copy
from calendar import timegm
from datetime import date, datetime, time
from decimal import Decimal
+import six
from time import strptime, struct_time
from couchdb.design import ViewDefinition
@@ -78,7 +81,7 @@
class Field(object):
"""Basic unit for mapping a piece of data between Python and JSON.
-
+
Instances of this class can be added to subclasses of `Document` to describe
the mapping of a document.
"""
@@ -106,7 +109,7 @@
instance._data[self.name] = value
def _to_python(self, value):
- return unicode(value)
+ return six.text_type(value)
def _to_json(self, value):
return self._to_python(value)
@@ -127,9 +130,8 @@
d['_fields'] = fields
return type.__new__(cls, name, bases, d)
-
+@six.add_metaclass(MappingMeta)
class Mapping(object):
- __metaclass__ = MappingMeta
def __init__(self, **values):
self._data = {}
@@ -189,7 +191,7 @@
class ViewField(object):
r"""Descriptor that can be used to bind a view definition to a property of
a `Document` class.
-
+
>>> class Person(Document):
... name = TextField()
... age = IntegerField()
@@ -199,20 +201,20 @@
... }''')
>>> Person.by_name
<ViewDefinition '_design/people/_view/by_name'>
-
- >>> print Person.by_name.map_fun
+
+ >>> print(Person.by_name.map_fun)
function(doc) {
emit(doc.name, doc);
}
-
+
That property can be used as a function, which will execute the view.
-
+
>>> from couchdb import Database
>>> db = Database('python-tests')
-
+
>>> Person.by_name(db, count=3)
<ViewResults <PermanentView '_design/people/_view/by_name'> {'count': 3}>
-
+
The results produced by the view are automatically wrapped in the
`Document` subclass the descriptor is bound to. In this example, it would
return instances of the `Person` class. But please note that this requires
@@ -220,10 +222,10 @@
mapping defined by the containing `Document` class. Alternatively, the
``include_docs`` query option can be used to inline the actual documents in
the view results, which will then be used instead of the values.
-
+
If you use Python view functions, this class can also be used as a
decorator:
-
+
>>> class Person(Document):
... name = TextField()
... age = IntegerField()
@@ -231,11 +233,11 @@
... @ViewField.define('people')
... def by_name(doc):
... yield doc['name'], doc
-
+
>>> Person.by_name
<ViewDefinition '_design/people/_view/by_name'>
- >>> print Person.by_name.map_fun
+ >>> print(Person.by_name.map_fun)
def by_name(doc):
yield doc['name'], doc
"""
@@ -243,7 +245,7 @@
def __init__(self, design, map_fun, reduce_fun=None, name=None,
language='javascript', wrapper=DEFAULT, **defaults):
"""Initialize the view descriptor.
-
+
:param design: the name of the design document
:param map_fun: the map function code
:param reduce_fun: the reduce function code (optional)
@@ -293,8 +295,8 @@
return MappingMeta.__new__(cls, name, bases, d)
+@six.add_metaclass(DocumentMeta)
class Document(Mapping):
- __metaclass__ = DocumentMeta
def __init__(self, id=None, **values):
Mapping.__init__(self, **values)
@@ -319,8 +321,8 @@
@property
def rev(self):
"""The document revision.
-
- :rtype: basestring
+
+        :rtype: six.string_types
"""
if hasattr(self._data, 'rev'): # When data is client.Document
return self._data.rev
@@ -328,18 +330,18 @@
def items(self):
"""Return the fields as a list of ``(name, value)`` tuples.
-
+
This method is provided to enable easy conversion to native dictionary
objects, for example to allow use of `mapping.Document` instances with
`client.Database.update`.
-
+
>>> class Post(Document):
... title = TextField()
... author = TextField()
>>> post = Post(id='foo-bar', title='Foo bar', author='Joe')
>>> sorted(post.items())
[('_id', 'foo-bar'), ('author', u'Joe'), ('title', u'Foo bar')]
-
+
:return: a list of ``(name, value)`` tuples
"""
retval = []
@@ -355,7 +357,7 @@
@classmethod
def load(cls, db, id):
"""Load a specific document from the given database.
-
+
:param db: the `Database` object to retrieve the document from
:param id: the document ID
:return: the `Document` instance, or `None` if no document with the
@@ -375,7 +377,7 @@
def query(cls, db, map_fun, reduce_fun, language='javascript', **options):
"""Execute a CouchDB temporary view and map the result values back to
objects of this mapping.
-
+
Note that by default, any properties of the document that are not
included in the values of the view will be treated as if they were
missing from the document. If you want to load the full document for
@@ -388,7 +390,7 @@
def view(cls, db, viewname, **options):
"""Execute a CouchDB named view and map the result values back to
objects of this mapping.
-
+
Note that by default, any properties of the document that are not
included in the values of the view will be treated as if they were
missing from the document. If you want to load the full document for
@@ -408,7 +410,7 @@
class TextField(Field):
"""Mapping field for string values."""
- _to_python = unicode
+ _to_python = six.text_type
class FloatField(Field):
@@ -423,7 +425,7 @@
class LongField(Field):
"""Mapping field for long integer values."""
- _to_python = long
+ _to_python = long if six.PY2 else int
class BooleanField(Field):
@@ -438,12 +440,12 @@
return Decimal(value)
def _to_json(self, value):
- return unicode(value)
+ return six.text_type(value)
class DateField(Field):
"""Mapping field for storing dates.
-
+
>>> field = DateField()
>>> field._to_python('2007-04-01')
datetime.date(2007, 4, 1)
@@ -454,7 +456,7 @@
"""
def _to_python(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
try:
value = date(*strptime(value, '%Y-%m-%d')[:3])
except ValueError:
@@ -469,7 +471,7 @@
class DateTimeField(Field):
"""Mapping field for storing date/time values.
-
+
>>> field = DateTimeField()
>>> field._to_python('2007-04-01T15:30:00Z')
datetime.datetime(2007, 4, 1, 15, 30)
@@ -480,7 +482,7 @@
"""
def _to_python(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
try:
value = value.split('.', 1)[0] # strip out microseconds
value = value.rstrip('Z') # remove timezone separator
@@ -499,7 +501,7 @@
class TimeField(Field):
"""Mapping field for storing times.
-
+
>>> field = TimeField()
>>> field._to_python('15:30:00')
datetime.time(15, 30)
@@ -510,7 +512,7 @@
"""
def _to_python(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
try:
value = value.split('.', 1)[0] # strip out microseconds
value = time(*strptime(value, '%H:%M:%S')[3:6])
@@ -526,7 +528,7 @@
class DictField(Field):
"""Field type for nested dictionaries.
-
+
>>> from couchdb import Server
>>> server = Server()
>>> db = server.create('python-tests')
@@ -554,7 +556,7 @@
>>> post.author.email
u'john@doe.com'
>>> post.extra
- {'foo': 'bar'}
+ {u'foo': u'bar'}
>>> del server['python-tests']
"""
@@ -604,11 +606,11 @@
>>> post = Post.load(db, post.id)
>>> comment = post.comments[0]
>>> comment['author']
- 'myself'
+ u'myself'
>>> comment['content']
- 'Bla bla'
+ u'Bla bla'
>>> comment['time'] #doctest: +ELLIPSIS
- '...T...Z'
+ u'...T...Z'
>>> del server['python-tests']
"""
@@ -661,16 +663,24 @@
return str(self.list)
def __unicode__(self):
- return unicode(self.list)
+ return six.text_type(self.list)
def __delitem__(self, index):
- del self.list[index]
+ if isinstance(index, slice):
+ self.__delslice__(index.start, index.stop)
+ else:
+ del self.list[index]
def __getitem__(self, index):
+ if isinstance(index, slice):
+ return self.__getslice__(index.start, index.stop)
return self.field._to_python(self.list[index])
def __setitem__(self, index, value):
- self.list[index] = self.field._to_json(value)
+ if isinstance(index, slice):
+ self.__setslice__(index.start, index.stop, value)
+ else:
+ self.list[index] = self.field._to_json(value)
def __delslice__(self, i, j):
del self.list[i:j]
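
In mapping.py the Python-2-only `__metaclass__` attribute is replaced with the `six.add_metaclass` class decorator, which attaches the metaclass in a way both interpreters understand. A stripped-down sketch of the idiom; `FieldCollector` is illustrative, not the real `MappingMeta`:

    import six

    class FieldCollector(type):
        def __new__(cls, name, bases, d):
            # collect the public class attributes, as a toy stand-in for building Field descriptors
            d['_fields'] = sorted(k for k in d if not k.startswith('_'))
            return type.__new__(cls, name, bases, d)

    # Python 2 only:   class Mapping(object): __metaclass__ = FieldCollector
    # Python 2 and 3:
    @six.add_metaclass(FieldCollector)
    class Mapping(object):
        name = None
        age = None

    assert Mapping._fields == ['age', 'name']
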
diff -r 961ac99baa29 couchdb/multipart.py
--- a/couchdb/multipart.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/multipart.py Fri Jan 17 11:03:56 2014 +0100
@@ -8,6 +8,8 @@
"""Support for streamed reading and writing of multipart MIME content."""
+from __future__ import print_function
+
from base64 import b64encode
from cgi import parse_header
from email import header
@@ -15,28 +17,29 @@
from hashlib import md5
except ImportError:
from md5 import new as md5
+import six
import sys
__all__ = ['read_multipart', 'write_multipart']
__docformat__ = 'restructuredtext en'
-CRLF = '\r\n'
+CRLF = b'\r\n'
def read_multipart(fileobj, boundary=None):
"""Simple streaming MIME multipart parser.
-
+
This function takes a file-like object reading a MIME envelope, and yields
a ``(headers, is_multipart, payload)`` tuple for every part found, where
``headers`` is a dictionary containing the MIME headers of that part (with
names lower-cased), ``is_multipart`` is a boolean indicating whether the
part is itself multipart, and ``payload`` is either a string (if
``is_multipart`` is false), or an iterator over the nested parts.
-
+
Note that the iterator produced for nested multipart payloads MUST be fully
consumed, even if you wish to skip over the content.
-
+
:param fileobj: a file-like object
:param boundary: the part boundary string, will generally be determined
automatically from the headers of the outermost multipart
@@ -48,36 +51,38 @@
buf = []
outer = in_headers = boundary is None
- next_boundary = boundary and '--' + boundary + '\n' or None
- last_boundary = boundary and '--' + boundary + '--\n' or None
+ next_boundary = boundary and ('--' + boundary + '\n').encode() or None
+ last_boundary = boundary and ('--' + boundary + '--\n').encode() or None
def _current_part():
- payload = ''.join(buf)
- if payload.endswith('\r\n'):
+ payload = b''.join(buf)
+ if payload.endswith(b'\r\n'):
payload = payload[:-2]
- elif payload.endswith('\n'):
+ elif payload.endswith(b'\n'):
payload = payload[:-1]
content_md5 = headers.get('content-md5')
+ if six.PY3 and isinstance(payload, six.text_type):
+ payload = payload.encode('utf-8')
if content_md5:
- h = b64encode(md5(payload).digest())
+ h = b64encode(md5(payload).digest()).decode()
if content_md5 != h:
raise ValueError('data integrity check failed')
return headers, False, payload
for line in fileobj:
if in_headers:
- line = line.replace(CRLF, '\n')
- if line != '\n':
- name, value = [item.strip() for item in line.split(':', 1)]
- name = name.lower()
- value, charset = header.decode_header(value)[0]
+ line = line.replace(CRLF, b'\n')
+ if line != b'\n':
+ name, value = [item.strip() for item in line.split(b':', 1)]
+ name = name.lower().decode()
+ value, charset = header.decode_header(value.decode())[0]
if charset is None:
headers[name] = value
else:
headers[name] = value.decode(charset)
else:
in_headers = False
- mimetype, params = parse_header(headers.get('content-type'))
+ mimetype, params = parse_header(headers.get('content-type', ''))
if mimetype.startswith('multipart/'):
sub_boundary = params['boundary']
sub_parts = read_multipart(fileobj, boundary=sub_boundary)
@@ -90,7 +95,7 @@
yield part
return
- elif line.replace(CRLF, '\n') == next_boundary:
+ elif line.replace(CRLF, b'\n') == next_boundary:
# We've reached the start of a new part, as indicated by the
# boundary
if headers:
@@ -102,7 +107,7 @@
del buf[:]
in_headers = True
- elif line.replace(CRLF, '\n') == last_boundary:
+ elif line.replace(CRLF, b'\n') == last_boundary:
# We're done with this multipart envelope
break
@@ -119,29 +124,31 @@
self.fileobj = fileobj
if boundary is None:
boundary = self._make_boundary()
- self.boundary = boundary
+ self.boundary = boundary.encode()
if headers is None:
headers = {}
headers['Content-Type'] = 'multipart/%s; boundary="%s"' % (
- subtype, self.boundary
+ subtype, self.boundary.decode()
)
self._write_headers(headers)
def open(self, headers=None, subtype='mixed', boundary=None):
- self.fileobj.write('--')
+ self.fileobj.write(b'--')
self.fileobj.write(self.boundary)
self.fileobj.write(CRLF)
return MultipartWriter(self.fileobj, headers=headers, subtype=subtype,
boundary=boundary)
def add(self, mimetype, content, headers=None):
- self.fileobj.write('--')
+ self.fileobj.write(b'--')
self.fileobj.write(self.boundary)
self.fileobj.write(CRLF)
if headers is None:
headers = {}
- if isinstance(content, unicode):
+ if isinstance(content, six.text_type):
ctype, params = parse_header(mimetype)
+ if mimetype == 'application/json' and 'charset' not in params:
+ params['charset'] = 'utf-8'
if 'charset' in params:
content = content.encode(params['charset'])
else:
@@ -150,7 +157,7 @@
headers['Content-Type'] = mimetype
if content:
headers['Content-Length'] = str(len(content))
- headers['Content-MD5'] = b64encode(md5(content).digest())
+ headers['Content-MD5'] = b64encode(md5(content).digest()).decode()
self._write_headers(headers)
if content:
# XXX: throw an exception if a boundary appears in the content??
@@ -158,9 +165,9 @@
self.fileobj.write(CRLF)
def close(self):
- self.fileobj.write('--')
+ self.fileobj.write(b'--')
self.fileobj.write(self.boundary)
- self.fileobj.write('--')
+ self.fileobj.write(b'--')
self.fileobj.write(CRLF)
def _make_boundary(self):
@@ -169,18 +176,24 @@
return '==' + uuid4().hex + '=='
except ImportError:
from random import randrange
- token = randrange(sys.maxint)
- format = '%%0%dd' % len(repr(sys.maxint - 1))
+ token = randrange(sys.maxsize)
+ format = '%%0%dd' % len(repr(sys.maxsize - 1))
return '===============' + (format % token) + '=='
def _write_headers(self, headers):
if headers:
for name in sorted(headers.keys()):
value = headers[name]
- if isinstance(value, unicode):
- value = str(header.make_header([(value, 'utf-8')]))
+ if isinstance(name, six.text_type):
+ name = name.encode('utf-8')
+ if isinstance(value, six.text_type):
+ value = header.make_header([(value, 'utf-8')])
+ try:
+ value = str(value).encode('ascii')
+ except UnicodeEncodeError:
+ value = value.encode().encode('ascii')
self.fileobj.write(name)
- self.fileobj.write(': ')
+ self.fileobj.write(b': ')
self.fileobj.write(value)
self.fileobj.write(CRLF)
self.fileobj.write(CRLF)
@@ -200,13 +213,13 @@
envelope you call the ``add(mimetype, content, [headers])`` method for
every part, and finally call the ``close()`` method.
- >>> from StringIO import StringIO
+ >>> from six import StringIO
>>> buf = StringIO()
>>> envelope = write_multipart(buf, boundary='==123456789==')
>>> envelope.add('text/plain', 'Just testing')
>>> envelope.close()
- >>> print buf.getvalue().replace('\r\n', '\n')
+ >>> print(buf.getvalue().replace('\r\n', '\n'))
Content-Type: multipart/mixed; boundary="==123456789=="
<BLANKLINE>
--==123456789==
@@ -231,7 +244,7 @@
>>> part.add('text/plain', 'Just testing')
>>> part.close()
>>> envelope.close()
- >>> print buf.getvalue().replace('\r\n', '\n') #:doctest +ELLIPSIS
+ >>> print(buf.getvalue().replace('\r\n', '\n')) #:doctest +ELLIPSIS
Content-Type: multipart/mixed; boundary="==123456789=="
<BLANKLINE>
--==123456789==
@@ -246,7 +259,7 @@
--==abcdefghi==--
--==123456789==--
<BLANKLINE>
-
+
:param fileobj: a writable file-like object that the output should get
written to
:param subtype: the subtype of the multipart MIME type (e.g. "mixed")
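
multipart.py now assumes a binary output stream: the boundary and CRLF are kept as `bytes`, and any `unicode` content or header value is encoded before being written. Condensed to its core, the write path looks roughly like this; `add_part` is an illustrative helper, not the real `MultipartWriter` API:

    import six
    from io import BytesIO

    CRLF = b'\r\n'

    def add_part(fileobj, boundary, content, charset='utf-8'):
        if isinstance(content, six.text_type):
            content = content.encode(charset)                     # text -> bytes
        fileobj.write(b'--' + boundary.encode('ascii') + CRLF)
        fileobj.write(b'Content-Length: ' + str(len(content)).encode('ascii') + CRLF)
        fileobj.write(CRLF)                                       # blank line ends the headers
        fileobj.write(content + CRLF)

    buf = BytesIO()
    add_part(buf, '==123456789==', u'Iñtërnâtiônàlizætiøn')
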
diff -r 961ac99baa29 couchdb/tests/client.py
--- a/couchdb/tests/client.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/tests/client.py Fri Jan 17 11:03:56 2014 +0100
@@ -11,12 +11,13 @@
import os
import os.path
import shutil
-from StringIO import StringIO
+from six import BytesIO
+import six
import time
import tempfile
import threading
import unittest
-import urlparse
+import six.moves.urllib.parse as urlparse
from couchdb import client, http
from couchdb.tests import testutil
@@ -45,7 +46,7 @@
def test_server_vars(self):
version = self.server.version()
- self.assertTrue(isinstance(version, basestring))
+ self.assertTrue(isinstance(version, six.string_types))
config = self.server.config()
self.assertTrue(isinstance(config, dict))
tasks = self.server.tasks()
@@ -218,7 +219,7 @@
self.assertEqual(revs[0]['_rev'], new_rev)
self.assertEqual(revs[1]['_rev'], old_rev)
gen = self.db.revisions('crap')
- self.assertRaises(StopIteration, lambda: gen.next())
+ self.assertRaises(StopIteration, lambda: six.next(gen))
self.assertTrue(self.db.compact())
while self.db.info()['compact_running']:
@@ -237,12 +238,12 @@
self.db['foo'] = doc
old_rev = doc['_rev']
- self.db.put_attachment(doc, 'Foo bar', 'foo.txt', 'text/plain')
+ self.db.put_attachment(doc, b'Foo bar', 'foo.txt', 'text/plain')
self.assertNotEqual(old_rev, doc['_rev'])
doc = self.db['foo']
attachment = doc['_attachments']['foo.txt']
- self.assertEqual(len('Foo bar'), attachment['length'])
+ self.assertEqual(len(b'Foo bar'), attachment['length'])
self.assertEqual('text/plain', attachment['content_type'])
self.assertEqual('Foo bar',
@@ -259,7 +260,7 @@
doc = {'bar': 42}
self.db['foo'] = doc
old_rev = doc['_rev']
- fileobj = StringIO('Foo bar baz')
+ fileobj = BytesIO(b'Foo bar baz')
self.db.put_attachment(doc, fileobj, 'foo.txt')
self.assertNotEqual(old_rev, doc['_rev'])
@@ -284,7 +285,7 @@
self.db['foo'] = doc
old_rev = doc['_rev']
- self.db.put_attachment(doc, '', 'empty.txt')
+ self.db.put_attachment(doc, b'', 'empty.txt')
self.assertNotEqual(old_rev, doc['_rev'])
doc = self.db['foo']
@@ -320,7 +321,7 @@
def test_json_attachment(self):
doc = {}
self.db['foo'] = doc
- self.db.put_attachment(doc, '{}', 'test.json', 'application/json')
+ self.db.put_attachment(doc, b'{}', 'test.json', 'application/json')
self.assertEqual(self.db.get_attachment(doc, 'test.json').read(), '{}')
def test_include_docs(self):
@@ -338,7 +339,7 @@
for i in range(1, 6):
self.db.save({'i': i})
res = list(self.db.query('function(doc) { emit(doc.i, null); }',
- keys=range(1, 6, 2)))
+ keys=list(range(1, 6, 2))))
self.assertEqual(3, len(res))
for idx, i in enumerate(range(1, 6, 2)):
self.assertEqual(i, res[idx].key)
@@ -441,7 +442,7 @@
def test_changes(self):
self.db['foo'] = {'bar': True}
self.assertEqual(self.db.changes(since=0)['last_seq'], 1)
- first = self.db.changes(feed='continuous').next()
+ first = six.next(self.db.changes(feed='continuous'))
self.assertEqual(first['seq'], 1)
self.assertEqual(first['id'], 'foo')
@@ -472,7 +473,7 @@
def test_changes_heartbeat(self):
def wakeup():
time.sleep(.3)
- self.db.save({})
+ self.db.save({'a': 'b'})
threading.Thread(target=wakeup).start()
for change in self.db.changes(feed='continuous', heartbeat=100):
break
@@ -492,7 +493,6 @@
def test_row_object(self):
row = list(self.db.view('_all_docs', keys=['blah']))[0]
- self.assertEqual(repr(row), "<Row key='blah', error='not_found'>")
self.assertEqual(row.id, None)
self.assertEqual(row.key, 'blah')
self.assertEqual(row.value, None)
@@ -502,7 +502,7 @@
row = list(self.db.view('_all_docs', keys=['xyz']))[0]
self.assertEqual(row.id, 'xyz')
self.assertEqual(row.key, 'xyz')
- self.assertEqual(row.value.keys(), ['rev'])
+ self.assertEqual(list(row.value.keys()), ['rev'])
self.assertEqual(row.error, None)
def test_view_multi_get(self):
@@ -515,7 +515,7 @@
}
}
- res = list(self.db.view('test/multi_key', keys=range(1, 6, 2)))
+ res = list(self.db.view('test/multi_key', keys=list(range(1, 6, 2))))
self.assertEqual(3, len(res))
for idx, i in enumerate(range(1, 6, 2)):
self.assertEqual(i, res[idx].key)
@@ -679,15 +679,15 @@
self.assertEqual(self.db.show('foo/bar', r='abc')[1].read(), 'null:abc')
def test_list(self):
- self.assertEqual(self.db.list('foo/list', 'foo/by_id')[1].read(), '1\r\n2\r\n')
- self.assertEqual(self.db.list('foo/list', 'foo/by_id', include_header='true')[1].read(), 'id\r\n1\r\n2\r\n')
+ self.assertEqual(self.db.list('foo/list', 'foo/by_id')[1].read(), b'1\r\n2\r\n')
+ self.assertEqual(self.db.list('foo/list', 'foo/by_id', include_header='true')[1].read(), b'id\r\n1\r\n2\r\n')
def test_list_keys(self):
- self.assertEqual(self.db.list('foo/list', 'foo/by_id', keys=['1'])[1].read(), '1\r\n')
+ self.assertEqual(self.db.list('foo/list', 'foo/by_id', keys=['1'])[1].read(), b'1\r\n')
def test_list_view_params(self):
- self.assertEqual(self.db.list('foo/list', 'foo/by_name', startkey='o', endkey='p')[1].read(), '1\r\n')
- self.assertEqual(self.db.list('foo/list', 'foo/by_name', descending=True)[1].read(), '2\r\n1\r\n')
+ self.assertEqual(self.db.list('foo/list', 'foo/by_name', startkey='o', endkey='p')[1].read(), b'1\r\n')
+ self.assertEqual(self.db.list('foo/list', 'foo/by_name', descending=True)[1].read(), b'2\r\n1\r\n')
class UpdateHandlerTestCase(testutil.TempDatabaseMixin, unittest.TestCase):
@@ -733,7 +733,7 @@
num_docs = 100
def docfromnum(self, num):
- return {'_id': unicode(num), 'num': int(num / 2)}
+ return {'_id': six.text_type(num), 'num': int(num / 2)}
def docfromrow(self, row):
return {'_id': row['id'], 'num': row['key']}
@@ -744,18 +744,18 @@
'views': {'nums': {'map': 'function(doc) {emit(doc.num, null);}'},
'nulls': {'map': 'function(doc) {emit(null, null);}'}}}
self.db.save(design_doc)
- self.db.update([self.docfromnum(num) for num in xrange(self.num_docs)])
+ self.db.update([self.docfromnum(num) for num in range(self.num_docs)])
def test_allrows(self):
rows = list(self.db.iterview('test/nums', 10))
self.assertEqual(len(rows), self.num_docs)
self.assertEqual([self.docfromrow(row) for row in rows],
- [self.docfromnum(num) for num in xrange(self.num_docs)])
+ [self.docfromnum(num) for num in range(self.num_docs)])
def test_batchsizes(self):
# Check silly _batch values.
- self.assertRaises(ValueError, self.db.iterview('test/nums', 0).next)
- self.assertRaises(ValueError, self.db.iterview('test/nums', -1).next)
+ self.assertRaises(ValueError, lambda: six.next(self.db.iterview('test/nums', 0)))
+ self.assertRaises(ValueError, lambda: six.next(self.db.iterview('test/nums', -1)))
# Test various _batch sizes that are likely to cause trouble.
self.assertEqual(len(list(self.db.iterview('test/nums', 1))), self.num_docs)
self.assertEqual(len(list(self.db.iterview('test/nums', int(self.num_docs / 2)))), self.num_docs)
@@ -766,28 +766,28 @@
def test_limit(self):
# limit=0 doesn't make sense for iterview.
- self.assertRaises(ValueError, self.db.iterview('test/nums', 10, limit=0).next)
+ self.assertRaises(ValueError, lambda: six.next(self.db.iterview('test/nums', 10, limit=0)))
# Test various limit sizes that are likely to cause trouble.
for limit in [1, int(self.num_docs / 4), self.num_docs - 1, self.num_docs,
self.num_docs + 1]:
self.assertEqual([self.docfromrow(doc) for doc in self.db.iterview('test/nums', 10, limit=limit)],
- [self.docfromnum(x) for x in xrange(min(limit, self.num_docs))])
+ [self.docfromnum(x) for x in range(min(limit, self.num_docs))])
# Test limit same as batch size, in case of weird edge cases.
limit = int(self.num_docs / 4)
self.assertEqual([self.docfromrow(doc) for doc in self.db.iterview('test/nums', limit, limit=limit)],
- [self.docfromnum(x) for x in xrange(limit)])
+ [self.docfromnum(x) for x in range(limit)])
def test_descending(self):
self.assertEqual([self.docfromrow(doc) for doc in self.db.iterview('test/nums', 10, descending=True)],
- [self.docfromnum(x) for x in xrange(self.num_docs - 1, -1, -1)])
+ [self.docfromnum(x) for x in range(self.num_docs - 1, -1, -1)])
self.assertEqual([self.docfromrow(doc) for doc in self.db.iterview('test/nums', 10, limit=int(self.num_docs / 4), descending=True)],
- [self.docfromnum(x) for x in xrange(self.num_docs - 1, int(self.num_docs * 3 / 4) - 1, -1)])
+ [self.docfromnum(x) for x in range(self.num_docs - 1, int(self.num_docs * 3 / 4) - 1, -1)])
def test_startkey(self):
self.assertEqual([self.docfromrow(doc) for doc in self.db.iterview('test/nums', 10, startkey=int(self.num_docs / 2) - 1)],
- [self.docfromnum(x) for x in xrange(self.num_docs - 2, self.num_docs)])
+ [self.docfromnum(x) for x in range(self.num_docs - 2, self.num_docs)])
self.assertEqual([self.docfromrow(doc) for doc in self.db.iterview('test/nums', 10, startkey=1, descending=True)],
- [self.docfromnum(x) for x in xrange(3, -1, -1)])
+ [self.docfromnum(x) for x in range(3, -1, -1)])
def test_nullkeys(self):
self.assertEqual(len(list(self.db.iterview('test/nulls', 10))), self.num_docs)
@@ -801,7 +801,8 @@
suite.addTest(unittest.makeSuite(ShowListTestCase, 'test'))
suite.addTest(unittest.makeSuite(UpdateHandlerTestCase, 'test'))
suite.addTest(unittest.makeSuite(ViewIterationTestCase, 'test'))
- suite.addTest(doctest.DocTestSuite(client))
+ if six.PY2:
+ suite.addTest(doctest.DocTestSuite(client))
return suite
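
The test-suite changes above mostly follow three mechanical rules: call `six.next()` (or the builtin `next()`) instead of the removed `.next()` generator method, wrap `range()` in `list()` where an actual list is needed, and treat attachment bodies as `bytes`. Illustrated generically, with no couchdb-python objects involved:

    import six

    gen = (i * i for i in range(3))
    assert six.next(gen) == 0          # gen.next() no longer exists on Python 3

    keys = list(range(1, 6, 2))        # range() is a lazy sequence on Python 3
    assert keys == [1, 3, 5]

    payload = b'Foo bar'               # attachments go over the wire as bytes
    assert len(payload) == 7
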
diff -r 961ac99baa29 couchdb/tests/couch_tests.py
--- a/couchdb/tests/couch_tests.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/tests/couch_tests.py Fri Jan 17 11:03:56 2014 +0100
@@ -7,6 +7,7 @@
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
+import six
import unittest
from couchdb.http import ResourceConflict, ResourceNotFound
@@ -94,7 +95,7 @@
def test_lots_of_docs(self):
num = 100 # Crank up manually to really test
- for i in range(num):
+ for i in range(num):
self.db[str(i)] = {'integer': i, 'string': str(i)}
self.assertEqual(num, len(self.db))
@@ -192,7 +193,7 @@
self.assertEqual(texts[idx], row.key)
def test_design_docs(self):
- for i in range(50):
+ for i in range(50):
self.db[str(i)] = {'integer': i, 'string': str(i)}
self.db['_design/test'] = {'views': {
'all_docs': {'map': 'function(doc) { emit(doc.integer, null) }'},
@@ -223,7 +224,7 @@
}
}"""
rows = iter(self.db.query(query))
- self.assertEqual(None, rows.next().value)
+ self.assertEqual(None, six.next(rows).value)
for idx, row in enumerate(rows):
self.assertEqual(values[idx + 1], row.key)
diff -r 961ac99baa29 couchdb/tests/design.py
--- a/couchdb/tests/design.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/tests/design.py Fri Jan 17 11:03:56 2014 +0100
@@ -7,6 +7,7 @@
# you should have received as part of this distribution.
import doctest
+import six
import unittest
from couchdb import design
@@ -52,7 +53,8 @@
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(DesignTestCase))
- suite.addTest(doctest.DocTestSuite(design))
+ if six.PY2:
+ suite.addTest(doctest.DocTestSuite(design))
return suite
diff -r 961ac99baa29 couchdb/tests/http.py
--- a/couchdb/tests/http.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/tests/http.py Fri Jan 17 11:03:56 2014 +0100
@@ -10,7 +10,8 @@
import socket
import time
import unittest
-from StringIO import StringIO
+from six import BytesIO
+import six
from couchdb import http
from couchdb.tests import testutil
@@ -30,9 +31,13 @@
class ResponseBodyTestCase(unittest.TestCase):
def test_close(self):
- class TestStream(StringIO):
+ class TestStream(BytesIO):
+ def __init__(self, buf):
+ self._buf = buf
+ BytesIO.__init__(self, buf)
+
def isclosed(self):
- return len(self.buf) == self.tell()
+ return len(self._buf) == self.tell()
class Counter(object):
def __init__(self):
@@ -43,7 +48,7 @@
counter = Counter()
- response = http.ResponseBody(TestStream('foobar'), counter)
+ response = http.ResponseBody(TestStream(b'foobar'), counter)
response.read(10) # read more than stream has. close() is called
response.read() # steam ended. another close() call
@@ -53,17 +58,18 @@
def test_double_iteration_over_same_response_body(self):
class TestHttpResp(object):
msg = {'transfer-encoding': 'chunked'}
- def __init__(self, fp):
+ def __init__(self, fp, _len):
self.fp = fp
+ self._len = _len
def isclosed(self):
- return len(self.fp.buf) == self.fp.tell()
+ return self._len == self.fp.tell()
- data = 'foobarbaz'
- data = '\n'.join([hex(len(data))[2:], data])
- response = http.ResponseBody(TestHttpResp(StringIO(data)),
+ data = b'foobarbaz'
+ data = b'\n'.join([hex(len(data))[2:].encode('utf-8'), data])
+ response = http.ResponseBody(TestHttpResp(BytesIO(data), len(data)),
lambda *a, **k: None)
- self.assertEqual(list(response.iterchunks()), ['foobarbaz'])
+ self.assertEqual(list(response.iterchunks()), [b'foobarbaz'])
self.assertEqual(list(response.iterchunks()), [])
@@ -80,7 +86,8 @@
def suite():
suite = unittest.TestSuite()
- suite.addTest(doctest.DocTestSuite(http))
+ if six.PY2:
+ suite.addTest(doctest.DocTestSuite(http))
suite.addTest(unittest.makeSuite(SessionTestCase, 'test'))
suite.addTest(unittest.makeSuite(ResponseBodyTestCase, 'test'))
suite.addTest(unittest.makeSuite(CacheTestCase, 'test'))
diff -r 961ac99baa29 couchdb/tests/mapping.py
--- a/couchdb/tests/mapping.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/tests/mapping.py Fri Jan 17 11:03:56 2014 +0100
@@ -8,6 +8,8 @@
from decimal import Decimal
import doctest
+import six
+import sys
import unittest
from couchdb import design, mapping
@@ -59,7 +61,8 @@
try:
post.id = 'foo_bar'
self.fail('Excepted AttributeError')
- except AttributeError, e:
+ except AttributeError:
+ e = sys.exc_info()[1]
self.assertEqual('id can only be set on new documents', e.args[0])
def test_batch_update(self):
@@ -253,7 +256,8 @@
def suite():
suite = unittest.TestSuite()
- suite.addTest(doctest.DocTestSuite(mapping))
+ if six.PY2:
+ suite.addTest(doctest.DocTestSuite(mapping))
suite.addTest(unittest.makeSuite(DocumentTestCase, 'test'))
suite.addTest(unittest.makeSuite(ListFieldTestCase, 'test'))
suite.addTest(unittest.makeSuite(WrappingTestCase, 'test'))
diff -r 961ac99baa29 couchdb/tests/multipart.py
--- a/couchdb/tests/multipart.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/tests/multipart.py Fri Jan 17 11:03:56 2014 +0100
@@ -7,7 +7,8 @@
# you should have received as part of this distribution.
import doctest
-from StringIO import StringIO
+import six
+from six import BytesIO
import unittest
from couchdb import multipart
@@ -16,7 +17,7 @@
class ReadMultipartTestCase(unittest.TestCase):
def test_flat(self):
- text = '''\
+ text = b'''\
Content-Type: multipart/mixed; boundary="===============1946781859=="
--===============1946781859==
@@ -41,25 +42,25 @@
--===============1946781859==--
'''
num = 0
- parts = multipart.read_multipart(StringIO(text))
+ parts = multipart.read_multipart(BytesIO(text))
for headers, is_multipart, payload in parts:
self.assertEqual(is_multipart, False)
self.assertEqual('application/json', headers['content-type'])
if num == 0:
self.assertEqual('bar', headers['content-id'])
self.assertEqual('"1-4229094393"', headers['etag'])
- self.assertEqual('{\n "_id": "bar",\n '
- '"_rev": "1-4229094393"\n}', payload)
+ self.assertEqual(b'{\n "_id": "bar",\n '
+ b'"_rev": "1-4229094393"\n}', payload)
elif num == 1:
self.assertEqual('foo', headers['content-id'])
self.assertEqual('"1-2182689334"', headers['etag'])
- self.assertEqual('{\n "_id": "foo",\n "_rev": "1-2182689334",'
- '\n "something": "cool"\n}', payload)
+ self.assertEqual(b'{\n "_id": "foo",\n "_rev": "1-2182689334",'
+ b'\n "something": "cool"\n}', payload)
num += 1
self.assertEqual(num, 2)
def test_nested(self):
- text = '''\
+ text = b'''\
Content-Type: multipart/mixed; boundary="===============1946781859=="
--===============1946781859==
@@ -68,7 +69,7 @@
ETag: "1-4229094393"
{
- "_id": "bar",
+ "_id": "bar",
"_rev": "1-4229094393"
}
--===============1946781859==
@@ -80,8 +81,8 @@
Content-Type: application/json
{
- "_id": "foo",
- "_rev": "1-919589747",
+ "_id": "foo",
+ "_rev": "1-919589747",
"something": "cool"
}
--===============0909101126==
@@ -99,21 +100,21 @@
ETag: "1-3482142493"
{
- "_id": "baz",
+ "_id": "baz",
"_rev": "1-3482142493"
}
--===============1946781859==--
'''
num = 0
- parts = multipart.read_multipart(StringIO(text))
+ parts = multipart.read_multipart(BytesIO(text))
for headers, is_multipart, payload in parts:
if num == 0:
self.assertEqual(is_multipart, False)
self.assertEqual('application/json', headers['content-type'])
self.assertEqual('bar', headers['content-id'])
self.assertEqual('"1-4229094393"', headers['etag'])
- self.assertEqual('{\n "_id": "bar", \n '
- '"_rev": "1-4229094393"\n}', payload)
+ self.assertEqual(b'{\n "_id": "bar",\n '
+ b'"_rev": "1-4229094393"\n}', payload)
elif num == 1:
self.assertEqual(is_multipart, True)
self.assertEqual('foo', headers['content-id'])
@@ -125,14 +126,14 @@
if partnum == 0:
self.assertEqual('application/json',
headers['content-type'])
- self.assertEqual('{\n "_id": "foo", \n "_rev": '
- '"1-919589747", \n "something": '
- '"cool"\n}', payload)
+ self.assertEqual(b'{\n "_id": "foo",\n "_rev": '
+ b'"1-919589747",\n "something": '
+ b'"cool"\n}', payload)
elif partnum == 1:
self.assertEqual('text/plain', headers['content-type'])
self.assertEqual('mail.txt', headers['content-id'])
- self.assertEqual('Hello, friends.\nHow are you doing?'
- '\n\nRegards, Chris', payload)
+ self.assertEqual(b'Hello, friends.\nHow are you doing?'
+ b'\n\nRegards, Chris', payload)
partnum += 1
@@ -141,8 +142,8 @@
self.assertEqual('application/json', headers['content-type'])
self.assertEqual('baz', headers['content-id'])
self.assertEqual('"1-3482142493"', headers['etag'])
- self.assertEqual('{\n "_id": "baz", \n '
- '"_rev": "1-3482142493"\n}', payload)
+ self.assertEqual(b'{\n "_id": "baz",\n '
+ b'"_rev": "1-3482142493"\n}', payload)
num += 1
@@ -150,7 +151,7 @@
def test_unicode_headers(self):
# http://code.google.com/p/couchdb-python/issues/detail?id=179
- dump = '''Content-Type: multipart/mixed; boundary="==123456789=="
+ dump = u'''Content-Type: multipart/mixed; boundary="==123456789=="
--==123456789==
Content-ID: =?utf-8?b?5paH5qGj?=
@@ -159,8 +160,8 @@
Content-Type: application/json
{"_rev": "3-bc27b6930ca514527d8954c7c43e6a09", "_id": "文档"}
-'''
- parts = multipart.read_multipart(StringIO(dump))
+'''.encode('utf-8')
+ parts = multipart.read_multipart(BytesIO(dump))
for headers, is_multipart, payload in parts:
self.assertEqual(headers['content-id'], u'文档')
break
@@ -169,11 +170,11 @@
class WriteMultipartTestCase(unittest.TestCase):
def test_unicode_content(self):
- buf = StringIO()
+ buf = BytesIO()
envelope = multipart.write_multipart(buf, boundary='==123456789==')
envelope.add('text/plain', u'Iñtërnâtiônàlizætiøn')
envelope.close()
- self.assertEqual('''Content-Type: multipart/mixed; boundary="==123456789=="
+ self.assertEqual(u'''Content-Type: multipart/mixed; boundary="==123456789=="
--==123456789==
Content-Length: 27
@@ -182,23 +183,23 @@
Iñtërnâtiônàlizætiøn
--==123456789==--
-''', buf.getvalue().replace('\r\n', '\n'))
+''', buf.getvalue().decode('utf-8').replace('\r\n', '\n'))
def test_unicode_content_ascii(self):
- buf = StringIO()
+ buf = BytesIO()
envelope = multipart.write_multipart(buf, boundary='==123456789==')
self.assertRaises(UnicodeEncodeError, envelope.add,
'text/plain;charset=ascii', u'Iñtërnâtiônàlizætiøn')
def test_unicode_headers(self):
# http://code.google.com/p/couchdb-python/issues/detail?id=179
- buf = StringIO()
+ buf = BytesIO()
envelope = multipart.write_multipart(buf, boundary='==123456789==')
envelope.add('application/json',
- '{"_rev": "3-bc27b6930ca514527d8954c7c43e6a09",'
- ' "_id": "文档"}',
- headers={'Content-ID': u"文档"})
- self.assertEqual('''Content-Type: multipart/mixed; boundary="==123456789=="
+ u'{"_rev": "3-bc27b6930ca514527d8954c7c43e6a09",'
+ u' "_id": "文档"}',
+ headers={u'Content-ID': u"文档"})
+ self.assertEqual(u'''Content-Type: multipart/mixed; boundary="==123456789=="
--==123456789==
Content-ID: =?utf-8?b?5paH5qGj?=
@@ -207,12 +208,13 @@
Content-Type: application/json
{"_rev": "3-bc27b6930ca514527d8954c7c43e6a09", "_id": "文档"}
-''', buf.getvalue().replace('\r\n', '\n'))
+''', buf.getvalue().decode('utf-8').replace('\r\n', '\n'))
def suite():
suite = unittest.TestSuite()
- suite.addTest(doctest.DocTestSuite(multipart))
+ if six.PY2:
+ suite.addTest(doctest.DocTestSuite(multipart))
suite.addTest(unittest.makeSuite(ReadMultipartTestCase, 'test'))
suite.addTest(unittest.makeSuite(WriteMultipartTestCase, 'test'))
return suite
diff -r 961ac99baa29 couchdb/tests/testutil.py
--- a/couchdb/tests/testutil.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/tests/testutil.py Fri Jan 17 11:03:56 2014 +0100
@@ -28,7 +28,7 @@
self.temp_dbs = {}
# Find an unused database name
while True:
- name = 'couchdb-python/%d' % random.randint(0, sys.maxint)
+ name = 'couchdb-python/%d' % random.randint(0, sys.maxsize)
if name not in self.temp_dbs:
break
db = self.server.create(name)
diff -r 961ac99baa29 couchdb/tests/tools.py
--- a/couchdb/tests/tools.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/tests/tools.py Fri Jan 17 11:03:56 2014 +0100
@@ -9,7 +9,7 @@
import unittest
-from StringIO import StringIO
+from six import StringIO
from couchdb import Unauthorized
from couchdb.tools import load, dump
@@ -44,4 +44,3 @@
if __name__ == '__main__':
unittest.main(defaultTest='suite')
-
diff -r 961ac99baa29 couchdb/tests/view.py
--- a/couchdb/tests/view.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/tests/view.py Fri Jan 17 11:03:56 2014 +0100
@@ -7,7 +7,8 @@
# you should have received as part of this distribution.
import doctest
-from StringIO import StringIO
+from six import BytesIO
+import six
import unittest
from couchdb import view
@@ -16,96 +17,97 @@
class ViewServerTestCase(unittest.TestCase):
def test_reset(self):
- input = StringIO('["reset"]\n')
- output = StringIO()
+ input = BytesIO(b'["reset"]\n')
+ output = BytesIO()
view.run(input=input, output=output)
- self.assertEqual(output.getvalue(), 'true\n')
+ self.assertEqual(output.getvalue().decode('utf-8'), 'true\n')
def test_add_fun(self):
- input = StringIO('["add_fun", "def fun(doc): yield None, doc"]\n')
- output = StringIO()
+ input = BytesIO(b'["add_fun", "def fun(doc): yield None, doc"]\n')
+ output = BytesIO()
view.run(input=input, output=output)
- self.assertEqual(output.getvalue(), 'true\n')
+ self.assertEqual(output.getvalue().decode('utf-8'), 'true\n')
def test_map_doc(self):
- input = StringIO('["add_fun", "def fun(doc): yield None, doc"]\n'
- '["map_doc", {"foo": "bar"}]\n')
- output = StringIO()
+ input = BytesIO(b'["add_fun", "def fun(doc): yield None, doc"]\n'
+ b'["map_doc", {"foo": "bar"}]\n')
+ output = BytesIO()
view.run(input=input, output=output)
- self.assertEqual(output.getvalue(),
+ self.assertEqual(output.getvalue().decode('utf-8'),
'true\n'
'[[[null, {"foo": "bar"}]]]\n')
def test_i18n(self):
- input = StringIO('["add_fun", "def fun(doc): yield doc[\\"test\\"], doc"]\n'
- '["map_doc", {"test": "b\xc3\xa5r"}]\n')
- output = StringIO()
+ input = BytesIO(b'["add_fun", "def fun(doc): yield doc[\\"test\\"], doc"]\n'
+ b'["map_doc", {"test": "b\xc3\xa5r"}]\n')
+ output = BytesIO()
view.run(input=input, output=output)
self.assertEqual(output.getvalue(),
- 'true\n'
- '[[["b\xc3\xa5r", {"test": "b\xc3\xa5r"}]]]\n')
+ b'true\n'
+ b'[[["b\xc3\xa5r", {"test": "b\xc3\xa5r"}]]]\n')
def test_map_doc_with_logging(self):
fun = 'def fun(doc): log(\'running\'); yield None, doc'
- input = StringIO('["add_fun", "%s"]\n'
- '["map_doc", {"foo": "bar"}]\n' % fun)
- output = StringIO()
+ input = BytesIO(('["add_fun", "%s"]\n'
+ '["map_doc", {"foo": "bar"}]\n' % fun).encode())
+ output = BytesIO()
view.run(input=input, output=output)
- self.assertEqual(output.getvalue(),
+ self.assertEqual(output.getvalue().decode('utf-8'),
'true\n'
'{"log": "running"}\n'
'[[[null, {"foo": "bar"}]]]\n')
def test_map_doc_with_logging_json(self):
fun = 'def fun(doc): log([1, 2, 3]); yield None, doc'
- input = StringIO('["add_fun", "%s"]\n'
- '["map_doc", {"foo": "bar"}]\n' % fun)
- output = StringIO()
+ input = BytesIO(('["add_fun", "%s"]\n'
+ '["map_doc", {"foo": "bar"}]\n' % fun).encode())
+ output = BytesIO()
view.run(input=input, output=output)
- self.assertEqual(output.getvalue(),
+ self.assertEqual(output.getvalue().decode('utf-8'),
'true\n'
'{"log": "[1, 2, 3]"}\n'
'[[[null, {"foo": "bar"}]]]\n')
def test_reduce(self):
- input = StringIO('["reduce", '
- '["def fun(keys, values): return sum(values)"], '
- '[[null, 1], [null, 2], [null, 3]]]\n')
- output = StringIO()
+ input = BytesIO(b'["reduce", '
+ b'["def fun(keys, values): return sum(values)"], '
+ b'[[null, 1], [null, 2], [null, 3]]]\n')
+ output = BytesIO()
view.run(input=input, output=output)
- self.assertEqual(output.getvalue(), '[true, [6]]\n')
+ self.assertEqual(output.getvalue().decode('utf-8'), '[true, [6]]\n')
def test_reduce_with_logging(self):
- input = StringIO('["reduce", '
- '["def fun(keys, values): log(\'Summing %r\' % (values,)); return sum(values)"], '
- '[[null, 1], [null, 2], [null, 3]]]\n')
- output = StringIO()
+ input = BytesIO(b'["reduce", '
+ b'["def fun(keys, values): log(\'Summing %r\' % (values,)); return sum(values)"], '
+ b'[[null, 1], [null, 2], [null, 3]]]\n')
+ output = BytesIO()
view.run(input=input, output=output)
- self.assertEqual(output.getvalue(),
+ self.assertEqual(output.getvalue().decode('utf-8'),
'{"log": "Summing (1, 2, 3)"}\n'
'[true, [6]]\n')
def test_rereduce(self):
- input = StringIO('["rereduce", '
- '["def fun(keys, values, rereduce): return sum(values)"], '
- '[1, 2, 3]]\n')
- output = StringIO()
+ input = BytesIO(b'["rereduce", '
+ b'["def fun(keys, values, rereduce): return sum(values)"], '
+ b'[1, 2, 3]]\n')
+ output = BytesIO()
view.run(input=input, output=output)
- self.assertEqual(output.getvalue(), '[true, [6]]\n')
+ self.assertEqual(output.getvalue().decode('utf-8'), '[true, [6]]\n')
def test_reduce_empty(self):
- input = StringIO('["reduce", '
- '["def fun(keys, values): return sum(values)"], '
- '[]]\n')
- output = StringIO()
+ input = BytesIO(b'["reduce", '
+ b'["def fun(keys, values): return sum(values)"], '
+ b'[]]\n')
+ output = BytesIO()
view.run(input=input, output=output)
- self.assertEqual(output.getvalue(),
+ self.assertEqual(output.getvalue().decode('utf-8'),
'[true, [0]]\n')
def suite():
suite = unittest.TestSuite()
- suite.addTest(doctest.DocTestSuite(view))
+ if six.PY2:
+ suite.addTest(doctest.DocTestSuite(view))
suite.addTest(unittest.makeSuite(ViewServerTestCase, 'test'))
return suite
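
The view-server tests now frame every protocol line as UTF-8 bytes and decode the output before comparing. A stdlib-only sketch of that framing (it mimics, rather than calls, couchdb.view.run):

    # Each protocol line is UTF-8 bytes on the wire and is decoded back to
    # text before asserting.
    import json
    from io import BytesIO

    request = BytesIO(json.dumps(['reset']).encode('utf-8') + b'\n')
    response = BytesIO()
    response.write(b'true\n')  # what the view server answers to ["reset"]
    assert json.loads(request.getvalue().decode('utf-8')) == ['reset']
    assert response.getvalue().decode('utf-8') == 'true\n'
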
diff -r 961ac99baa29 couchdb/tools/dump.py
--- a/couchdb/tools/dump.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/tools/dump.py Fri Jan 17 11:03:56 2014 +0100
@@ -11,6 +11,8 @@
file.
"""
+from __future__ import print_function
+
from base64 import b64decode
from optparse import OptionParser
import sys
@@ -25,7 +27,7 @@
def dump_docs(envelope, docs):
for doc in docs:
- print >> sys.stderr, 'Dumping document %r' % doc.id
+ print('Dumping document %r' % doc.id, file=sys.stderr)
attachments = doc.pop('_attachments', {})
jsondoc = json.encode(doc)
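
With `from __future__ import print_function`, the rewritten print calls parse identically on Python 2.6+ and 3, and `file=sys.stderr` replaces the old `print >> sys.stderr` form. For example:

    from __future__ import print_function
    import sys

    # file= replaces the Python 2 "print >> sys.stderr, ..." form and works
    # the same on both interpreters once the future import is in place.
    print('Dumping document %r' % 'some-id', file=sys.stderr)
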
diff -r 961ac99baa29 couchdb/tools/load.py
--- a/couchdb/tools/load.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/tools/load.py Fri Jan 17 11:03:56 2014 +0100
@@ -11,6 +11,8 @@
file.
"""
+from __future__ import print_function
+
from base64 import b64encode
from optparse import OptionParser
import sys
@@ -45,13 +47,14 @@
doc = json.decode(payload)
del doc['_rev']
- print>>sys.stderr, 'Loading document %r' % docid
+ print('Loading document %r' % docid, file=sys.stderr)
try:
db[docid] = doc
- except Exception, e:
+ except Exception:
+ e = sys.exc_info()[1]
if not ignore_errors:
raise
- print>>sys.stderr, 'Error: %s' % e
+ print('Error: %s' % e, file=sys.stderr)
def main():
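
`except Exception, e` is a syntax error on Python 3. The patch keeps a bare `except Exception:` and pulls the instance from `sys.exc_info()`, which parses on old and new interpreters alike (`except Exception as e` would also work on 2.6+). A minimal sketch:

    import sys

    try:
        raise ValueError('boom')
    except Exception:
        # sys.exc_info()[1] retrieves the active exception without using the
        # Python-2-only "except Exception, e" syntax.
        e = sys.exc_info()[1]
        print('Error: %s' % e)
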
diff -r 961ac99baa29 couchdb/tools/replicate.py
--- a/couchdb/tools/replicate.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/tools/replicate.py Fri Jan 17 11:03:56 2014 +0100
@@ -17,12 +17,14 @@
Use 'python replicate.py --help' to get more detailed usage instructions.
"""
+from __future__ import print_function
+
from couchdb import http, client
import optparse
import sys
import time
import urllib
-import urlparse
+import six.moves.urllib.parse as urlparse
import fnmatch
def findpath(parser, s):
@@ -99,12 +101,12 @@
for sdb, tdb in databases:
start = time.time()
- print sdb, '->', tdb,
+ print(sdb, '->', tdb, end="")
sys.stdout.flush()
if tdb not in target:
target.create(tdb)
- print "created",
+ print("created", end="")
sys.stdout.flush()
sdb = '%s%s' % (sbase, urllib.quote(sdb, ''))
@@ -112,12 +114,12 @@
target.replicate(sdb, tdb, continuous=options.continuous)
else:
target.replicate(sdb, tdb)
- print '%.1fs' % (time.time() - start)
+ print('%.1fs' % (time.time() - start))
sys.stdout.flush()
if options.compact:
for (sdb, tdb) in databases:
- print 'compact', tdb
+ print('compact', tdb)
target[tdb].compact()
if __name__ == '__main__':
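
six.moves.urllib.parse points at urlparse on Python 2 and urllib.parse on Python 3, and `print(..., end="")` stands in for the old trailing-comma print statement. A small sketch, using a purely illustrative URL rather than anything from the tool:

    from __future__ import print_function
    import six.moves.urllib.parse as urlparse

    # end="" suppresses the newline much like the old trailing comma did.
    url = urlparse.urljoin('http://localhost:5984/', 'db%2Fname')
    print(url, '->', 'target-db', end="")
    print()  # finish the line
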
diff -r 961ac99baa29 couchdb/view.py
--- a/couchdb/view.py Sun Aug 18 18:41:46 2013 +0200
+++ b/couchdb/view.py Fri Jan 17 11:03:56 2014 +0100
@@ -12,6 +12,7 @@
from codecs import BOM_UTF8
import logging
import os
+import six
import sys
import traceback
from types import FunctionType
@@ -34,14 +35,14 @@
def _writejson(obj):
obj = json.encode(obj)
- if isinstance(obj, unicode):
+ if isinstance(obj, six.text_type):
obj = obj.encode('utf-8')
output.write(obj)
- output.write('\n')
+ output.write(b'\n')
output.flush()
def _log(message):
- if not isinstance(message, basestring):
+ if not isinstance(message, six.string_types):
message = json.encode(message)
_writejson({'log': message})
@@ -53,8 +54,9 @@
string = BOM_UTF8 + string.encode('utf-8')
globals_ = {}
try:
- exec string in {'log': _log}, globals_
- except Exception, e:
+ six.exec_(string, {'log': _log}, globals_)
+ except Exception:
+ e = sys.exc_info()[1]
return {'error': {
'id': 'map_compilation_error',
'reason': e.args[0]
@@ -66,7 +68,7 @@
}}
if len(globals_) != 1:
return err
- function = globals_.values()[0]
+ function = list(globals_.values())[0]
if type(function) is not FunctionType:
return err
functions.append(function)
@@ -77,7 +79,8 @@
for function in functions:
try:
results.append([[key, value] for key, value in function(doc)])
- except Exception, e:
+ except Exception:
+ e = sys.exc_info()[1]
log.error('runtime error in map function: %s', e,
exc_info=True)
results.append([])
@@ -89,8 +92,9 @@
args = cmd[1]
globals_ = {}
try:
- exec code in {'log': _log}, globals_
- except Exception, e:
+ six.exec_(code, {'log': _log}, globals_)
+ except Exception:
+ e = sys.exc_info()[1]
log.error('runtime error in reduce function: %s', e,
exc_info=True)
return {'error': {
@@ -104,7 +108,7 @@
}}
if len(globals_) != 1:
return err
- function = globals_.values()[0]
+ function = list(globals_.values())[0]
if type(function) is not FunctionType:
return err
@@ -118,7 +122,7 @@
keys, vals = zip(*args)
else:
keys, vals = [], []
- if function.func_code.co_argcount == 3:
+ if six.get_function_code(function).co_argcount == 3:
results = function(keys, vals, rereduce)
else:
results = function(keys, vals)
@@ -139,7 +143,8 @@
try:
cmd = json.decode(line)
log.debug('Processing %r', cmd)
- except ValueError, e:
+ except ValueError:
+ e = sys.exc_info()[1]
log.error('Error: %s', e, exc_info=True)
return 1
else:
@@ -148,7 +153,8 @@
_writejson(retval)
except KeyboardInterrupt:
return 0
- except Exception, e:
+ except Exception:
+ e = sys.exc_info()[1]
log.error('Error: %s', e, exc_info=True)
return 1
@@ -218,9 +224,10 @@
sys.stdout.flush()
sys.exit(0)
- except getopt.GetoptError, error:
+ except getopt.GetoptError:
+ error = sys.exc_info()[1]
message = '%s\n\nTry `%s --help` for more information.\n' % (
- str(error), os.path.basename(sys.argv[0])
+ six.text_type(error), os.path.basename(sys.argv[0])
)
sys.stderr.write(message)
sys.stderr.flush()
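
The view-server changes lean on two six helpers: six.exec_ hides the statement-versus-function difference of exec, and six.get_function_code papers over func_code (Python 2) versus __code__ (Python 3). A self-contained sketch of the compile-and-inspect pattern the module uses:

    import six

    # Compile a map/reduce function from source, then inspect its signature.
    namespace = {}
    six.exec_('def fun(keys, values): return sum(values)', {}, namespace)
    fun = list(namespace.values())[0]
    assert six.get_function_code(fun).co_argcount == 2
    assert fun(None, [1, 2, 3]) == 6
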
diff -r 961ac99baa29 perftest.py
--- a/perftest.py Sun Aug 18 18:41:46 2013 +0200
+++ b/perftest.py Fri Jan 17 11:03:56 2014 +0100
@@ -2,6 +2,9 @@
Simple peformance tests.
"""
+from __future__ import print_function
+
+import six
import sys
import time
@@ -10,8 +13,8 @@
def main():
- print 'sys.version : %r' % (sys.version,)
- print 'sys.platform : %r' % (sys.platform,)
+ print('sys.version : %r' % (sys.version,))
+ print('sys.platform : %r' % (sys.platform,))
tests = [create_doc, create_bulk_docs]
if len(sys.argv) > 1:
@@ -35,8 +38,9 @@
stop = time.time()
sys.stdout.write("%0.2fs\n" % (stop - start,))
sys.stdout.flush()
- except Exception, e:
- sys.stdout.write("FAILED - %r\n" % (unicode(e),))
+ except Exception:
+ e = sys.exc_info()[1]
+ sys.stdout.write("FAILED - %r\n" % (six.text_type(e),))
sys.stdout.flush()
finally:
server.delete(db_name)
@@ -45,7 +49,7 @@
def create_doc(db):
"""Create lots of docs, one at a time"""
for i in range(1000):
- db.save({'_id': unicode(i)})
+ db.save({'_id': six.text_type(i)})
def create_bulk_docs(db):
@@ -53,7 +57,7 @@
batch_size = 100
num_batches = 1000
for i in range(num_batches):
- db.update([{'_id': unicode((i * batch_size) + j)} for j in range(batch_size)])
+ db.update([{'_id': six.text_type((i * batch_size) + j)} for j in range(batch_size)])
if __name__ == '__main__':
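
six.text_type is unicode on Python 2 and str on Python 3, so the generated document IDs stay text on either interpreter. For instance:

    import six

    # unicode on Python 2, str on Python 3; either way the ID is text.
    doc_id = six.text_type(42)
    assert isinstance(doc_id, six.text_type) and doc_id == u'42'
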
diff -r 961ac99baa29 setup.py
--- a/setup.py Sun Aug 18 18:41:46 2013 +0200
+++ b/setup.py Fri Jan 17 11:03:56 2014 +0100
@@ -7,6 +7,8 @@
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
+from __future__ import print_function
+
from distutils.cmd import Command
import doctest
from glob import glob
@@ -61,13 +63,13 @@
code_block.content = 1
rst.directives.register_directive('code-block', code_block)
except ImportError:
- print 'Pygments not installed, syntax highlighting disabled'
+ print('Pygments not installed, syntax highlighting disabled')
for source in glob('doc/*.txt'):
dest = os.path.splitext(source)[0] + '.html'
if self.force or not os.path.exists(dest) or \
os.path.getmtime(dest) < os.path.getmtime(source):
- print 'building documentation file %s' % dest
+ print('building documentation file %s' % dest)
publish_cmdline(writer_name='html',
argv=['--config=%s' % docutils_conf, source,
dest])
@@ -86,7 +88,7 @@
sys.argv[1:] = old_argv
except ImportError:
- print 'epydoc not installed, skipping API documentation.'
+ print('epydoc not installed, skipping API documentation.')
class test_doc(Command):
@@ -101,7 +103,7 @@
def run(self):
for filename in glob('doc/*.txt'):
- print 'testing documentation file %s' % filename
+ print('testing documentation file %s' % filename)
doctest.testfile(filename, False, optionflags=doctest.ELLIPSIS)
@@ -112,7 +114,7 @@
# Build setuptools-specific options (if installed).
if not has_setuptools:
- print "WARNING: setuptools/distribute not available. Console scripts will not be installed."
+ print("WARNING: setuptools/distribute not available. Console scripts will not be installed.")
setuptools_options = {}
else:
setuptools_options = {