@necaris
Created June 7, 2013 22:47
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Fake CouchDB replication endpoint that copies documents into Couchbase.
"""
# Copyright (c) 2013, Rami Chowdhury
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer. Redistributions in binary
# form must reproduce the above copyright notice, this list of conditions and
# the following disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
# NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import json
import datetime
import wsgiref.simple_server
import wsgiref.util
import wsgiref.headers
import argparse
from base64 import urlsafe_b64encode
from hashlib import sha256
from functools import partial
from couchbase import Couchbase

# Global handler function that's called by the endpoint app -- expected to be
# set up when the script is executed
DOCUMENTS_HANDLER = None
# Global dictionary simulating CouchDB's _local database
_LOCAL = {}
# Global set tracking the IDs of documents we've already stored
_SEEN = set()

def _log_message(msg, **kw):
    """
    Log a given message to stdout with a timestamp.

    Does string interpolation on the message with the keyword arguments,
    if any are given.
    """
    timestamp = datetime.datetime.utcnow().isoformat()
    try:
        message = msg % kw
    except Exception:
        # Fall back to the raw message if interpolation fails
        message = msg
    print timestamp, "-", message

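# For illustration (names taken from the calls below):
#   _log_message("%(method)s %(path)s", method="GET", path="/")
# prints something like "2013-06-07T22:47:00.000000 - GET /"
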
def _get_epoch_timestamp(dt):
    """
    Return the epoch timestamp for the given datetime.
    """
    epoch = datetime.datetime(1970, 1, 1)
    return int((dt - epoch).total_seconds())

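# For illustration:
#   _get_epoch_timestamp(datetime.datetime(2013, 6, 7))
# returns 1370563200 (midnight UTC, June 7 2013)
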
def _generic_headers():
    """
    Output some generic filler headers, such as CouchDB might output.
    """
    headers = wsgiref.headers.Headers([])
    headers.add_header("Server", "CouchDB/1.1.1 (Erlang OTP/R15B01)")
    # Use UTC here so the hard-coded "GMT" suffix is actually accurate
    headers.add_header("Date", datetime.datetime.utcnow().strftime(
        "%a, %d %b %Y %H:%M:%S GMT"))
    headers.add_header("Content-Type", "application/json; charset=utf-8")
    headers.add_header("Cache-Control", "must-revalidate")
    return headers

def fake_replicator_app(environ, start_response):
    """
    Emulate just enough of CouchDB to serve as a replication endpoint.
    """
    request_method = environ.get('REQUEST_METHOD', None)
    path = environ['SCRIPT_NAME'] + environ['PATH_INFO']
    try:
        # If possible, read in the POSTed data and decode it as JSON
        input_stream = environ['wsgi.input']
        input_length = int(environ['CONTENT_LENGTH'])
        input_raw_data = input_stream.read(input_length)
        input_data = json.loads(input_raw_data)
    except (KeyError, ValueError):
        input_data = None
    _log_message("%(method)s %(path)s", method=request_method, path=path)
    if path.startswith("/_local"):
        # CouchDB uses the _local database to store checkpoint data for this
        # replication -- without it, it'll assume updates are failing and keep
        # trying the same few documents and revisions over and over again.
        # To simulate this, store things in a global dictionary.
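        # For illustration, a checkpoint body looks roughly like
        #   {"session_id": "...", "source_last_seq": 42, "history": [...]}
        # (the exact fields are CouchDB's business -- we only store them and
        # echo them back)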
        object_id = path.split('/')[2]
        headers = _generic_headers()
        if request_method == 'GET':
            # Try to fetch the data from the local dictionary, or return a 404
            try:
                object_data = _LOCAL[object_id]
                object_json = json.dumps(object_data)
                headers.add_header("Content-Length", str(len(object_json)))
                start_response("200 OK", headers.items())
                return [object_json]
            except KeyError:
                start_response("404 Not Found", headers.items())
                return ['{"error":"not_found","reason":"missing"}']
        elif request_method in ('POST', 'PUT'):
            # Storing data is just as simple
            _LOCAL[object_id] = input_data
            start_response("201 Created", headers.items())
            return [json.dumps({"ok": True})]
        else:
            start_response("555 Shouldn't Happen", headers.items())
            return []
    elif request_method == 'HEAD' and path == '/':
        # CouchDB sends a HEAD request at the beginning of a replication, to
        # check that the target database is appropriately configured. Send
        # back something that looks appropriate.
        headers = _generic_headers()
        start_response("200 OK", headers.items())
        return []
    elif request_method == 'GET' and path == '/':
        # Next, CouchDB will try to get the metadata for the target database
        # -- in this case, act as if there isn't one, so CouchDB will assume
        # it'll be created as it goes along.
        start_response("404 Not Found", [])
        return ['{"error":"not_found","reason":"no_db_file"}']
    elif request_method == 'POST':
        # Now we're getting into the actual replication -- the good stuff. At
        # this point we need to start dispatching on the actual request path
        # over and above other things.
        if path == '/_missing_revs':
            # CouchDB is asking us which revisions are *not* in this database,
            # i.e. which to replicate. We want everything we haven't already
            # stored, so echo the list back as the "missing_revs", minus
            # anything recorded in _SEEN.
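            # For illustration, the exchange looks roughly like
            #   request:  {"<doc_id>": ["<rev>", ...], ...}
            #   response: {"missing_revs": {"<doc_id>": ["<rev>", ...], ...}}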
_log_message("Got missing revs request: %(num)d missing revs",
num=len(input_data))
filtered_input_data = dict(
(k, v) for k, v in input_data.items() if k not in _SEEN)
output_data = {"missing_revs": filtered_input_data}
headers = _generic_headers()
headers.add_header("Content-Length",
str(len(json.dumps(output_data))))
start_response("200 OK", headers.items())
return [json.dumps(output_data)]
elif path == "/_bulk_docs":
# This is what we're really after -- documents! The documents are
# sent in the 'docs' field, and we pass them on to the handler.
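            # For illustration, the POSTed body looks roughly like
            #   {"docs": [{"_id": "...", "_rev": "...", ...}, ...],
            #    "new_edits": false}
            # -- only the 'docs' field matters here.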
_log_message("Got data dump: %(num)d documents",
num=len(input_data['docs']))
DOCUMENTS_HANDLER(input_data['docs'])
headers = _generic_headers()
start_response("200 OK", headers.items())
# Return empty list to indicate no errors
return ["[]"]
elif path == "/_ensure_full_commit":
# CouchDB's making sure the data's been written
_log_message("Got ensure_full_commit request")
headers = _generic_headers()
# Tell CouchDB we've created the documents
start_response("201 Created", headers.items())
# CouchDB expects a confirmation response
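            # Report an "instance start time" in the past -- CouchDB uses
            # this value to decide whether the target restarted while the
            # replication was running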
            _dt = datetime.datetime.utcnow() - datetime.timedelta(hours=1)
            output_data = {
                "ok": True,
                "instance_start_time": _get_epoch_timestamp(_dt),
            }
            _log_message(json.dumps(output_data))
            return [json.dumps(output_data)]
    # We don't care about anything else, so bail out with an error
    start_response("418 I'm A Teapot", [])
    return []

def _encode_key_if_necessary(key):
    """
    If a key is longer than 250 bytes, encode it as a SHA256 hash.

    This is necessary because Couchbase keys can't be more than 250 bytes
    long, since all the keys and metadata are kept in RAM on the cluster.
    CouchDB has no such restriction, and IDs may be very long.
    """
    # Measure (and hash) the UTF-8 bytes -- IDs arrive from json.loads as
    # unicode, and the 250-byte limit applies to the encoded key
    key_bytes = key.encode('utf-8') if isinstance(key, unicode) else key
    if len(key_bytes) <= 250:
        return key
    return urlsafe_b64encode(sha256(key_bytes).digest())

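# For illustration: a 300-character ASCII ID comes back as a 44-character
# URL-safe base64 SHA256 digest, comfortably under the limit.
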
def handle_documents(cb, docs):
    """
    Insert into Couchbase what we receive from CouchDB.

    Parameter 'cb' should be a Couchbase connection.
    """
    for doc in docs:
        _SEEN.add(doc['_id'])
        if doc.get('_deleted', False):
            # This document has been deleted in CouchDB, so it's not worth
            # inserting into Couchbase.
            continue
        if doc['_id'].startswith('_design'):
            # This is a CouchDB design document. Couchbase deals with design
            # documents a bit differently, and since they should be dealt
            # with separately, we just refuse to upload them.
            continue
        # Some CouchDB fields are not useful in Couchbase -- not for our
        # use case, at least.
        if '_rev' in doc:
            del doc['_rev']
        if '_revisions' in doc:
            del doc['_revisions']
        # Some fields are useful, but are called something else ;-)
        doc_id = doc['_id']
        del doc['_id']
        _log_message(" Couchbase insert: %(key)s", key=doc_id)
        # Couchbase has a maximum key length of 250 bytes, while CouchDB has
        # no such restriction. If the key is longer than that, we have a
        # potential problem. Hopefully there's no context data stored in
        # the key...
        key = _encode_key_if_necessary(doc_id)
        cb.set(key, doc)

if __name__ == '__main__':
    # Set up an ArgumentParser to read the command line
    parser = argparse.ArgumentParser(
        description="Fake CouchDB replication endpoint. Inserts to Couchbase")
    parser.add_argument(
        '--port', default=8080, type=int,
        help="The port on which to listen for connections from CouchDB")
    parser.add_argument(
        "--couchbase-host", default="127.0.0.1",
        help="The host for the Couchbase server")
    parser.add_argument(
        "--couchbase-bucket", default="default",
        help="The destination Couchbase bucket")
    parser.add_argument(
        "--couchbase-password", default="",
        help="The password for the destination bucket")
    args = parser.parse_args()
    # Create the Couchbase connection, and bail if it doesn't work
    cb = Couchbase.connect(host=args.couchbase_host,
                           bucket=args.couchbase_bucket,
                           password=args.couchbase_password)
    # Set up the global document handler
    DOCUMENTS_HANDLER = partial(handle_documents, cb)
    # Now start up the server
    httpd = wsgiref.simple_server.make_server('127.0.0.1', args.port,
                                              fake_replicator_app)
    # Silence wsgiref's default per-request logging; we do our own
    httpd.RequestHandlerClass.log_message = lambda *args: None
    httpd.serve_forever()
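
# Usage sketch (assumed names and ports): with CouchDB on 127.0.0.1:5984
# holding a database called "mydb", and this script running with its default
# port, a one-shot replication into Couchbase can be kicked off with:
#
#   curl -X POST http://127.0.0.1:5984/_replicate \
#        -H 'Content-Type: application/json' \
#        -d '{"source": "mydb", "target": "http://127.0.0.1:8080/"}'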