@soundofjw
Last active August 29, 2015 14:27
Base class for running taskqueue tasks from test cases.
import datetime
import unittest
import time
import cgi
import base64
import random
import sys
import StringIO
import logging
import re

from google.appengine.ext import testbed
from google.appengine.api import apiproxy_stub_map
from google.appengine.ext import webapp
from google.appengine.api.taskqueue import taskqueue_stub as tqs

import pipeline
import testutil  # the gist's second file, which provides TestSetupMixin

def get_tasks(queue_name='default'):
    """Gets pending tasks from a queue, adding a 'params' dictionary to them.

    Code originally from:
    http://code.google.com/p/pubsubhubbub/source/browse/trunk/hub/testutil.py
    """
    taskqueue_stub = apiproxy_stub_map.apiproxy.GetStub('taskqueue')

    stub_globals = taskqueue_stub.GetTasks.func_globals
    old_format = stub_globals['_FormatEta']
    # Yes-- this is a vicious hack to have the task queue stub return the
    # ETA of tasks as datetime instances instead of text strings.
    stub_globals['_FormatEta'] = \
        lambda x: datetime.datetime.utcfromtimestamp(x / 1000000.0)
    try:
        task_list = taskqueue_stub.GetTasks(queue_name)
    finally:
        stub_globals['_FormatEta'] = old_format

    adjusted_task_list = []
    for task in task_list:
        for header, value in task['headers']:
            if (header == 'content-type' and
                    value == 'application/x-www-form-urlencoded'):
                task['params'] = cgi.parse_qs(base64.b64decode(task['body']))
                break
        adjusted_task_list.append(task)
    return adjusted_task_list
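
# Example (a sketch, not part of the original gist): asserting on an enqueued
# task from inside a TestBase test method; '/some/url', 'foo', and 'bar' are
# assumed values for illustration.
#
#   tasks = get_tasks('default')
#   self.assertEqual(1, len(tasks))
#   self.assertEqual('/some/url', tasks[0]['url'])
#   # 'params' holds the decoded form body; cgi.parse_qs maps keys to lists.
#   self.assertEqual(['bar'], tasks[0]['params']['foo'])
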
def delete_tasks(task_list, queue_name='default'):
    """Deletes a set of tasks from a queue."""
    taskqueue_stub = apiproxy_stub_map.apiproxy.GetStub('taskqueue')
    for task in task_list:
        # NOTE: Use Delete here instead of DeleteTask because DeleteTask will
        # remove the task's name from the list of tombstones, which will cause
        # some tasks to run multiple times in tests if barriers fire twice.
        taskqueue_stub._GetGroup().GetQueue(queue_name).Delete(task['name'])

class TestBase(testutil.TestSetupMixin, unittest.TestCase):
    """Base class for all tests in this module."""

    def setUp(self):
        super(TestBase, self).setUp()
        self.maxDiff = 10**10

        # First, create an instance of the Testbed class.
        self.testbed = testbed.Testbed()
        # Then activate the testbed, which prepares the service stubs for use.
        self.testbed.activate()
        # Next, declare which service stubs you want to use.
        self.testbed.init_blobstore_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_app_identity_stub()

        self.taskqueue_stub = apiproxy_stub_map.apiproxy.GetStub('taskqueue')
        self.queue_name = "default"

    def tearDown(self):
        self.testbed.deactivate()

    def run_tasks(self, queue_name='default'):
        """Runs all tasks in a queue."""
        task_list = get_tasks(queue_name=queue_name)
        # Shuffle the task list to actually test out-of-order execution.
        random.shuffle(task_list)
        for task in task_list:
            self.run_task(task)
        delete_tasks(task_list, queue_name)
        return task_list
    def run_task(self, task):
        """Runs the given task against the pipeline handlers."""
        name = task['name']
        method = task['method']
        url = task['url']
        headers = dict(task['headers'])
        environ = {
            'wsgi.input': StringIO.StringIO(base64.b64decode(task['body'])),
            'wsgi.errors': sys.stderr,
            'REQUEST_METHOD': method,
            'SCRIPT_NAME': '',
            'PATH_INFO': url,
            'CONTENT_TYPE': headers.get('content-type', ''),
            'CONTENT_LENGTH': headers.get('content-length', ''),
            'HTTP_X_APPENGINE_TASKNAME': name,
            'HTTP_X_APPENGINE_QUEUENAME': self.queue_name,
        }
        handlers_map = pipeline.create_handlers_map()
        match_url = url
        if method == 'GET':
            environ['PATH_INFO'], environ['QUERY_STRING'] = (
                (url.split('?', 1) + [''])[:2])
            match_url = environ['PATH_INFO']

        logging.debug('Executing "%s %s" name="%s"', method, url, name)
        for pattern, handler_class in handlers_map:
            the_match = re.match('^%s$' % pattern, match_url)
            if the_match:
                break
        else:
            self.fail('No matching handler for "%s %s"' % (method, url))

        handler = handler_class()
        request = webapp.Request(environ)
        response = webapp.Response()
        handler.initialize(request, response)
        getattr(handler, method.lower())(*the_match.groups())
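
A minimal usage sketch (not part of the gist; MyPipeline is a hypothetical
pipeline class): a test subclasses TestBase, starts the pipeline, and drains
the default queue. Because executing pipeline tasks can enqueue further tasks,
run_tasks() is called until it returns an empty list.

    class MyPipelineTest(TestBase):

        def test_runs_to_completion(self):
            stage = MyPipeline('some-argument')  # hypothetical pipeline class
            stage.start(queue_name='default')
            # run_tasks() returns the tasks it just executed; those tasks may
            # have enqueued new ones, so keep draining until the queue is empty.
            while self.run_tasks():
                pass
            # Assertions on the resulting datastore / pipeline state go here.

The second file in the gist follows: the testutil module referenced by
TestBase above (it provides TestSetupMixin), adapted from the Google App
Engine Pipeline API's own test utilities.
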
#!/usr/bin/env python
#
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Test utilities for the Google App Engine Pipeline API."""
# Code originally from:
# http://code.google.com/p/pubsubhubbub/source/browse/trunk/hub/testutil.py
import logging
import os
import sys
import tempfile

class TestSetupMixin(object):

    TEST_APP_ID = 'my-app-id'
    TEST_VERSION_ID = 'my-version.1234'

    def setUp(self):
        super(TestSetupMixin, self).setUp()

        from google.appengine.api import apiproxy_stub_map
        from google.appengine.api import memcache
        from google.appengine.api import queueinfo
        from google.appengine.datastore import datastore_stub_util
        from google.appengine.ext import testbed
        from google.appengine.ext.testbed import TASKQUEUE_SERVICE_NAME

        before_level = logging.getLogger().getEffectiveLevel()

        os.environ['APPLICATION_ID'] = self.TEST_APP_ID
        os.environ['CURRENT_VERSION_ID'] = self.TEST_VERSION_ID
        os.environ['HTTP_HOST'] = '%s.appspot.com' % self.TEST_APP_ID
        os.environ['DEFAULT_VERSION_HOSTNAME'] = os.environ['HTTP_HOST']
        os.environ['CURRENT_MODULE_ID'] = 'foo-module'

        try:
            logging.getLogger().setLevel(100)

            self.testbed = testbed.Testbed()
            self.testbed.activate()
            self.testbed.setup_env(app_id=self.TEST_APP_ID, overwrite=True)
            self.testbed.init_memcache_stub()

            hr_policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
                probability=1)
            self.testbed.init_datastore_v3_stub(consistency_policy=hr_policy)
            self.testbed.init_taskqueue_stub()

            root_path = os.path.realpath(os.path.dirname(__file__))

            # Actually need to flush, even though we've reallocated. Maybe
            # because the memcache stub's cache is at the module level, not
            # the API stub?
            memcache.flush_all()
        finally:
            logging.getLogger().setLevel(before_level)

        define_queues = ['other']
        taskqueue_stub = apiproxy_stub_map.apiproxy.GetStub('taskqueue')
        taskqueue_stub.queue_yaml_parser = (
            lambda x: queueinfo.LoadSingleQueue(
                'queue:\n- name: default\n  rate: 1/s\n' +
                '\n'.join('- name: %s\n  rate: 1/s' % name
                          for name in define_queues)))

    def tearDown(self):
        super(TestSetupMixin, self).tearDown()
        self.testbed.deactivate()
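
The queue_yaml_parser hook above registers a 'default' queue plus the queues
named in define_queues ('other'), so tests can exercise more than one queue.
A sketch (not from the gist; the enqueued URL and params are assumed, and
taskqueue is google.appengine.api.taskqueue):

    # Inside a TestBase test method:
    taskqueue.add(url='/work', params={'key': 'value'}, queue_name='other')
    self.run_tasks(queue_name='other')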