Skip to content

Instantly share code, notes, and snippets.

@zzzeek
Created July 17, 2014 23:14
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save zzzeek/001ec0e32cefe2d4e1a1 to your computer and use it in GitHub Desktop.
Save zzzeek/001ec0e32cefe2d4e1a1 to your computer and use it in GitHub Desktop.
notes for oslo issue 1339206
1. environment outside of tests.
OSLO_DBS = semicolon separated list of URLs:
OSLO_DBS=postgresql+psycopg2://foo:bar@host/test;mysql+mysqlconnector://foo:bar@host/test;mysql+mysqldb://foo:bar@host/test;
2. .testr.conf, let's use remote object provisioning:
instance_provision=${PYTHON:-python} -m oslo.db.sqlalchemy.provision create $INSTANCE_COUNT
instance_dispose=${PYTHON:-python} -m oslo.db.sqlalchemy.provision drop $INSTANCE_IDS
instance_execute=OS_TEST_DBAPI_CONNECTION=$INSTANCE_ID $COMMAND
provision create - for each url in OSLO_DBS and for a count of INSTANCE_COUNT,
create a schema <randomstring>_<drivername>, yield out just <randomstring>
e.g.
INSTANCE_COUNT = 5
determine random names: qprl, xzrj, gjwi, cniw, dwuf
create databases:
CONNECT postgresql+psycopg2://foo:bar@host/test
CREATE DATABASE qprl_psycopg2
CREATE DATABASE xzrj_psycopg2
CREATE DATABASE gjwi_psycopg2
CREATE DATABASE cniw_psycopg2
CREATE DATABASE dwuf_psycopg2
CONNECT mysql+mysqlconnector://foo:bar@host/test
CREATE DATABASE qprl_mysqlconnector
CREATE DATABASE xzrj_mysqlconnector
CREATE DATABASE gjwi_mysqlconnector
CREATE DATABASE cniw_mysqlconnector
CREATE DATABASE dwuf_mysqlconnector
CONNECT mysql+mysqldb://foo:bar@host/test
CREATE DATABASE qprl_mysqldb
CREATE DATABASE xzrj_mysqldb
CREATE DATABASE gjwi_mysqldb
CREATE DATABASE cniw_mysqldb
CREATE DATABASE dwuf_mysqldb
instance_execute = OSLO_SCHEMA_TOKEN=$INSTANCE_ID $COMMAND
e.g.
OSLO_SCHEMA_TOKEN=gjwi testrthing_run_tests <args...>
provision dispose - for each url in OSLO_DBS and for each instance id in INSTANCE_IDS,
we drop the schema <randomstring>_<drivername>
INSTANCE_IDS = qprl, xzrj, gjwi, cniw, dwuf
CONNECT postgresql+psycopg2://foo:bar@host/test
DROP DATABASE qprl_psycopg2
DROP DATABASE xzrj_psycopg2
DROP DATABASE gjwi_psycopg2
DROP DATABASE cniw_psycopg2
DROP DATABASE dwuf_psycopg2
CONNECT mysql+mysqlconnector://foo:bar@host/test
DROP DATABASE qprl_mysqlconnector
DROP DATABASE xzrj_mysqlconnector
DROP DATABASE gjwi_mysqlconnector
DROP DATABASE cniw_mysqlconnector
DROP DATABASE dwuf_mysqlconnector
CONNECT mysql+mysqldb://foo:bar@host/test
DROP DATABASE qprl_mysqldb
DROP DATABASE xzrj_mysqldb
DROP DATABASE gjwi_mysqldb
DROP DATABASE cniw_mysqldb
DROP DATABASE dwuf_mysqldb
3. provision objects
1. we store a collection of ProvisionedDatabase() objects - one per
database+drivername. the object links to OSLO_SCHEMA_TOKEN:
schemas = {
'postgresql+psycopg2': ProvisionedDatabase('postgresql+psycopg2://foo:bar@host/gjwi'),
'mysql+mysqlconnector': ProvisionedDatabase('mysql+mysqlconnector://foo:bar@host/gjwi'),
'mysql+mysqldb': ProvisionedDatabase('mysql+mysqldb://foo:bar@host/gjwi'),
}
2. class ProvisionedDatabase(object):
@classmethod
def from_url_and_token(cls, url, token):
return ProvisionedDatabase(url_minus_dbname + token)
def __init__(self, url):
self.url = url
self.current_metadata_name = None
self.engine = None
self.impl = ProvisionImpl.get_impl(url.database) # an Impl class
def get_engine(self):
if self.engine is None:
self.engine = oslo.db.create_engine(self.url, otherstuff)
return self.engine
def drop_all_tables(self):
# do an inspector.get_all_tables(), get_all_views(), get_all_sequences(),
# etc. DROP everything. empty out the schema w/o dropping it
self.impl.drop_all_tables(self.get_engine())
self.current_metadata_name = None
def create_tables(self, table_creation_fn, name):
if self.current_metadata_name != name:
if self.current_metadata_name is not None:
# drop the existing tables
self.drop_all_tables()
table_creation_fn()
self.current_metadata_name = name
self.clean = False
3. class ProvisionImpl(object):
@classmethod
def get_impl(cls, dbname):
# postgresql -> PostgresqlProvisionImpl()...
# mysql -> MysqlProvisionImpl()...
def drop_all_tables(self, engine):
raise NotImplementedError()
def create_database(self, engine):
raise NotImplementedError()
def drop_database(self, engine):
raise NotImplementedError()
class PostgresqlProvisionImpl(ProvisionImpl):
def create_database(self, engine):
# ...
def drop_all_tables(self, engine):
# ...
def drop_database(self, engine):
# ...
class MysqlProvisionImpl(ProvisionImpl):
def create_database(self, engine):
# ...
def drop_all_tables(self, engine):
# ...
def drop_database(self, engine):
# ...
4. provision module usage
provision.setup_for_transaction -> returns ProvisionedTransaction() (Fixture?)
provision.setup_for_transaction -> needs:
1. callable that generates tables, plus a name so we can identify it
2. list of things it will mock, e.g. get_engine() / get_session() methods
3. scenario -> which backend do we want, e.g. dbname+driver, postgresql+psycopg2,
mysql+mysqlconnector, etc.
steps:
1. the schema we will use is <OSLO_SCHEMA_TOKEN>_<drivername>
2. we establish the real engine for this schema. if not exists,
a. create_engine()
else:
a. we have the engine
3. was the last run on this a "drop everything"? or this is our first run?
or the last time we had a "create all tables" callable it was a different
one? (identify it with a name) if so then run the "create all tables" callable
4. set up the connection/transaction
5. make sure the events are in place for rollbacks
6. patch all the things
ProvisionedTransaction.dispose() ->
1. close out sessions
2. unmock mocked things
3. rollback transaction
4. close connection
provision.setup_for_schematest -> returns ProvisionedSchema()
1. list of things it will mock, e.g. get_engine() / get_session() methods
2. scenario -> which backend do we want, e.g. dbname+driver, postgresql+psycopg2,
mysql+mysqlconnector, etc.
steps:
1. the schema we will use is <OSLO_SCHEMA_TOKEN>_<drivername>
2. we establish the real engine for this schema. if not exists,
a. create_engine()
else:
a. we have the engine
b. do a drop everything
3. patch all the things
ProvisionedSchema.dispose() ->
1. close out sessions
2. unmock mocked things
3. do a drop everything
5. test classes sort of look like:
class scenariotestbase(testscenarios.testbase):
allowed_scenarios = 'all'
@memoized_property # <- descriptor, allows setting, testscenarios sets
# to None when it generates a scenario test
def scenarios(self):
# 'scenarios' -> special name used by testscenarios
available = provisioned.get_available_scenarios()
if self.allowed_scenarios != 'all':
# more comprehensive filtering logic here...
available = [scenario for scenario in available if
scenario in self.allowed_scenarios]
# testscenarios will set "scenario_backend" on each test
# object before it runs for real
return [
(scenario, {'scenario_backend': scenario})
for scenario in available
]
class oslotransactionaltestbase(scenariotestbase):
def create_schema(self):
raise NotImplementedError()
def setUp(self):
provisioned = provision.setup_for_transaction(self.create_schema,
backend=self.scenario_backend)
provisioned.mock_things(
[mock1, mock2, ...]
)
self.addCleanup(provisioned.dispose)
class osloschematestbase(scenariotestbase):
def setUp(self):
provisioned = provision.setup_for_schematest(
backend=self.scenario_backend)
provisioned.mock_things(
[mock1, mock2, ...]
)
self.addCleanup(provisioned.dispose)
class somenovatest(oslotransactionaltestbase):
allowed_scenarios = ('mysql', 'sqlite')
@classmethod
def create_schema(cls):
my_migration_thing.do_sync()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment