@ussy
Created June 25, 2011 15:41
DbUnit + YAML for Java
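A YamlDataSet implementation for DbUnit. Fixtures are plain YAML: each top-level key names a table, each list item is one row, column names are taken from the keys of the first row, and lookups are matched case-insensitively. A minimal sample fixture, with hypothetical table and column names, might look like this:

# data.yml - each top-level key is a table, each list item is one row
EMP:
  - ID: 1
    NAME: Alice
    DEPT_ID: 10
  - ID: 2
    NAME: Bob
    DEPT_ID: 20
DEPT:
  - ID: 10
    NAME: Sales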
/*
 *
 * The DbUnit Database Testing Framework
 * Copyright (C)2002-2004, DbUnit.org
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *
 */
package org.dbunit.operation;
import java.sql.SQLException;
import java.util.BitSet;
import org.dbunit.DatabaseUnitException;
import org.dbunit.database.DatabaseConfig;
import org.dbunit.database.IDatabaseConnection;
import org.dbunit.database.statement.IPreparedBatchStatement;
import org.dbunit.database.statement.IStatementFactory;
import org.dbunit.dataset.Column;
import org.dbunit.dataset.DataSetException;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.ITable;
import org.dbunit.dataset.ITableIterator;
import org.dbunit.dataset.ITableMetaData;
import org.dbunit.dataset.RowOutOfBoundsException;
import org.dbunit.dataset.datatype.TypeCastException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base implementation for database operations that are executed in batch.
 *
 * @author Manuel Laflamme
 * @version $Revision: 720 $
 * @since Feb 19, 2002
 */
public abstract class AbstractBatchOperation extends AbstractOperation
{
    /**
     * Logger for this class
     */
    private static final Logger logger = LoggerFactory.getLogger(AbstractBatchOperation.class);

    private static final BitSet EMPTY_BITSET = new BitSet();

    protected boolean _reverseRowOrder = false;

    static boolean isEmpty(ITable table) throws DataSetException
    {
        logger.debug("isEmpty(table={}) - start", table);

        Column[] columns = table.getTableMetaData().getColumns();

        // No columns = empty
        if (columns == null || columns.length == 0)
        {
            return true;
        }

        // Try to fetch first table value
        try
        {
            table.getValue(0, columns[0].getColumnName());
            return false;
        }
        catch (RowOutOfBoundsException e)
        {
            // Not able to access first row, thus empty
            return true;
        }
    }
    /**
     * Returns the list of tables this operation is applied to. This method
     * allows subclasses to do filtering.
     */
    protected ITableIterator iterator(IDataSet dataSet) throws DatabaseUnitException
    {
        return dataSet.iterator();
    }

    /**
     * Returns the mapping of columns to ignore by this operation. Each set bit
     * represents a column to ignore.
     */
    BitSet getIgnoreMapping(ITable table, int row) throws DataSetException
    {
        return EMPTY_BITSET;
    }

    /**
     * Returns false if the specified table row has a different ignore mapping
     * than the specified mapping.
     */
    boolean equalsIgnoreMapping(BitSet ignoreMapping, ITable table, int row) throws DataSetException
    {
        return true;
    }

    abstract OperationData getOperationData(ITableMetaData metaData,
            BitSet ignoreMapping, IDatabaseConnection connection) throws DataSetException;
    ////////////////////////////////////////////////////////////////////////////
    // DatabaseOperation class

    public void execute(IDatabaseConnection connection, IDataSet dataSet)
            throws DatabaseUnitException, SQLException
    {
        logger.debug("execute(connection={}, dataSet={}) - start", connection, dataSet);

        DatabaseConfig databaseConfig = connection.getConfig();
        IStatementFactory factory = (IStatementFactory)databaseConfig.getProperty(DatabaseConfig.PROPERTY_STATEMENT_FACTORY);

        // for each table
        ITableIterator iterator = iterator(dataSet);
        while (iterator.next())
        {
            ITable table = iterator.getTable();

            // Do not process empty tables
            if (isEmpty(table))
            {
                continue;
            }

            ITableMetaData metaData = getOperationMetaData(connection, table.getTableMetaData());
            BitSet ignoreMapping = null;
            OperationData operationData = null;
            IPreparedBatchStatement statement = null;

            try
            {
                // for each row
                int start = _reverseRowOrder ? table.getRowCount() - 1 : 0;
                int increment = _reverseRowOrder ? -1 : 1;

                try
                {
                    for (int i = start; ; i = i + increment)
                    {
                        int row = i;

                        // If the current row has a different ignore value mapping than
                        // the previous one, we generate a new statement
                        if (ignoreMapping == null || !equalsIgnoreMapping(ignoreMapping, table, row))
                        {
                            // Execute and close previous statement
                            if (statement != null)
                            {
                                statement.executeBatch();
                                statement.clearBatch();
                                statement.close();
                            }

                            ignoreMapping = getIgnoreMapping(table, row);
                            operationData = getOperationData(metaData, ignoreMapping, connection);
                            statement = factory.createPreparedBatchStatement(
                                    operationData.getSql(), connection);
                        }

                        // for each column
                        Column[] columns = operationData.getColumns();
                        for (int j = 0; j < columns.length; j++)
                        {
                            // Bind the value only if it is not in the ignore mapping
                            if (!ignoreMapping.get(j))
                            {
                                Column column = columns[j];
                                try
                                {
                                    statement.addValue(table.getValue(row, column.getColumnName()), column.getDataType());
                                }
                                catch (TypeCastException e)
                                {
                                    throw new TypeCastException("Error casting value for table '" + table.getTableMetaData().getTableName()
                                            + "' and column '" + column.getColumnName() + "'", e);
                                }
                            }
                        }
                        statement.addBatch();
                    }
                }
                catch (RowOutOfBoundsException e)
                {
                    // This exception occurs when the rows are exhausted and we
                    // reach the end of the table; it is the expected loop terminator
                }
                statement.executeBatch();
                statement.clearBatch();
            }
            finally
            {
                if (statement != null)
                {
                    statement.close();
                }
            }
        }
    }
    public String toString()
    {
        StringBuffer sb = new StringBuffer();
        sb.append(getClass().getName()).append("[");
        sb.append("_reverseRowOrder=").append(this._reverseRowOrder);
        sb.append(", super=").append(super.toString());
        sb.append("]");
        return sb.toString();
    }
}
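The class above is DbUnit's stock AbstractBatchOperation, reproduced here presumably to make the row loop's termination contract visible: the loop has no explicit bound and keeps calling getValue until the table throws RowOutOfBoundsException. Any custom ITable, such as the YAML-backed one below, must honor that contract. A minimal sketch of the same exception-terminated iteration pattern, with a hypothetical helper class and an assumed column name:

import org.dbunit.dataset.DataSetException;
import org.dbunit.dataset.ITable;
import org.dbunit.dataset.RowOutOfBoundsException;

// Hypothetical helper: dumps one column of any ITable using the same
// exception-terminated loop that AbstractBatchOperation relies on.
public class TableDump {
    static void dumpColumn(ITable table, String columnName) throws DataSetException {
        try {
            for (int row = 0; ; row++) {
                // getValue must throw RowOutOfBoundsException past the last row
                System.out.println(table.getValue(row, columnName));
            }
        } catch (RowOutOfBoundsException end) {
            // end of table reached; this is the normal exit path
        }
    }
}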
package net.pshared.dbunit;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.dbunit.dataset.Column;
import org.dbunit.dataset.DataSetException;
import org.dbunit.dataset.DefaultTableIterator;
import org.dbunit.dataset.DefaultTableMetaData;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.ITable;
import org.dbunit.dataset.ITableIterator;
import org.dbunit.dataset.ITableMetaData;
import org.dbunit.dataset.RowOutOfBoundsException;
import org.dbunit.dataset.datatype.DataType;
import org.ho.yaml.Yaml;
public class YamlDataSet implements IDataSet {

    private Map<String, MyTable> tables = new HashMap<String, MyTable>();

    public YamlDataSet(File file) throws FileNotFoundException {
        // The YAML root is expected to map each table name to a list of row maps
        @SuppressWarnings("unchecked")
        Map<String, List<Map<String, Object>>> data = (Map<String, List<Map<String, Object>>>) Yaml.load(file);
        for (Map.Entry<String, List<Map<String, Object>>> ent : data.entrySet()) {
            String tableName = ent.getKey();
            List<Map<String, Object>> rows = ent.getValue();
            createTable(tableName, rows);
        }
    }
    class MyTable implements ITable {

        String name;

        List<Map<String, Object>> data;

        ITableMetaData meta;

        MyTable(String name, List<String> columnNames) {
            this.name = name;
            this.data = new ArrayList<Map<String, Object>>();
            meta = createMeta(name, columnNames);
        }

        ITableMetaData createMeta(String name, List<String> columnNames) {
            Column[] columns = null;
            if (columnNames != null) {
                columns = new Column[columnNames.size()];
                for (int i = 0; i < columnNames.size(); i++)
                    columns[i] = new Column(columnNames.get(i), DataType.UNKNOWN);
            }
            return new DefaultTableMetaData(name, columns);
        }

        public int getRowCount() {
            return data.size();
        }

        public ITableMetaData getTableMetaData() {
            return meta;
        }

        public Object getValue(int row, String column) throws DataSetException {
            // Signal the end of the table with RowOutOfBoundsException, as
            // AbstractBatchOperation expects; also guard against the negative
            // indices produced by reverse row iteration
            if (row < 0 || row >= data.size())
                throw new RowOutOfBoundsException("" + row);
            return data.get(row).get(column.toUpperCase());
        }

        public void addRow(Map<String, Object> values) {
            data.add(convertMap(values));
        }

        // Column lookup is case-insensitive: keys are stored upper-cased
        Map<String, Object> convertMap(Map<String, Object> values) {
            Map<String, Object> ret = new HashMap<String, Object>();
            for (Map.Entry<String, Object> ent : values.entrySet()) {
                ret.put(ent.getKey().toUpperCase(), ent.getValue());
            }
            return ret;
        }
    }
    MyTable createTable(String name, List<Map<String, Object>> rows) {
        // Column names are taken from the keys of the first row; an empty
        // row list yields a table without columns
        MyTable table = new MyTable(name, rows.size() > 0 ? new ArrayList<String>(rows.get(0).keySet()) : null);
        for (Map<String, Object> values : rows)
            table.addRow(values);
        tables.put(name, table);
        return table;
    }

    public ITable getTable(String tableName) throws DataSetException {
        // Note: returns null rather than throwing NoSuchTableException for unknown tables
        return tables.get(tableName);
    }

    public ITableMetaData getTableMetaData(String tableName) throws DataSetException {
        return tables.get(tableName).getTableMetaData();
    }

    public String[] getTableNames() throws DataSetException {
        return (String[]) tables.keySet().toArray(new String[tables.size()]);
    }

    public ITable[] getTables() throws DataSetException {
        return (ITable[]) tables.values().toArray(new ITable[tables.size()]);
    }

    public ITableIterator iterator() throws DataSetException {
        return new DefaultTableIterator(getTables());
    }

    public ITableIterator reverseIterator() throws DataSetException {
        return new DefaultTableIterator(getTables(), true);
    }

    public boolean isCaseSensitiveTableNames() {
        return false;
    }
}
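YamlDataSet can be used on its own wherever DbUnit accepts an IDataSet. A minimal sketch that loads a fixture and prints each table's row count; data.yml is an assumed file matching the sample format at the top:

import java.io.File;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.ITable;
import net.pshared.dbunit.YamlDataSet;

public class YamlDataSetDemo {
    public static void main(String[] args) throws Exception {
        // data.yml is a hypothetical fixture file
        IDataSet dataSet = new YamlDataSet(new File("data.yml"));
        for (String name : dataSet.getTableNames()) {
            ITable table = dataSet.getTable(name);
            System.out.println(name + ": " + table.getRowCount() + " row(s)");
        }
    }
}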
package net.pshared.dbunit;
import java.io.File;
import java.sql.Connection;
import org.dbunit.database.DatabaseConnection;
import org.dbunit.database.IDatabaseConnection;
import org.dbunit.dataset.IDataSet;
import org.dbunit.operation.DatabaseOperation;
public class YamlReplaceDbUtils {

    public static void readAllReplaceDb(Connection conn, File file) {
        try {
            IDatabaseConnection connection = new DatabaseConnection(conn);
            IDataSet dataSet = new YamlDataSet(file);
            // CLEAN_INSERT deletes the contents of each listed table,
            // then inserts the fixture rows
            DatabaseOperation.CLEAN_INSERT.execute(connection, dataSet);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
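A minimal sketch of driving the helper from a plain JDBC connection; the in-memory H2 URL and the fixture path are assumptions for illustration:

import java.io.File;
import java.sql.Connection;
import java.sql.DriverManager;
import net.pshared.dbunit.YamlReplaceDbUtils;

public class ReplaceDbDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical in-memory H2 database; any JDBC connection works
        Connection conn = DriverManager.getConnection("jdbc:h2:mem:test");
        try {
            YamlReplaceDbUtils.readAllReplaceDb(conn, new File("data.yml"));
        } finally {
            conn.close();
        }
    }
}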
package net.pshared.example;
import java.io.File;
import net.pshared.dbunit.YamlReplaceDbUtils;
import org.seasar.framework.unit.S2TigerTestCase;
import org.seasar.framework.util.ResourceUtil;
public abstract class YamlSupportTestCase extends S2TigerTestCase {

    @Override
    protected boolean needTransaction() {
        return true;
    }

    protected void readYamlAllReplaceDb(String path) {
        // Resolve the path relative to the test class's package on the classpath
        String convertPath = ResourceUtil.convertPath(path, getClass());
        File file = ResourceUtil.getResourceAsFile(convertPath);
        YamlReplaceDbUtils.readAllReplaceDb(getConnection(), file);
    }
}
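Concrete tests extend the base class and load a fixture by a classpath-relative name. A hypothetical example; the test class, fixture file name, and the code under test are all assumptions:

package net.pshared.example;

// Hypothetical JUnit3-style test case built on YamlSupportTestCase
public class EmpServiceTest extends YamlSupportTestCase {

    public void testFindAll() throws Exception {
        // Replaces the tables listed in EmpServiceTest_data.yml
        // (placed next to this class) with the fixture rows
        readYamlAllReplaceDb("EmpServiceTest_data.yml");
        // ... exercise the code under test against the seeded database ...
    }
}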