Skip to content

Instantly share code, notes, and snippets.

@adamruzicka
Forked from iNecas/dynflow-csv
Last active November 7, 2018 07:41
Show Gist options
  • Save adamruzicka/6f1b75df49e4a3fe30ba130cff382d28 to your computer and use it in GitHub Desktop.
#!/usr/bin/env bash
# dynflow-csv: export Dynflow/foreman-tasks data to CSV files, import
# them into a scratch postgres DB, and inspect them with the Dynflow
# web console. Run with --help for usage.
PROGNAME="$(basename "$0")"
WITHOUT_DYNFLOW=0      # set to 1 by -D/--no-dynflow
DB=dynflow-import      # target database name (-d/--database)
DB_USER=postgres       # postgres user (-U/--user)
# DB_PASSWORD=         # optional password (-p/--database-password)
DB_HOST=""             # postgres host (-H/--database-host)
PSQL_FLAGS=""          # assembled after option parsing
# Intentionally empty here: the '-d' flag is assembled after option
# parsing so that a name given via -d/--database is honored. (It used
# to be baked in here AND appended again later, passing a stale,
# duplicated -d to psql.)
PSQL_DB_FLAGS=""
SU_USER="postgres"     # run psql/ruby via 'su - USER' (-s/--su-user)
function export-csv () {
  # Export the rows selected by a SQL query into a CSV file.
  #   $1 - SQL SELECT statement (interpolated into COPY ... TO STDOUT)
  #   $2 - output file path, written by the *current* user
  # The whole psql command line is passed as one pre-quoted string so it
  # survives re-parsing by the shell spawned inside as-su-user.
  echo "Exporting $2"
  # FIX: quote "$2" so paths with spaces/globs don't break the redirect.
  echo "COPY ($1) TO STDOUT WITH CSV;" | as-su-user "psql $PSQL_FLAGS $PSQL_DB_FLAGS" > "$2"
}
function import-csv () {
  # Load a CSV file into a table via COPY ... FROM STDIN.
  #   $1 - target table name
  #   $2 - CSV file to read (fed to psql on stdin)
  # The nested single quotes are deliberate: as-su-user joins its
  # arguments with "$*" and hands them to sh -c / su -c, which re-parses
  # the string — the inner quotes keep the SQL a single -c argument.
  echo "Importing $2"
  # FIX: quote "$2" so paths with spaces/globs don't break the redirect.
  as-su-user psql $PSQL_FLAGS $PSQL_DB_FLAGS -c "'COPY $1 FROM STDIN WITH CSV'" < "$2"
}
function dynflow-connection-string () {
  # Compose the postgres:// connection URL for Dynflow from the
  # DB_USER / DB_PASSWORD / DB_HOST / DB globals. Empty components are
  # simply omitted (e.g. no ':password' part when no password is set).
  result="postgres://"
  if [ -n "$DB_USER" ]; then
    result="${result}${DB_USER}"
  fi
  if [ -n "$DB_PASSWORD" ]; then
    result="${result}:${DB_PASSWORD}"
  fi
  printf '%s@%s/%s\n' "$result" "$DB_HOST" "$DB"
}
function as-su-user () {
  # Execute the given command line, switching to $SU_USER via 'su -'
  # when one is configured, otherwise with a plain sh.
  # NB: "$*" joins all arguments into one string that the target shell
  # re-parses, so callers must pre-quote anything containing spaces.
  if [ -n "$SU_USER" ]; then
    su - "$SU_USER" -c "$*"
  else
    sh -c "$*"
  fi
}
# Boot a minimal Dynflow world against the import DB by generating a
# Ruby script and running it with tfm's ruby (via scl). With no
# argument (or "false") it only initializes the world — which creates /
# migrates the dynflow_* schema — and exits. With "true" it also serves
# the Dynflow web console on 0.0.0.0:4567 and blocks until 'q' is read
# from stdin. $block and $(dynflow-connection-string) are expanded by
# the shell into the Ruby source of the heredoc below.
function run-dynflow () {
block=${1:-false}
# mktemp runs as $SU_USER so the su'd ruby can read the script; the
# heredoc below is written by the current user — assumes the current
# user can write to that temp file (TODO confirm, e.g. when run as root).
script="$(as-su-user mktemp)"
cat >"$script" <<EOF
require 'dynflow'
config = Dynflow::Config.new
config.persistence_adapter = Dynflow::PersistenceAdapters::Sequel.new "$(dynflow-connection-string)"
config.logger_adapter = Dynflow::LoggerAdapters::Simple.new STDERR, 4
config.auto_rescue = false
config.auto_execute = false
config.auto_validity_check = false
config.auto_terminate = false
world = Dynflow::World.new(config)
if $block
require 'dynflow/web'
dynflow_console = Dynflow::Web.setup do
set :world, world
end
puts "\n The Dynflow console is starting. To increase the page size, use 'per_page=1000' in web query"
puts " Press 'q' to stop\n\n"
Thread.new do
Rack::Server.new(:app => dynflow_console, :Port => 4567, :Host => '0.0.0.0').start
end
while STDIN.gets.chomp != 'q'
end
world.terminate
end
EOF
as-su-user scl enable tfm -- ruby "$script"
rm "$script"
}
function prepare-foreman-tasks () {
  # Create the foreman_tasks_tasks table in the import DB — the one
  # non-Dynflow table the exported CSVs rely on (the dynflow_* tables
  # are created by run-dynflow's schema migration).
  as-su-user psql $PSQL_FLAGS $PSQL_DB_FLAGS <<EOF
CREATE TABLE foreman_tasks_tasks (
id uuid NOT NULL,
type character varying NOT NULL,
label character varying,
started_at timestamp without time zone,
ended_at timestamp without time zone,
state character varying NOT NULL,
result character varying NOT NULL,
external_id character varying,
parent_task_id uuid,
start_at timestamp without time zone,
start_before timestamp without time zone,
action character varying
);
EOF
}
function dynflow-import () {
  # Create the target database (|| true: ignore "already exists"),
  # prepare the schema, and load the CSV dumps from the current dir.
  echo "create database \"$DB\"" | as-su-user psql $PSQL_FLAGS || true
  # prepare db schema — let Dynflow create its own tables unless
  # -D/--no-dynflow was given (e.g. schema already present).
  [ "$WITHOUT_DYNFLOW" -eq 0 ] && run-dynflow
  prepare-foreman-tasks
  # load data. NOTE: dynflow_schema_info.csv (written by dynflow-export)
  # is intentionally not imported — run-dynflow's migration fills it.
  for table in dynflow_{execution_plans,actions,steps} foreman_tasks_tasks; do
    # FIX: quote the expansions so odd table/file names can't word-split.
    import-csv "$table" "${table}.csv"
  done
}
function db-drop () {
  # Drop the import database. The DDL goes to psql on stdin so the
  # quoted DB name survives the shell spawned inside as-su-user.
  printf 'drop database "%s"\n' "$DB" | as-su-user psql $PSQL_FLAGS
}
# Dump Dynflow data for tasks started within the last 2 months into CSV
# files in the current directory. The dynflow_* tables are joined
# through foreman_tasks_tasks.external_id = execution plan uuid; the
# full task table and Dynflow's schema version are dumped unfiltered.
function dynflow-export () {
export-csv "select dynflow_execution_plans.* from foreman_tasks_tasks join dynflow_execution_plans on (foreman_tasks_tasks.external_id = dynflow_execution_plans.uuid) where foreman_tasks_tasks.started_at > 'now'::timestamp - '2 months'::interval" dynflow_execution_plans.csv
export-csv "select dynflow_actions.* from foreman_tasks_tasks join dynflow_actions on (foreman_tasks_tasks.external_id = dynflow_actions.execution_plan_uuid) where foreman_tasks_tasks.started_at > 'now'::timestamp - '2 months'::interval" dynflow_actions.csv
export-csv "select dynflow_steps.* from foreman_tasks_tasks join dynflow_steps on (foreman_tasks_tasks.external_id = dynflow_steps.execution_plan_uuid) where foreman_tasks_tasks.started_at > 'now'::timestamp - '2 months'::interval" dynflow_steps.csv
export-csv "select * from foreman_tasks_tasks" foreman_tasks_tasks.csv
# NOTE: dynflow_schema_info.csv is exported for reference only; the
# import path recreates the schema via run-dynflow instead.
export-csv "select * from dynflow_schema_info" dynflow_schema_info.csv
}
# Print usage to stdout: a header, then two tables aligned by column(1)
# ('&' is the throw-away separator character consumed by column -s '&').
function help() {
cat <<HEADER
Usage: $PROGNAME [OPTIONS] ACTION [ACTION2 ...]
Multiple actions can be provided and will be executed in the order
they were specified.
ACTION:
HEADER
column -s '&' -t <<EOF
drop & Drop the DB
export & Export dynflow data to CSV files inside current directory
import & Import CSV data from current directory
env & Print how to configure Dynflow
run & Run Dynflow web console
EOF
echo
echo "OPTIONS:"
column -s '&' -t <<EOF
-d|--database DB & Use database DB
-D|--no-dynflow & Do not attempt to run Dynflow
-h|--help & Show help
-H|--database-host HOST & Connect to postgres running on HOST
-U|--user USER & Connect to postgres as USER
-p|--database-password PASSWORD & Connect to postgres using PASSWORD
-s|--su-user USER & Run commands as USER, defaults to postgres
EOF
}
SHORT_OPTIONS="d:hDH:U:p:s:"
# Long options for util-linux getopt(1): names are comma-separated, a
# trailing ':' marks an option taking an argument.
# FIX: the list was missing the commas after 'user:' and
# 'database-password:', so --user, --database-password and --su-user
# were never recognized as separate long options.
LONG_OPTIONS="database:,database-host:,help,no-dynflow,user:,database-password:,su-user:"
PARSED_OPTIONS=$(getopt -n "$0" -o "$SHORT_OPTIONS" --long "$LONG_OPTIONS" -- "$@")
if [ $? -ne 0 ];
then
echo "getopt failed" >&2 && exit 1
fi
# Replace the positional parameters with getopt's normalized, quoted
# output so options can be consumed pairwise below.
eval set -- "$PARSED_OPTIONS"
# Consume recognized options until the '--' end-of-options marker;
# everything after it is left in "$@" as the list of actions.
while true; do
case "$1" in
-U|--user)
DB_USER="$2"
shift 2
;;
-s|--su-user)
SU_USER="$2"
shift 2
;;
-d|--database)
DB="$2"
shift 2
;;
-H|--database-host)
DB_HOST="$2"
shift 2
;;
-p|--database-password)
DB_PASSWORD="$2"
shift 2
;;
-D|--no-dynflow)
WITHOUT_DYNFLOW=1
shift
;;
-h|--help)
help
exit 0
;;
--)
shift
break;;
esac
done
# Assemble the psql flag strings now that option parsing is done.
[ -n "$DB_USER" ] && PSQL_FLAGS="$PSQL_FLAGS -U $DB_USER"
[ -n "$DB_HOST" ] && PSQL_FLAGS="$PSQL_FLAGS -h $DB_HOST"
# FIX: overwrite instead of append — the default '-d $DB' captured
# before option parsing would otherwise reach psql twice, with a stale
# database name in front of the one chosen via -d/--database.
[ -n "$DB" ] && PSQL_DB_FLAGS="-d $DB"
# Remaining positional arguments are actions, executed in order.
# FIX: "$@" instead of $* keeps each action a single word.
for operation in "$@"; do
  case "$operation" in
    drop )
      db-drop
      ;;
    import )
      dynflow-import
      ;;
    export )
      dynflow-export
      ;;
    env )
      cat <<EOF
# Run the following line in a shell and then start
# dynflow remote_executor example as an observer
export DB_CONN_STRING="$(dynflow-connection-string)"
EOF
      ;;
    run )
      run-dynflow true
      ;;
    * )
      echo "Unknown operation '$operation', skipping."
      ;;
  esac
done
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment