
@sorjef
Last active January 18, 2022 14:56
Script to convert an AWS Data Pipeline template to a CloudFormation template, with a sample Terraform module
// Command-line entry point: reads a Data Pipeline definition file and prints
// the equivalent CloudFormation template to stdout.
// Usage: node <this file> <pipeline-definition.json> <name> <description> <activate>
const fs = require('fs');
const Converter = require('./converter.js');

const options = {
  name: process.argv[3],
  description: process.argv[4],
  activate: process.argv[5],
};

const converter = new Converter(JSON.parse(fs.readFileSync(process.argv[2])), options);

process.stdout.write(JSON.stringify(converter.cloudformation()));
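The entry script above is meant to be run with Node alongside converter.js. The gist does not name this file, so assuming it is saved as convert.js, an invocation would look like: node convert.js pipeline-definition.json my-pipeline "My pipeline" true > cloudformation.json, where the positional arguments are the exported Data Pipeline definition, the pipeline name, its description and the activate flag, and the resulting CloudFormation template is written to stdout.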
// converter.js: maps an exported AWS Data Pipeline definition onto the
// AWS::DataPipeline::Pipeline CloudFormation resource schema.

// Converts a single Data Pipeline field into the CloudFormation field shape.
const parseField = (key, value) => {
  const formationField = { "Key": key };
  if (typeof value === 'string') {
    formationField.StringValue = value;
  } else if (value.ref) {
    formationField.RefValue = value.ref;
  } else {
    throw new Error('Field type is not supported');
  }
  return formationField;
};

// Converts a Data Pipeline object into a CloudFormation PipelineObject.
const transformObject = (toTransform) => {
  const object = Object.assign({}, toTransform);
  if (object.id === 'Default' && object.name === 'Default' && !object.type) {
    object.type = 'Default';
  }
  const keys = Object.keys(object);
  const isArray = key => object[key] instanceof Array;
  const targetKeys = keys
    .filter(key => key !== 'id' && key !== 'name');
  const simpleFields = targetKeys
    .filter(key => !isArray(key))
    .map(key => parseField(key, object[key]));
  const arrayFields = targetKeys
    .filter(isArray)
    .reduce((res, key) => {
      const keyValues = object[key].map(value => parseField(key, value));
      return res.concat(keyValues);
    }, []);
  return {
    "Id": object.id,
    "Name": object.name,
    "Fields": simpleFields.concat(arrayFields),
  };
};

// Converts a Data Pipeline parameter into a CloudFormation ParameterObject.
const transformParameter = parameter => ({
  "Id": parameter.id,
  "Attributes": Object.keys(parameter)
    .filter(key => key !== 'id')
    .map(key => ({
      "Key": key,
      "StringValue": parameter[key],
    })),
});

// Turns an arbitrary pipeline name into a CloudFormation logical resource ID.
const normalizeName = name => name
  .replace(/[^a-z0-9]/gi, ' ')
  .replace(/\b\w/g, l => l.toUpperCase())
  .replace(/ /g, '');

class Converter {
  constructor(template, options) {
    this.template = template;
    this.options = Object.assign({}, options);
  }

  // Merges the template's parameter values with those passed in the options.
  _values(values) {
    const obj = Object.assign({}, values, this.options.values);
    return Object.keys(obj)
      .map(key => ({ "Id": key, "StringValue": obj[key] }));
  }

  cloudformation() {
    const result = {
      "AWSTemplateFormatVersion": "2010-09-09",
      "Description": "Data Pipeline Template",
      "Resources": {},
    };
    result.Resources[normalizeName(this.options.name)] = {
      "Type": "AWS::DataPipeline::Pipeline",
      "Properties": {
        "Activate": this.options.activate,
        "Description": this.options.description,
        "Name": this.options.name,
        "PipelineObjects": this.template.objects.map(transformObject),
        "ParameterObjects": this.template.parameters.map(transformParameter),
        "ParameterValues": this._values(this.template.values),
      },
    };
    return result;
  }
}

module.exports = Converter;
variable "name" {}
variable "description" {}
variable "activate" {}
variable "template" {}
variable "values" {
type = "map"
default = {}
}
data "external" "converted_template" {
program = ["node", "${path.module}/terraform-data.js"]
query = {
template = "${var.template}",
name = "${var.name}",
description = "${var.description}",
activate = "${var.activate}",
values = "${jsonencode("${var.values}")}"
}
}
resource "aws_cloudformation_stack" "data_pipeline" {
name = "data-pipeline-${var.name}-stack",
template_body = "${data.external.converted_template.result.json}",
on_failure = "DELETE"
}
// terraform-data.js: adapter between Terraform's external data source
// protocol (a line of JSON on stdin, a JSON object of strings on stdout)
// and the converter.
const readline = require('readline');
const Converter = require('./converter.js');

const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout,
  terminal: false,
});

rl.on('line', (data) => {
  const options = JSON.parse(data);
  options.activate = (!!options.activate).toString();
  options.values = JSON.parse(options.values);
  const converter = new Converter(JSON.parse(options.template), options);
  const template = JSON.stringify(converter.cloudformation());
  process.stdout.write(JSON.stringify({ json: template }));
});
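Below is a minimal sketch of how the module above could be consumed from a root Terraform configuration. The module source path, the pipeline-definition.json file and the myS3Bucket parameter are illustrative assumptions, not part of the gist; the three files above would need to live together in the module directory.

# Illustrative root configuration: source path, definition file and parameter
# value below are assumptions, not part of the gist.
module "data_pipeline" {
  source      = "./modules/data-pipeline"

  name        = "my-pipeline"
  description = "Nightly export pipeline"
  activate    = "true"
  template    = "${file("${path.module}/pipeline-definition.json")}"

  values = {
    myS3Bucket = "example-bucket"
  }
}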
scraly commented Jun 4, 2018

Hi,
For your information, there is an issue when the Data Pipeline JSON template contains backslashes: in the end, too many backslashes are added to the CloudFormation template body and the step will fail.
Thx.

clbg commented Aug 14, 2020

Thanks Artem, you saved my day!

maduxi commented Dec 2, 2020

I have been using this for quite a while. Really useful. Thx!
