Skip to content

Instantly share code, notes, and snippets.

@enbashi
Last active June 29, 2024 12:26
Show Gist options
  • Save enbashi/366c62ee8c5fc350d52ddabc867602d4 to your computer and use it in GitHub Desktop.
Save enbashi/366c62ee8c5fc350d52ddabc867602d4 to your computer and use it in GitHub Desktop.
POC: Project Scaffolding/Templating using Cookiecutter

Installation Instructions:

  1. Create a new workflow using the JSON spec in step 1 and update the connection in each step with your GitHub API token. Publish the workflow.
  2. Create a new AWS Lambda function following these steps in your AWS Console:
  • click "Create Function"
  • Select "Author from scratch"
  • Fill in Function name "cookiecutter" (if you pick another name, make sure to change the function name in Step 4)
  • For "Runtime", select Python 3.12
  • Click "Create Function" at the bottom of the page
  • in "Code" tab replace "lambda_function.py" file with the attached one in Step 2.
  • in "Configuration" tab select "Environment variables", and add new variable with name COOKIECUTTER_CONFIG and value /tmp/cookiecutter/custom-config.yaml.
  • Back to "Code" tab, and "Deploy" the function
  3. Create a new workflow using the JSON spec in step 3 and update the connection in each step with your GitHub API token. Publish the workflow.
  4. Create a new workflow using the JSON spec in Main. You should have a workflow with three steps like the one shown below. Manually update steps 1 & 3 in this workflow to select the new workflows you just created in the steps above. If needed, update the Region and Function name fields in step 2 to point to your Lambda function. You can now run this workflow directly or trigger it from other apps/workflows.
image
{
"steps": [
{
"actionId": "com.datadoghq.dd.workflow_automation.triggerWorkflow",
"display": {
"bounds": {
"y": 186
}
},
"name": "Download_project_template",
"outboundEdges": [
{
"branchName": "main",
"nextStepName": "Scaffold_project_template"
}
],
"parameters": [
{
"name": "workflowInputs",
"value": {
"owner": "{{ Trigger.owner }}",
"path": "{{ Trigger.path }}",
"repo": "{{ Trigger.repo }}"
}
}
]
},
{
"actionId": "com.datadoghq.aws.lambda.invoke_lambda",
"display": {
"bounds": {
"y": 416
}
},
"name": "Scaffold_project_template",
"outboundEdges": [
{
"branchName": "main",
"nextStepName": "Upload_project_template"
}
],
"parameters": [
{
"name": "region",
"value": "us-east-1"
},
{
"name": "functionName",
"value": "cookiecutter"
},
{
"name": "invocationType",
"value": "RequestResponse"
},
{
"name": "inputPayload",
"value": {
"template": "{{ Steps.Download_project_template.workflowOutputs.output }}",
"variables": "{{ Trigger.variables }}"
}
}
]
},
{
"actionId": "com.datadoghq.dd.workflow_automation.triggerWorkflow",
"display": {
"bounds": {
"y": 661.1012351792293
}
},
"name": "Upload_project_template",
"parameters": [
{
"name": "workflowInputs",
"value": {
"owner": "{{ Trigger.owner }}",
"path": "{{ Trigger.generatedProjectPath }}",
"projectContents": "{{ Steps.Scaffold_project_template.payload }}",
"repo": "{{ Trigger.repo }}"
}
}
]
}
],
"inputSchema": {
"parameters": [
{
"defaultValue": "DataDog",
"name": "owner",
"type": "STRING"
},
{
"defaultValue": "apps-templates",
"name": "repo",
"type": "STRING"
},
{
"defaultValue": "templates/cookiecutter-lambda-function",
"name": "path",
"type": "STRING"
},
{
"defaultValue": {
"email": "",
"endpoint": "y",
"full_name": "Your name",
"github_username": "Your github username",
"project_name": "Name of the project",
"project_short_description": "A short description of the project",
"project_slug": "",
"release_date": "",
"schedule": "y",
"timeout": "60",
"version": "0.1.0"
},
"name": "variables",
"type": "OBJECT"
},
{
"defaultValue": "generated-projects/MyNewProject",
"name": "generatedProjectPath",
"type": "STRING"
}
]
},
"startStepName": "Download_project_template",
"triggers": [
{
"manualTrigger": {},
"startStepNames": [
"Download_project_template"
]
}
]
}
{
"steps": [
{
"actionId": "com.datadoghq.http.request",
"display": {
"bounds": {
"y": 372
}
},
"name": "FindTreeSHA",
"outboundEdges": [
{
"branchName": "main",
"nextStepName": "TreeSHA"
}
],
"parameters": [
{
"name": "url",
"value": "https://api.github.com/repos/{{ Trigger.owner }}/{{ Trigger.repo }}/contents/{{ Steps.Constants.data.rootPath }}"
},
{
"name": "verb",
"value": "GET"
},
{
"name": "urlParams",
"value": [
{
"key": "recursive",
"value": "true"
}
]
}
]
},
{
"actionId": "com.datadoghq.datatransformation.func",
"display": {
"bounds": {
"y": 186
}
},
"name": "Constants",
"outboundEdges": [
{
"branchName": "main",
"nextStepName": "FindTreeSHA"
}
],
"parameters": [
{
"name": "script",
"value": "const pathArray = $.Trigger.path.split('/');\nconst projectFolder = pathArray.pop();\nreturn {\n projectFolder,\n rootPath: pathArray.join('/')\n}"
}
]
},
{
"actionId": "com.datadoghq.http.request",
"display": {
"bounds": {
"y": 766
}
},
"name": "GetTrees",
"outboundEdges": [
{
"branchName": "main",
"nextStepName": "FlatContentPaths"
}
],
"parameters": [
{
"name": "url",
"value": "https://api.github.com/repos/{{ Trigger.owner }}/{{ Trigger.repo }}/git/trees/{{ Steps.TreeSHA.data }}"
},
{
"name": "verb",
"value": "GET"
},
{
"name": "urlParams",
"value": [
{
"key": "recursive",
"value": "true"
}
]
}
]
},
{
"actionId": "com.datadoghq.datatransformation.func",
"display": {
"bounds": {
"y": 580
}
},
"name": "TreeSHA",
"outboundEdges": [
{
"branchName": "main",
"nextStepName": "GetTrees"
}
],
"parameters": [
{
"name": "script",
"value": "return ($.Steps.FindTreeSHA.body.find(item => item.path === $.Trigger.path))?.sha"
}
]
},
{
"actionId": "com.datadoghq.http.request",
"display": {
"bounds": {
"y": 1160
}
},
"iterator": {
"forEach": {
"inputList": "{{ Steps.FlatContentPaths.data.files }}"
}
},
"name": "GetFileContent",
"outboundEdges": [
{
"branchName": "main",
"nextStepName": "ProjectContent"
}
],
"parameters": [
{
"name": "url",
"value": "https://api.github.com/repos/{{ Trigger.owner }}/{{ Trigger.repo }}/contents/{{ Trigger.path }}/{{ Current.Value }}"
},
{
"name": "verb",
"value": "GET"
},
{
"name": "urlParams",
"value": [
{
"key": "recursive",
"value": "true"
}
]
}
]
},
{
"actionId": "com.datadoghq.datatransformation.func",
"display": {
"bounds": {
"y": 1390
}
},
"name": "ProjectContent",
"parameters": [
{
"name": "script",
"value": "//$.Steps.GetFileContent[1].body.content\n\nreturn $.Steps.FlatContentPaths.data.files.reduce((accumulator, _, i)=> {\n const currentValue = $.Steps.GetFileContent[i].body;\n const { type, path: originalPath, content} = currentValue;\n const path = originalPath.replace(`${$.Trigger.path}/`, \"\");\n accumulator.push({type, path, content});\n return accumulator;\n}, []).concat(\n $.Steps.FlatContentPaths.data.folders.map(folder => ({\n type: 'folder',\n path:folder\n }))\n)"
}
]
},
{
"actionId": "com.datadoghq.datatransformation.func",
"display": {
"bounds": {
"y": 974
}
},
"name": "FlatContentPaths",
"outboundEdges": [
{
"branchName": "main",
"nextStepName": "GetFileContent"
}
],
"parameters": [
{
"name": "script",
"value": "return $.Steps.GetTrees.body.tree.reduce((acc, currentItem) => {\n if(currentItem.type === 'blob'){\n acc.files.push(currentItem.path)\n } else {\n acc.folders.push(currentItem.path)\n }\n return acc\n}, {files:[], folders: []})"
}
]
}
],
"inputSchema": {
"parameters": [
{
"defaultValue": "DataDog",
"name": "owner",
"type": "STRING"
},
{
"defaultValue": "apps-templates",
"name": "repo",
"type": "STRING"
},
{
"defaultValue": "templates/cookiecutter-lambda-function",
"name": "path",
"type": "STRING"
}
]
},
"outputSchema": {
"parameters": [
{
"name": "output",
"type": "ARRAY_OBJECT",
"value": "{{ Steps.ProjectContent.data }}"
}
]
},
"startStepName": "Constants",
"triggers": [
{
"manualTrigger": {},
"startStepNames": [
"Constants"
]
}
]
}
import json
import sys
import subprocess
import base64
import os
import shutil
# pip install custom package to /tmp/ and add to path
# NOTE(review): installing at import time adds cold-start latency on every
# fresh Lambda sandbox; stdout/stderr are discarded, so an install failure
# only surfaces as an ImportError on the cookiecutter import below — confirm
# this is acceptable, or consider packaging the deps as a Lambda layer.
subprocess.call('pip install cookiecutter jinja2-time -t /tmp/ --no-cache-dir'.split(), stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
# /tmp/ is the only writable location in Lambda; put it near the front of
# sys.path so the freshly installed packages are importable.
sys.path.insert(1, '/tmp/')
from cookiecutter.main import cookiecutter
def lambda_handler(event, context):
    """Generate a project from a cookiecutter template shipped in the event.

    Parameters
    ----------
    event : dict
        'template'  -- serialized list of template entries, each a dict with
                       a relative 'path', a 'type', and (for files) a base64
                       'content', as produced by the download workflow.
        'variables' -- key/value map of cookiecutter template variables.
    context : LambdaContext
        Standard Lambda context; aws_request_id isolates this invocation's
        scratch space under /tmp/.

    Returns
    -------
    list of dict
        The generated project serialized by serialize_project_directory
        (file contents base64-encoded).
    """
    # Serialized array of template contents. Files are encoded as base64.
    template_json = event['template']
    # Key/value map of template variables.
    template_vars = event['variables']
    # Use request_id in the folder name to prevent leaking across requests —
    # Lambda sandboxes (and their /tmp) are reused between warm invocations.
    base_path = os.path.join('/tmp/', context.aws_request_id)
    try:
        # Deserialize the template contents onto disk.
        template_dir = recreate_template_directory(template_json, base_path)
        # cookiecutter needs a writable replay dir; the default location is
        # not writable in Lambda, so point it at our per-request tree.
        replay_dir = os.path.join(base_path, 'cookiecutter_reply')
        os.makedirs(replay_dir, exist_ok=True)
        # Write a custom config file and expose it via COOKIECUTTER_CONFIG.
        config_path = os.path.join(base_path, 'cookiecutter')
        os.makedirs(config_path, exist_ok=True)
        config_file = os.path.join(config_path, 'custom-config.yaml')
        with open(config_file, 'w') as f:
            f.write('replay_dir: "{}"\ncookiecutters_dir: "{}"'.format(replay_dir, base_path))
        os.environ['COOKIECUTTER_CONFIG'] = config_file
        # Generate the project; no_input=True takes all answers from extra_context.
        output_dir = cookiecutter(template_dir, extra_context=template_vars,
                                  no_input=True, output_dir=base_path)
        # Serialize the generated project for the calling workflow.
        return serialize_project_directory(output_dir)
    finally:
        # Fix: the original removed only template_dir and output_dir, leaking
        # the replay/config dirs — and, on any exception, the whole tree —
        # into the shared /tmp, which can fill up across warm invocations.
        # Remove the entire per-request tree unconditionally.
        shutil.rmtree(base_path, ignore_errors=True)
def recreate_template_directory(template_json, basePath):
    """Materialize a serialized template tree on disk under basePath.

    Each entry in template_json is a dict carrying a relative 'path' and,
    for files, a base64-encoded 'content'. Entries without 'content' only
    get their parent directory chain created.

    Returns the root directory the template was written into.
    """
    template_root = os.path.join(basePath, 'cookiecutter_template')
    os.makedirs(template_root, exist_ok=True)
    for entry in template_json:
        target = os.path.join(template_root, entry['path'])
        # Make sure the parent directory chain exists before writing.
        os.makedirs(os.path.dirname(target), exist_ok=True)
        if 'content' in entry:
            with open(target, 'wb') as out:
                out.write(base64.b64decode(entry['content']))
    return template_root
def serialize_project_directory(directory):
    """Walk `directory` and serialize its contents into a list of entries.

    Files become {'path', 'content' (base64 text), 'type': 'file'} and
    directories become {'path', 'type': 'folder'}; every 'path' is relative
    to `directory`.
    """
    entries = []
    for root, subdirs, filenames in os.walk(directory):
        # Files of the current level first, matching the walk order.
        for name in filenames:
            full = os.path.join(root, name)
            with open(full, 'rb') as handle:
                encoded = base64.b64encode(handle.read()).decode()
            entries.append({
                'path': os.path.relpath(full, directory),
                'content': encoded,
                'type': 'file',
            })
        # Then the directories discovered at this level.
        entries.extend(
            {'path': os.path.relpath(os.path.join(root, sub), directory),
             'type': 'folder'}
            for sub in subdirs
        )
    return entries
{
"steps": [
{
"actionId": "com.datadoghq.datatransformation.func",
"display": {
"bounds": {
"y": 186
}
},
"name": "Constants",
"outboundEdges": [
{
"branchName": "main",
"nextStepName": "Create_branch"
}
],
"parameters": [
{
"name": "script",
"value": "const pathArray = $.Trigger.path.split('/');\nconst projectFolder = pathArray.pop();\nreturn {\n projectFolder,\n rootPath: pathArray.join('/'),\n files: $.Trigger.projectContents.filter(item => item.type === 'file'),\n timestamp: Date.now()\n}"
}
]
},
{
"actionId": "com.datadoghq.http.request",
"display": {
"bounds": {
"y": 580
}
},
"iterator": {
"continueOnError": true,
"forEach": {
"inputList": "{{ Steps.Constants.data.files }}"
}
},
"name": "UploadFile",
"outboundEdges": [
{
"branchName": "main",
"nextStepName": "PR_Description"
}
],
"parameters": [
{
"name": "url",
"value": "https://api.github.com/repos/{{ Trigger.owner }}/{{ Trigger.repo }}/contents/{{ Trigger.path }}/{{ Current.Value.path }}"
},
{
"name": "verb",
"value": "PUT"
},
{
"name": "requestHeaders",
"value": [
{
"key": "Content-Type",
"value": [
"application/json"
]
}
]
},
{
"name": "body",
"value": {
"branch": "{{ Steps.Create_branch.branch }}",
"content": "{{ Current.Value.content }}",
"message": "commit message"
}
}
]
},
{
"actionId": "com.datadoghq.github.createBranch",
"display": {
"bounds": {
"y": 372
}
},
"name": "Create_branch",
"outboundEdges": [
{
"branchName": "main",
"nextStepName": "UploadFile"
}
],
"parameters": [
{
"name": "repository",
"value": "{{ Trigger.owner }}/{{ Trigger.repo }}"
},
{
"name": "baseBranch",
"value": "main"
},
{
"name": "branchToCreate",
"value": "new_project_{{ Trigger.path }}_{{ Steps.Constants.data.timestamp }}"
}
]
},
{
"actionId": "com.datadoghq.github.createPullRequest",
"display": {
"bounds": {
"x": -4.238575837475509,
"y": 962.588694087361
}
},
"name": "Create_pull_request",
"parameters": [
{
"name": "repository",
"value": "{{ Trigger.owner }}/{{ Trigger.repo }}"
},
{
"name": "baseBranch",
"value": "main"
},
{
"name": "branchWithChanges",
"value": "{{ Steps.Create_branch.branch }}"
},
{
"name": "prTitle",
"value": "[Project Template] {{ Steps.Constants.data.projectFolder }}"
},
{
"name": "prDescription",
"value": "{{ Steps.PR_Description.data }}"
}
]
},
{
"actionId": "com.datadoghq.datatransformation.func",
"display": {
"bounds": {
"y": 810
}
},
"name": "PR_Description",
"outboundEdges": [
{
"branchName": "main",
"nextStepName": "Create_pull_request"
}
],
"parameters": [
{
"name": "script",
"value": "return `**Generated Files:** \\n\\n- ${$.Trigger.projectContents.map(item => item.path).join(\"\\n- \")}`"
}
]
}
],
"inputSchema": {
"parameters": [
{
"defaultValue": "DataDog",
"name": "owner",
"type": "STRING"
},
{
"defaultValue": "apps-templates",
"name": "repo",
"type": "STRING"
},
{
"defaultValue": "generated-projects/MyNewProject",
"name": "path",
"type": "STRING"
},
{
"defaultValue": [],
"name": "projectContents",
"type": "ARRAY_OBJECT"
}
]
},
"outputSchema": {
"parameters": [
{
"name": "PR_URL",
"type": "STRING",
"value": "{{ Steps.Create_pull_request.data.url }}"
}
]
},
"startStepName": "Constants",
"triggers": [
{
"manualTrigger": {},
"startStepNames": [
"Constants"
]
}
]
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment