# move_blocks.py
# Generates Terraform "moved" blocks as resources move into the new module.
# usage: python move_blocks.py <workspace_file> <variable_name> <module_name>
# The workspace file name, the local variable map name, and the module name
# that loops over workspaces unfortunately vary per account.
# Typical example to create a moved.tf (after account_name = account_name.replace("-", "_")):
# python move_blocks.py main.tf <account_name>_workspaces <account_name> >> moved.tf
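# A generated block looks roughly like this (the resource addresses below are
# illustrative only, not taken from the real config):
#
#   moved {
#     from = aws_iam_role.workspace["some-account"]
#     to   = module.workspaces.aws_iam_role.workspace["some-account"]
#   }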
import sys

import boto3
def has_ecs_create_permissions(policy_document):
    """
    Check if the policy document contains permissions to create ECS resources.
    """
    for statement in policy_document.get("Statement", []):
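        # (sketch) The gist preview ends above; the statement handling below is
        # an assumed completion for illustration, not the original code.
        actions = statement.get("Action", [])
        if isinstance(actions, str):
            actions = [actions]
        if statement.get("Effect") == "Allow" and any(
            action == "*" or action == "ecs:*" or action.lower().startswith("ecs:create")
            for action in actions
        ):
            return True
    return False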
#!/usr/bin/env python
import os
import boto3
import boto3.session
roles = [
#"arn:aws:iam::053296354021:role/id-admin-role",
#"arn:aws:iam::106836188009:role/id-admin-role",
#"arn:aws:iam::125476056637:role/id-admin-role", # build-dev-admin
# terraform_check_examples.tf
# precondition vs check
# precondition:
# - raises an error
# - stops you at plan time
# check:
# - only warns
# - meant to be used with TFC "Continuous Validation"
# - i.e. alerts when check warnings emerge
# Uses for precondition and check:
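# Illustrative sketch only: the resource, variable, and check names below are
# assumptions, not part of this config.
resource "aws_s3_bucket" "logs" {
  bucket = var.log_bucket_name

  lifecycle {
    # fails the plan if the condition is false
    precondition {
      condition     = var.log_bucket_name != ""
      error_message = "log_bucket_name must be set."
    }
  }
}

check "app_health" {
  data "http" "app" {
    url = "https://example.com/healthz"
  }

  # only warns; surfaced by TFC Continuous Validation
  assert {
    condition     = data.http.app.status_code == 200
    error_message = "App health endpoint did not return HTTP 200."
  }
}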
#!/usr/bin/env python
import concurrent.futures
import logging
import os
import sys
import time
from collections import deque
import click
from dictdiffer import diff
#!/bin/sh
# Make sure you have set each tfc workspace to "local" execution
# and lock it
DIRS="terraform/data-ml/tf-regional-us-west-2 terraform/data/tf-regional-us-east-1 \
terraform/demo/tf-global terraform/network/tf-global terraform/org-manager/tf-global \
terraform/prod/tf-deploys-us-east-1-shard-1 terraform/prod/tf-global \
terraform/prod/tf-misc-billing"
# this last one is an s3 root, arguably -lock=false is not totally safe
# TODO: how can we ensure no one mucks with an S3 root while we are doing this?
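# (illustrative only; the real loop body is not shown in this preview)
# the DIRS list above would typically be consumed with something like:
for d in $DIRS; do
  echo "==> $d"
  (cd "$d" && terraform init -input=false && terraform plan -lock=false)
done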
# This is a quick/dirty script to pull as much collector and source
# data from Sumo Logic as possible, but some massaging of the Terraform
# it generates is required.
# It also produces a shell script to import the resources.
import json
import os
import pprint
import requests
pprinter = pprint.PrettyPrinter(indent=4)
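# (sketch) Listing collectors via Sumo Logic's collector management API.
# The env var names below are assumptions for illustration; the endpoint is
# the US1 deployment and auth is the documented accessId/accessKey basic auth.
access_id = os.environ["SUMO_ACCESS_ID"]
access_key = os.environ["SUMO_ACCESS_KEY"]
resp = requests.get(
    "https://api.sumologic.com/api/v1/collectors",
    auth=(access_id, access_key),
)
resp.raise_for_status()
pprinter.pprint([c["name"] for c in resp.json()["collectors"]])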
require 'ruby-jmeter'
# flush stdout immediately
$stdout.sync = true
# default to Dockerfile jmeter location
jmeter_path = ENV['JMETER_BIN'] ? ENV['JMETER_BIN'] : '/opt/jmeter/bin/'
json_path = File.dirname(__FILE__) + '/claim.json'
gui = false # only true for non docker testing
claim_url = ARGV[0] # e.g. https://your/api
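# (sketch) A minimal ruby-jmeter plan using the values above; the thread count
# and sampler options are assumptions, not the original test definition.
test do
  threads count: 5 do
    post name: 'claim', url: claim_url, raw_body: File.read(json_path)
  end
end.run(path: jmeter_path, gui: gui)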
#!/bin/bash
# docker entrypoint for running ruby jmeter in docker
set -e
# size the JVM from free memory: 80% for the heap (Xms == Xmx), 20% for the young gen
freeMem=$(awk '/MemFree/ { print int($2/1024) }' /proc/meminfo)
s=$(($freeMem/10*8))
x=$(($freeMem/10*8))
n=$(($freeMem/10*2))
export JVM_ARGS="-Xmn${n}m -Xms${s}m -Xmx${x}m"
mkdir -p /mnt/output
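# (assumed tail; the preview cuts off here) hand off to the ruby-jmeter test,
# passing through any container arguments; the script path is hypothetical
exec ruby /jmeter/claim_test.rb "$@"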
# ruby-jmeter image to run
FROM alpine:3.9
ARG JMETER_VERSION="5.1"
ENV JMETER_HOME /opt/apache-jmeter-${JMETER_VERSION}
ENV JMETER_BIN ${JMETER_HOME}/bin
ENV JMETER_DOWNLOAD_URL https://archive.apache.org/dist/jmeter/binaries/apache-jmeter-${JMETER_VERSION}.tgz
# Install extra packages
# See https://github.com/gliderlabs/docker-alpine/issues/136#issuecomment-272703023
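# (assumed continuation; the preview cuts off here) a JRE plus curl, then
# unpack JMeter into ${JMETER_HOME}
RUN apk add --no-cache bash curl ca-certificates openjdk8-jre \
    && curl -sL ${JMETER_DOWNLOAD_URL} | tar -xz -C /opt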