Created
July 28, 2023 07:51
-
-
Save ashemedai/962d3449c3712e40bb54d2fa3fba4e00 to your computer and use it in GitHub Desktop.
cfn-lint helper script
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python3
import json
from pathlib import Path
import re
import sys

# When True, print verbose tracing while walking the CloudFormation spec.
DEBUG = True
# Directory where the per-product JSON-patch ("extended specs") files are written.
VALUE_TYPES_PATH = Path("03_value_types")
def check_constraints(json_data, key): | |
if DEBUG: | |
print(f"keys under {key}: {json_data[key].keys()}") | |
allowedpatternregex = False | |
stringmax = False | |
stringmin = False | |
product_json_path = None | |
top_level = re.match(r"(?P<product>AWS::.*)::", key) | |
if top_level: | |
product = top_level.group("product") | |
product_json_path = ( | |
VALUE_TYPES_PATH / f"{product.lower().replace('::', '_')}.json" | |
) | |
if DEBUG: | |
print(f" >> {product_json_path}") | |
for entry in json_data[key].keys(): | |
match entry: | |
case "StringMax": | |
if DEBUG: | |
print(f" Found a StringMax for {key}") | |
stringmax = True | |
case "StringMin": | |
if DEBUG: | |
print(f" Found a StringMin for {key}") | |
stringmin = True | |
case "AllowedPatternRegex": | |
if DEBUG: | |
print(f" Found an AllowedPatternRegex for {key}") | |
allowedpatternregex = True | |
missing_constraints = [] | |
if not allowedpatternregex: | |
missing_constraints.append("AllowedPatternRegex") | |
if not stringmax: | |
missing_constraints.append("StringMax") | |
if not stringmin: | |
missing_constraints.append("StringMin") | |
if missing_constraints: | |
print(f"{key} is missing {', '.join(missing_constraints)}") | |
if product_json_path: | |
create_extendedspecs( | |
product_json_path, key, allowedpatternregex, stringmax, stringmin | |
) | |
def create_extendedspecs(json_path, key, allowedpatternregex, stringmax, stringmin):
    """Append a JSON-patch stub for *key*'s missing constraints to *json_path*.

    Each missing constraint gets a placeholder value (``"^$"`` for the
    regex, ``0`` for min/max) that a human is expected to refine later.
    The patch file is created on first use and rewritten in full on every
    call.

    Parameters
    ----------
    json_path : pathlib.Path
        Per-product patch file; created if it does not yet exist.
    key : str
        Value-type name the patch targets (used in the JSON-patch path).
    allowedpatternregex, stringmax, stringmin : bool
        True when the spec already defines that constraint, so no stub
        is added for it.
    """
    if json_path.exists():
        with open(json_path, "r") as json_file:
            all_objects = json.load(json_file)
    else:
        all_objects = []
    print(f" >> all_objects: {all_objects}")

    # Build the stub first so we can tell whether there is anything to add.
    value = {}
    if not allowedpatternregex:
        value["AllowedPatternRegex"] = "^$"  # placeholder: matches only ""
    if not stringmax:
        value["StringMax"] = 0
    if not stringmin:
        value["StringMin"] = 0

    new_object = {"op": "add", "path": f"/ValueTypes/{key}", "value": value}
    print(new_object)

    # Nothing missing -> don't record (and rewrite the file for) a no-op patch.
    if not value:
        return

    all_objects.append(new_object)
    with open(json_path, "w") as json_file:
        json.dump(all_objects, json_file, indent=1)
if __name__ == "__main__":
    # Usage: <script> <cloudformation-spec.json>
    # Validate argv up front instead of dying with a raw IndexError.
    if len(sys.argv) != 2:
        sys.exit(f"usage: {sys.argv[0]} <cloudformation-spec.json>")

    path = Path(sys.argv[1])
    print(f"Path: {path}")
    with open(path, "r") as cloudspec_file:
        cloudspec = json.load(cloudspec_file)
    print(f"Top-level keys: {cloudspec.keys()}\n")

    # We are only interested in the top-level ValueTypes key (for now).
    # exist_ok avoids the exists()/mkdir() race on repeated runs.
    VALUE_TYPES_PATH.mkdir(parents=True, exist_ok=True)

    # Only value types whose name mentions "Name" or "Description" are
    # candidates for the string constraints we check.
    name_regex = re.compile(r".*Name")
    description_regex = re.compile(r".*Description")
    for entry in cloudspec["ValueTypes"]:
        if name_regex.match(entry) or description_regex.match(entry):
            if DEBUG:
                print(f"{entry}")
            check_constraints(cloudspec["ValueTypes"], entry)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment