@Just4test
Created May 21, 2020 05:49
cdk-temp
from aws_cdk import (
    aws_ec2,
    aws_iam,
    aws_eks,
    aws_elasticache,
    aws_msk,
    core,
)
import os

VPC_NAME = 'deleteme'
EKS_CLUSTER_NAME = 'deleteme'


def add_cost_allocation_tag(target):
    core.Tag.add(target, 'cost_project', 'aws-service-test-202005')
    core.Tag.add(target, 'cost_owner', '111')


ENV = {
    'region': os.environ['CDK_DEFAULT_REGION'],
    'account': os.environ['CDK_DEFAULT_ACCOUNT'],
}
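
# Note: CDK_DEFAULT_ACCOUNT and CDK_DEFAULT_REGION are populated by the CDK CLI from the
# active AWS profile, so this app is meant to be run via `cdk synth`/`cdk deploy` rather
# than invoked directly with python.
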
class NetworkingStack(core.Stack):
    def __init__(self, scope: core.Construct, id: str, context, **kwargs) -> None:
        super().__init__(scope, id, env=ENV, **kwargs)
        vpc = aws_ec2.Vpc(self, VPC_NAME,
            cidr='10.3.0.0/16',
            enable_dns_hostnames=True,
            enable_dns_support=True,
            nat_gateways=1,
            max_azs=3,
            subnet_configuration=[
                aws_ec2.SubnetConfiguration(name='Public', cidr_mask=24, subnet_type=aws_ec2.SubnetType.PUBLIC),
                aws_ec2.SubnetConfiguration(name='Private', cidr_mask=24, subnet_type=aws_ec2.SubnetType.PRIVATE),
                aws_ec2.SubnetConfiguration(name='Database', cidr_mask=24, subnet_type=aws_ec2.SubnetType.ISOLATED),
            ],
        )
        add_cost_allocation_tag(vpc)

        # Tag the VPC and its subnets so EKS and its load balancers can discover them.
        cluster_tag = 'kubernetes.io/cluster/' + EKS_CLUSTER_NAME
        core.Tag.add(vpc, cluster_tag, 'shared')
        for subnet in vpc.select_subnets(subnet_type=aws_ec2.SubnetType.PUBLIC).subnets:
            add_cost_allocation_tag(subnet)
            core.Tag.add(subnet, cluster_tag, 'shared')
            core.Tag.add(subnet, 'kubernetes.io/role/elb', '1')
        for subnet in vpc.select_subnets(subnet_type=aws_ec2.SubnetType.PRIVATE).subnets:
            add_cost_allocation_tag(subnet)
            core.Tag.add(subnet, cluster_tag, 'shared')
            core.Tag.add(subnet, 'kubernetes.io/role/internal-elb', '1')
        for subnet in vpc.select_subnets(subnet_type=aws_ec2.SubnetType.ISOLATED).subnets:
            add_cost_allocation_tag(subnet)

        # Share the VPC with the other stacks through the context dict.
        context['vpc'] = vpc
class EksStack(core.Stack):
    def __init__(self, scope: core.Construct, id: str, context, **kwargs) -> None:
        super().__init__(scope, id, env=ENV, **kwargs)
        cluster = aws_eks.Cluster(self, EKS_CLUSTER_NAME,
            cluster_name=EKS_CLUSTER_NAME,
            default_capacity=0,  # Disable the built-in default capacity; nodes are added explicitly below.
            default_capacity_instance=aws_ec2.InstanceType('r5.large'),
            # masters_role=masters_role,
            vpc=context['vpc'],
            vpc_subnets=[aws_ec2.SubnetSelection(one_per_az=True, subnet_type=aws_ec2.SubnetType.PUBLIC)],
            output_cluster_name=True,
        )
        asg = cluster.add_capacity('node-group',
            instance_type=aws_ec2.InstanceType('r5.large'),
            desired_capacity=3,
            # key_name=props['key_name'],
            max_capacity=4,
            min_capacity=1,
        )
        # Let the worker nodes be managed via Systems Manager (e.g. Session Manager instead of SSH).
        asg.role.add_managed_policy(
            policy=aws_iam.ManagedPolicy.from_aws_managed_policy_name('AmazonSSMFullAccess')
        )
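
# After deployment, the stack outputs normally include the kubeconfig command for the cluster;
# alternatively, something like `aws eks update-kubeconfig --name deleteme --region <region>`
# can be run by hand (the exact flags depend on the local AWS CLI/profile setup).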
# class RedisStack(core.Stack):
#     def __init__(self, scope: core.Construct, id: str, context, **kwargs) -> None:
#         super().__init__(scope, id, env=ENV, **kwargs)
#         redis = aws_elasticache.CfnCacheCluster(self, 'deleteme',
#             engine='redis',
#             engine_version='3.2.10',
#             cache_node_type='cache.t3.medium',
#         )
class MskStack(core.Stack):
    def __init__(self, scope: core.Construct, id: str, context, **kwargs) -> None:
        super().__init__(scope, id, env=ENV, **kwargs)
        vpc = context['vpc']
        # sg = aws_ec2.SecurityGroup(
        #     vpc=vpc,
        #     description='For deleteme/msk',
        #     security_group_name='msk'
        # )
        # port = aws_ec2.Port(aws_ec2.PortProps(
        #     protocol=aws_ec2.Protocol.TCP,
        #     from_port=
        # ))
        # sg.add_ingress_rule(aws_ec2.Peer.any_ipv4(), )
        broker_node_group_info = aws_msk.CfnCluster.BrokerNodeGroupInfoProperty(
            instance_type='kafka.t3.small',
            # Use the public subnets for now to simplify debugging; the brokers are spread across the 3 subnets.
            # CfnCluster expects a list of subnet IDs, so pass subnet_ids rather than SubnetSelection objects.
            client_subnets=vpc.select_subnets(subnet_type=aws_ec2.SubnetType.PUBLIC).subnet_ids,
            storage_info=aws_msk.CfnCluster.StorageInfoProperty(
                ebs_storage_info=aws_msk.CfnCluster.EBSStorageInfoProperty(volume_size=200)
            ),
        )
        msk = aws_msk.CfnCluster(self, 'deleteme',
            cluster_name='deleteme',
            kafka_version='2.2.1',
            broker_node_group_info=broker_node_group_info,
            number_of_broker_nodes=3,
        )
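
        # If other stacks or tooling need to reference the brokers, the cluster ARN could be
        # exported with a CloudFormation output; a minimal sketch (not part of the original gist):
        # core.CfnOutput(self, 'MskClusterArn', value=msk.ref)  # Ref of AWS::MSK::Cluster resolves to the ARN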
app = core.App()

context = {}
networking = NetworkingStack(app, "networking", context)
eks = EksStack(app, "eks", context)
# redis = RedisStack(app, "redis", context)
msk = MskStack(app, "msk", context)

app.synth()
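
# Typical workflow, assuming the CDK CLI is installed and AWS credentials are configured
# (commands shown for illustration, not part of the original gist):
#   cdk synth
#   cdk deploy networking eks msk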