Skip to content

Instantly share code, notes, and snippets.

View DanielDaCosta's full-sized avatar
🎯
Focusing

Snow Storm DanielDaCosta

🎯
Focusing
View GitHub Profile
# Forward local port 5000 to the MLflow server on the EC2 host
# (-N: no remote command, -f: background, -L: local port forward).
ssh -i ~/.ssh/{YOUR_KEY} -NfL 5000:localhost:5000 {SSH_USER}@{EC2_IP}
# Fetch the latest MLflow image from ECR as the ec2-user.
sudo -u ec2-user docker pull $ACCOUNT_ID.dkr.ecr.us-east-1.amazonaws.com/mlflow:latest
# Run the tracking server: detached, published on port 5000,
# restarted automatically after a reboot.
docker run -d \
--name mlflow-server \
--restart always \
-p 5000:5000 \
--env BUCKET=s3://{YOUR_BUCKET_NAME}/ \
--env USERNAME=$DB_USER \
--env PASSWORD=$DB_PASS \
--env HOST=$DB_HOST \
--env PORT=5432 \
--env DATABASE=mlflow \
$ACCOUNT_ID.dkr.ecr.us-east-1.amazonaws.com/mlflow:latest
# Install the Amazon ECR credential helper so the Docker CLI authenticates to
# ECR transparently: developers and build scripts no longer have to retrieve a
# secure token via the ECR API and call `docker login` with that token before
# pushing or pulling container images.
sudo yum -y install amazon-ecr-credential-helper # https://aws.amazon.com/blogs/containers/amazon-ecr-credential-helper-now-supports-amazon-ecr-public/
mkdir -p /home/ec2-user/.docker
# NOTE(review): `sudo cat` does not make the `>>` append run as root — the
# redirection is performed by the invoking user's shell. Use `sudo tee -a`
# if root privileges are needed to write this file.
# NOTE(review): the heredoc below is truncated in this snippet — the closing
# braces of the JSON object and the END terminator are missing.
sudo cat <<-END >> /home/ec2-user/.docker/config.json
{
"credHelpers": {
"public.ecr.aws": "ecr-login",
# Install Docker on Amazon Linux 2 and allow ec2-user to use it.
# (Fixed: the original transcript carried `$ ` shell-prompt prefixes,
# which make the snippet non-executable as a script.)
sudo yum update -y
sudo amazon-linux-extras install docker
sudo service docker start
# Add ec2-user to the docker group so Docker commands work without sudo
# (group membership takes effect on the next login session).
sudo usermod -a -G docker ec2-user
# Configure Docker to start on boot
# https://docs.docker.com/engine/install/linux-postinstall/#configure-docker-to-start-on-boot
sudo systemctl enable docker.service
#!/bin/bash
# Build the MLflow container image and publish it to Amazon ECR.
# Database connection settings are stored in SSM Parameter Store under /mlflow/*.
# Expects DOCKER_REGISTRY, APP_NAME and AWS_DEFAULT_REGION in the environment.
# (Fixes: removed `$ ` prompt prefixes that made the script non-executable,
# replaced deprecated backticks with $(...), quoted variable expansions, and
# added fail-fast shell options.)
set -euo pipefail

# Env Variables — DB_* are consumed later when the container is run.
ACCOUNT_ID=$(aws sts get-caller-identity --output text --query 'Account')
DB_PASS=$(aws ssm get-parameters --region us-east-1 --names /mlflow/DB_PASS --with-decryption --query "Parameters[0].Value" --output text)
DB_HOST=$(aws ssm get-parameters --region us-east-1 --names /mlflow/DB_HOST --query "Parameters[0].Value" --output text)
DB_USER=$(aws ssm get-parameters --region us-east-1 --names /mlflow/DB_USER --query "Parameters[0].Value" --output text)

# Build the image, create the ECR repository if it does not yet exist,
# authenticate the Docker CLI against ECR, then push.
docker build -t "$DOCKER_REGISTRY/$APP_NAME:latest" container/
aws ecr describe-repositories --repository-names "$APP_NAME" || aws ecr create-repository --repository-name "$APP_NAME"
aws ecr get-login-password --region "$AWS_DEFAULT_REGION" | docker login --username AWS --password-stdin "$DOCKER_REGISTRY"
docker push "$DOCKER_REGISTRY/$APP_NAME:latest"
# MLflow tracking-server image, based on CPython 3.8.0.
FROM python:3.8.0
# Install MLflow, the Postgres driver (psycopg2) for the backend store, and
# boto3 (AWS SDK — presumably for S3 artifact storage; confirm against the
# server's BUCKET configuration), then create the server's working directory.
# NOTE(review): the CMD/ENTRYPOINT that starts the server is not visible in
# this excerpt.
RUN pip install \
mlflow==1.18.0 \
psycopg2 \
boto3 && \
mkdir /mlflow/
# The tracking server listens on port 5000.
EXPOSE 5000
@DanielDaCosta
DanielDaCosta / separator.py
Created January 6, 2021 22:26 — forked from jlln/separator.py
Efficiently split Pandas DataFrame cells containing lists into multiple rows, duplicating the other columns' values.
# NOTE(review): this snippet is truncated — the body of splitListToRows ends
# mid-function and the outer function's accumulation/return logic is missing,
# so the code below is not runnable as-is. Original indentation was also lost
# during extraction; restore it before use.
def splitDataFrameList(df,target_column,separator):
''' df = dataframe to split,
target_column = the column containing the values to split
separator = the symbol used to perform the split
returns: a dataframe with each entry for the target column separated, with each element moved into a new row.
The values in the other columns are duplicated across the newly divided rows.
'''
# Inner helper: splits one row's target cell on `separator`; each resulting
# piece is intended to become its own row (collected via row_accumulator).
def splitListToRows(row,row_accumulator,target_column,separator):
split_row = row[target_column].split(separator)
from airflow import DAG
from airflow.models import Variable
from airflow.contrib.operators.ecs_operator import ECSOperator
import copy
from datetime import timedelta, datetime
# Airflow Variables
# Deployment settings read from the Airflow Variable store (Admin > Variables).
# NOTE(review): no default_var is supplied, so Variable.get is expected to
# raise if a key is undefined — confirm against the Airflow version in use.
awsRegionName = Variable.get('AwsRegionName')
awsCluster = Variable.get('AwsCluster')
awsTaskDefinition = Variable.get('AwsTaskDefinition')
# Security group attached to the private EC2 instance.
# NOTE(review): this excerpt is truncated — the egress block and the resource
# itself are not closed here, and any ingress rules fall outside this view.
resource "aws_security_group" "private_sg" {
name = "${var.name}-private-sg"
description = "Security Group for Private EC2 instance"
vpc_id = data.aws_vpc.selected.id
# Allow all outbound traffic: every port, every protocol, any destination.
egress {
from_port = 0
to_port = 0
protocol = "-1"
cidr_blocks = ["0.0.0.0/0"]