GitHub Gists by Anna Geller (anna-anisienia)

import time
import requests
import logging
import boto3
from botocore.config import Config


def send_price_data_to_timestream(write_client):
    base_url = "https://min-api.cryptocompare.com/data"
    symbols = "BTC,ETH,REP,DASH"
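    # The preview cuts off here. What follows is a hedged sketch of a
    # plausible continuation: CryptoCompare's pricemulti endpoint and
    # Timestream's write_records are real APIs, but the database and table
    # names ("crypto", "prices") are assumptions, not part of the original.
    prices = requests.get(
        f"{base_url}/pricemulti", params={"fsyms": symbols, "tsyms": "USD"}
    ).json()

    now = str(int(time.time() * 1000))  # Timestream timestamps default to ms
    records = [
        {
            "Dimensions": [{"Name": "symbol", "Value": symbol}],
            "MeasureName": "price_usd",
            "MeasureValue": str(quote["USD"]),
            "MeasureValueType": "DOUBLE",
            "Time": now,
        }
        for symbol, quote in prices.items()
    ]
    write_client.write_records(
        DatabaseName="crypto", TableName="prices", Records=records
    )
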
import requests
import boto3
import uuid
import time
import json
client = boto3.client('kinesis', region_name='eu-central-1')
partition_key = str(uuid.uuid4())
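The preview stops before any record is sent. A minimal sketch of the likely continuation, assuming a stream named "crypto-stream" (the stream name and payload are not in the original):

payload = {"source": "demo", "event_time": int(time.time())}
response = client.put_record(
    StreamName="crypto-stream",  # assumed name
    Data=json.dumps(payload),
    PartitionKey=partition_key,
)
print(response["SequenceNumber"])
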
import json
import logging
import requests
import pandas as pd
import awswrangler as wr
from typing import List, Any
logger = logging.getLogger()
logger.setLevel(logging.INFO)

class DataQualityAlert:
    def __init__(self, slack_webhook_url: str, database: str = "ecommerce"):
        # The preview truncates the body; storing the arguments is the
        # minimal completion.
        self.slack_webhook_url = slack_webhook_url
        self.database = database
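    # Hedged sketch: the preview ends at __init__. An alert method on a class
    # like this plausibly posts to the Slack incoming webhook it was built
    # with; the method name is an assumption ({"text": ...} is the standard
    # incoming-webhook payload).
    def send_alert(self, message: str) -> None:
        response = requests.post(self.slack_webhook_url, json={"text": message})
        response.raise_for_status()
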
anna-anisienia / dask-k8.py
Created August 25, 2020 01:59
Test Dask setup with Prefect and AWS EKS on Fargate
from prefect.environments.storage import Docker
from prefect.environments import DaskKubernetesEnvironment
from prefect import task, Flow
import random
from time import sleep


@task
def inc(x):
    sleep(random.random() / 10)  # simulate a small amount of work
    return x + 1
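Under the pre-1.0 Prefect API these imports come from, the flow would typically be assembled like this. The worker counts and registry URL are assumptions, not from the original gist:

with Flow(
    "dask-k8",
    environment=DaskKubernetesEnvironment(min_workers=1, max_workers=3),
    storage=Docker(registry_url="<your-registry>"),
) as flow:
    incremented = inc.map(x=list(range(10)))
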
SELECT
  resourceId,
  resourceType,
  awsRegion,
  resourceCreationTime,
  tags,
  configuration.state.value
WHERE
  resourceType NOT IN ('AWS::EC2::SecurityGroup',
                       'AWS::EC2::Subnet', 'AWS::EC2::VPC')

SELECT
  resourceId,
  resourceType,
  awsRegion,
  resourceCreationTime,
  tags,
  tags.tag,
  configuration.state.value
WHERE
  resourceType NOT IN ('AWS::EC2::SecurityGroup',
                       'AWS::EC2::Subnet', 'AWS::EC2::VPC')
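Both expressions use the AWS Config advanced query language, which has no FROM clause. A minimal sketch of running such a query from Python via boto3's select_resource_config (the region is an assumption):

import boto3

config = boto3.client("config", region_name="eu-central-1")
query = """
SELECT resourceId, resourceType, awsRegion, resourceCreationTime, tags,
       configuration.state.value
WHERE resourceType NOT IN ('AWS::EC2::SecurityGroup', 'AWS::EC2::Subnet', 'AWS::EC2::VPC')
"""
response = config.select_resource_config(Expression=query, Limit=100)
for result in response["Results"]:  # each result is a JSON-encoded string
    print(result)
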
import requests
from pprint import pprint
base_url = "http://localhost:3000/api"
auth = {"Authorization": "Bearer YWRtaW46Ym9vdHN0cmFwOmFkbWluMTIz"}
workspaces = requests.get(f"{base_url}/entities/workspaces", headers=auth)
data = workspaces.json().get("data")
workspace_id = data[0].get("id")
dashboards = requests.get(
    f"{base_url}/entities/workspaces/{workspace_id}/analyticalDashboards",
    headers=auth,
)
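The otherwise unused pprint import suggests the preview ends just before the results are inspected, for example:

pprint(dashboards.json().get("data"))
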
"""
Dataset used: https://datasets.wri.org/dataset/globalpowerplantdatabase
"""
import os
import time
import pandas as pd
from datetime import datetime, timedelta
import awswrangler as wr
from boto3.dynamodb.conditions import Key
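The preview stops at the imports. A hedged sketch of a plausible continuation using awswrangler's DynamoDB helpers; the file path, table name, and key schema below are assumptions:

import boto3

df = pd.read_csv("global_power_plant_database.csv")
wr.dynamodb.put_df(df=df, table_name="power_plants")

# Read one country back, assuming "country" is the table's partition key.
table = boto3.resource("dynamodb").Table("power_plants")
response = table.query(KeyConditionExpression=Key("country").eq("USA"))
print(len(response["Items"]))
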
import os
import boto3
import tempfile
from airflow import DAG
from airflow.decorators import task
from airflow.utils.dates import days_ago
from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator
from airflow.utils.task_group import TaskGroup
S3_BUCKET = 'test-bucket-123'  # S3 bucket names may not contain underscores
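The DAG body is cut off in the preview. Given the imports, a hedged sketch of how it might continue; the DAG id, schema, table names, and S3 keys are assumptions:

with DAG(
    dag_id="s3_to_redshift_example",
    start_date=days_ago(1),
    schedule_interval=None,
) as dag:
    # One COPY task per target table, grouped to match the TaskGroup import.
    with TaskGroup(group_id="load_tables") as load_tables:
        for table in ["orders", "customers"]:
            S3ToRedshiftOperator(
                task_id=f"load_{table}",
                schema="public",
                table=table,
                s3_bucket=S3_BUCKET,
                s3_key=f"{table}.csv",
                copy_options=["CSV", "IGNOREHEADER 1"],
                aws_conn_id="aws_default",
                redshift_conn_id="redshift_default",
            )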