David Rosenstark (davidrosenstark)
davidrosenstark / error_log_retriever.py (Last active June 2, 2020 12:22) - error_log_retriever
import json
import logging
import os
from datetime import timedelta

import boto3
from dateutil.parser import parse

# Target environment (e.g. dev/prod) is taken from the ENV variable
environment = os.environ.get("ENV")
logger = logging.getLogger(__name__)
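The gist preview stops after the imports; as a rough sketch under those imports, an error-log retriever along these lines could pull matching events from CloudWatch Logs (the LOG_GROUP variable and the function name are assumptions, not part of the original gist):

logs_client = boto3.client("logs")

def retrieve_error_logs(start_time_iso, lookback_minutes=60):
    # LOG_GROUP is a hypothetical env var naming the CloudWatch Logs group to search
    start = parse(start_time_iso)
    end = start + timedelta(minutes=lookback_minutes)
    response = logs_client.filter_log_events(
        logGroupName=os.environ["LOG_GROUP"],
        startTime=int(start.timestamp() * 1000),  # CloudWatch expects epoch millis
        endTime=int(end.timestamp() * 1000),
        filterPattern="ERROR",
    )
    events = [event["message"] for event in response.get("events", [])]
    logger.info("Retrieved %d error events for env %s", len(events), environment)
    return json.dumps(events)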
# CloudWatch alarm / SNS notification snippet
import datetime
import json
import logging
import os

import boto3
import pytz

cloudwatch_client = boto3.client('cloudwatch')
sns_client = boto3.client('sns')
# Use this to identify alarms for our specific env
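The alarm-notification gist is also truncated; a minimal sketch, assuming alarm names are prefixed with the environment name and a hypothetical ALARM_TOPIC_ARN environment variable identifies the SNS topic:

def notify_env_alarms(env_name):
    # Find alarms currently in ALARM state whose name starts with the env prefix
    paginator = cloudwatch_client.get_paginator('describe_alarms')
    alarming = []
    for page in paginator.paginate(StateValue='ALARM', AlarmNamePrefix=env_name):
        alarming.extend(page['MetricAlarms'])

    if not alarming:
        return

    message = {
        'environment': env_name,
        'checked_at': datetime.datetime.now(pytz.utc).isoformat(),
        'alarms': [a['AlarmName'] for a in alarming],
    }
    # ALARM_TOPIC_ARN is a hypothetical env var naming the SNS topic to notify
    sns_client.publish(
        TopicArn=os.environ['ALARM_TOPIC_ARN'],
        Subject=f"{env_name}: {len(alarming)} alarm(s) in ALARM state",
        Message=json.dumps(message),
    )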
# Flask application bootstrap snippet
import logging
import os

from flask import Flask
from flask_injector import FlaskInjector
from injector import singleton, Injector, Module

from common_dal import DataAccessLayer, initialize_db_connection
from apis.api_queries import api_before_request
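The preview cuts off before the app is created; a minimal sketch of how these imports might be wired together, assuming DataAccessLayer wraps the connection returned by initialize_db_connection and that a create_app factory is used (both are assumptions):

class AppModule(Module):
    def configure(self, binder):
        # Assumes DataAccessLayer takes the connection from initialize_db_connection
        connection = initialize_db_connection()
        binder.bind(DataAccessLayer, to=DataAccessLayer(connection), scope=singleton)


def create_app():
    app = Flask(__name__)

    # Register the request hook before FlaskInjector so it is injected as well
    app.before_request(api_before_request)

    FlaskInjector(app=app, modules=[AppModule()])
    return app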
davidrosenstark / __init__.py (Last active May 11, 2020 07:22) - flask init
from flask_restx import Api

from .health_check.health import health_api as health_namespace

api = Api(
    title='Title',
    version='1.0',
    description='description',
    doc='/api-docs',
)
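The health namespace is imported but the preview ends before it is registered; presumably the gist continues roughly like this (the init_app helper is an assumption):

api.add_namespace(health_namespace)

def init_app(app):
    # Attach the API (and its /api-docs Swagger UI) to the Flask app
    api.init_app(app)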
davidrosenstark / flask_injector.py (Created May 7, 2020 13:14) - flask_injector_setup
import logging

from injector import inject
from flask import request, g

logger = logging.getLogger(__name__)

# db_connection (MyDbConnectionClass) is bound elsewhere via the injector
@inject
def api_before_request(db_connection: MyDbConnectionClass):
    if not request.path.startswith("/health_check"):
        logger.info(f"Request received to: {request.url}, headers: {request.headers}, body: {request.json}")
        g.user_info = db_connection.query_user_info(request.headers.get('Username'))
# Health-check namespace snippet
from flask import jsonify
from flask_restx import Resource, Namespace

health_api = Namespace('health', 'health check api')


@health_api.route('/health_check', doc=False)
class HealthCheck(Resource):
    def get(self):
        response = {"Status": "OK"}
        return jsonify(response)
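A quick way to exercise the endpoint from a test, assuming the namespace above is registered on the Api from the flask init gist and that an application factory named create_app exists (the factory name and the mount path are both assumptions):

from my_app import create_app   # hypothetical application factory

client = create_app().test_client()
# The URL prefix depends on how the namespace is mounted; '/health/health_check'
# is the default when the 'health' namespace is added with add_namespace()
resp = client.get('/health/health_check')
assert resp.status_code == 200
assert resp.get_json() == {"Status": "OK"}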
# PySpark + Glue Data Catalog snippet
from pyspark.sql import SparkSession

spark = SparkSession.builder.appName("SimpleApp").enableHiveSupport().getOrCreate()
spark.sql("use dev")
spark.sql("show tables").show()
spark.sql("SELECT * FROM myTable").show(10)

# EMR configuration that points the Hive metastore at the AWS Glue Data Catalog
--configurations '[{"Classification":"hive-site","Properties":{"hive.metastore.client.factory.class":"com.amazonaws.glue.catalog.metastore.AWSGlueDataCatalogHiveClientFactory"}},{"Classification":"spark-hive-site","Properties":{"hive.metastore.client.factory.class":"com.amazonaws.glue.catalog.metastore.AWSGlueDataCatalogHiveClientFactory"}}]'
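The --configurations fragment above is the aws emr create-cluster setting that makes Spark and Hive use the Glue Data Catalog as their metastore; the same thing expressed through boto3 might look like this, where everything other than the Configurations list (release label, instance sizing, roles) is an assumption:

import boto3

emr = boto3.client('emr')

glue_catalog = {"hive.metastore.client.factory.class":
                "com.amazonaws.glue.catalog.metastore.AWSGlueDataCatalogHiveClientFactory"}

emr.run_job_flow(
    Name='spark-glue-catalog',              # assumed cluster name
    ReleaseLabel='emr-5.30.0',              # assumed EMR release
    Applications=[{'Name': 'Spark'}, {'Name': 'Hive'}],
    Configurations=[
        {'Classification': 'hive-site', 'Properties': glue_catalog},
        {'Classification': 'spark-hive-site', 'Properties': glue_catalog},
    ],
    Instances={
        'MasterInstanceType': 'm5.xlarge',  # assumed instance sizing
        'SlaveInstanceType': 'm5.xlarge',
        'InstanceCount': 3,
        'KeepJobFlowAliveWhenNoSteps': True,
    },
    JobFlowRole='EMR_EC2_DefaultRole',
    ServiceRole='EMR_DefaultRole',
)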
// Spring Boot EnvironmentPostProcessor snippet: AWS client/credentials settings are
// used to populate a MapPropertySource added to the ConfigurableEnvironment
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.env.EnvironmentPostProcessor;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.MapPropertySource;

import java.util.HashMap;
import java.util.Map;
---
AWSTemplateFormatVersion: '2010-09-09'
Description: "Monitoring - Dashboard - Http Errors"
Parameters:
  EnvironmentName:
    Type: String