Skip to content

Instantly share code, notes, and snippets.

View anilktechie's full-sized avatar
💭
I may be slow to respond.

ak anilktechie

💭
I may be slow to respond.
View GitHub Profile
import psycopg2
# Functions for reading scripts
class ScriptReader(object):
    """Utility for loading SQL/script text from disk."""

    @staticmethod
    def get_script(path):
        """Return the full text content of the file at ``path``.

        A context manager is used so the file handle is closed
        deterministically; the original ``open(path, 'r').read()``
        left the handle open until garbage collection.
        """
        with open(path, 'r') as script_file:
            return script_file.read()
# Utils for messages
@anilktechie
anilktechie / snowflake_database_ddl.py
Created December 6, 2021 00:32 — forked from rtempleton/snowflake_database_ddl.py
Sample Python script to loop through all the database objects and generate the DDL to replicate an environment prior to account replication being made available.
#!/usr/bin/env python
# Get all the database objects and permissions.
# Can be used after running snowflake_roles.py to create the required roles
# 2018-10-23 jfrink added ddl for roles, modified connection parameters
# 2019-01-15 jfrink added Roles and permissions report by object.
# 2019-03-07 jfrink added extract script to create a dump of all the tables to a stage
# and also the corresponding script to load all the data.
# Converted show tables over to using information schema for cases greater than 10k rows.
# Converted show views over to using information schema for cases greater than 10k rows.
-- Run as SYSADMIN so the created objects are owned by that role.
use role sysadmin;

-- set up dev environment; IF NOT EXISTS keeps the script re-runnable
create database if not exists my_test_db;
-- Qualify the schema with its database: the original bare name relied on
-- my_test_db happening to be the session's current database.
create schema if not exists my_test_db.fuzzy_match;
-- auto_suspend is in seconds; initially_suspended avoids consuming
-- credits before the warehouse is first used.
create warehouse if not exists dev_wh warehouse_size = 'small' auto_suspend = 300 initially_suspended = true;

use schema my_test_db.fuzzy_match;
use warehouse dev_wh;
@anilktechie
anilktechie / SnowflakeCreditMonitoring.sql
Created December 6, 2021 00:35 — forked from rtempleton/SnowflakeCreditMonitoring.sql
SQL source code used in the Snowflake Resource Monitor reports delivered to Slack using Apache Nifi article: https://medium.com/@ryan_templeton/snowflake-resource-monitor-reports-delivered-to-slack-using-apache-nifi-8dfd4fc4d579
--create a table to hold the metrics you wish to monitor and their threshold values
CREATE TABLE "MY_TEST_DB"."PUBLIC"."THRESHOLDS" (
    "SERVICE_TYPE"   STRING NOT NULL,  -- service being metered
    "CREDITS_BILLED" DOUBLE NOT NULL   -- alert threshold, in credits
)
COMMENT = 'Used for the Nifi alerting demo';

--insert some sample records into the thresholds table
-- Explicit column list so the statement survives schema evolution;
-- keyword case normalized to match the CREATE TABLE above.
INSERT INTO "MY_TEST_DB"."PUBLIC"."THRESHOLDS" ("SERVICE_TYPE", "CREDITS_BILLED") VALUES
    ('AUTO_CLUSTERING', 10),
    ('PIPE', 10),
    ('MATERIALIZED_VIEW', 10),
    ('WAREHOUSE_METERING', 10);
--query to compare current metrics to threshold values
--This is used for the HOURLY report
select a.*, iff(b.credits_billed is null, 0, b.credits_billed)::string as credits_billed from
"MY_TEST_DB"."PUBLIC"."THRESHOLDS" a left join
@anilktechie
anilktechie / SnowflakeCreditMonitoring.xml
Created December 6, 2021 00:35 — forked from rtempleton/SnowflakeCreditMonitoring.xml
This is an importable Nifi template that can be used to monitor Snowflake resources and send reports via Slack. https://medium.com/@ryan_templeton/snowflake-resource-monitor-reports-delivered-to-slack-using-apache-nifi-8dfd4fc4d579
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<template encoding-version="1.2">
<description>Workflow to monitor Snowflake credit consumption and report to Slack. Full article can be found here https://snowflakecomputing.atlassian.net/wiki/spaces/RT/pages/771200336/Creating+a+Snowflake+usage+report+using+Apache+Nifi
Requires existing tables and apps defined in your Snowflake account and your Slack account</description>
<groupId>be279da7-0167-1000-6a67-4d0cb36decc6</groupId>
<name>SnowflakeCreditMonitoring</name>
<snippet>
<connections>
<id>1d50d3ac-3446-3e09-0000-000000000000</id>
@anilktechie
anilktechie / WITSML_SNOWFLAKE.sql
Created December 6, 2021 00:35 — forked from rtempleton/WITSML_SNOWFLAKE.sql
Source code for WITSML processing in Snowflake referenced in
--set up your environment
-- IF NOT EXISTS makes the setup safe to re-run (idempotent migration style).
create database if not exists my_test_db;
create schema if not exists my_test_db.witsml;
use schema my_test_db.witsml;

--create the staging table where all WITSML files are loaded to by Snowpipe
-- Single VARIANT column: each raw WITSML document lands whole in col1.
create table if not exists witsml_temp (col1 variant);
@anilktechie
anilktechie / iotworkshop-DataScience.json
Created December 6, 2021 00:35 — forked from rtempleton/iotworkshop-DataScience.json
Zeppelin Notebook for use in Hortonworks IOT workshop
{
"paragraphs": [
{
"text": "%md\n### Run simple queries to check the count of both the iotdata and iotrollup tables.\nClick the play button within each of the paragraphs to rerun/update the content\n",
"user": "admin",
"dateUpdated": "2018-10-05T19:03:40-0400",
"config": {
"tableHide": false,
"editorSetting": {
"language": "markdown",
@anilktechie
anilktechie / sql_builder_insert.py
Created December 19, 2021 05:57 — forked from stephane-klein/sql_builder_insert.py
sql_builder_insert Python example
from psycopg2 import sql
import queries
pg_session = queries.Session(
queries.uri(
host='localhost',
port='5439',
dbname='postgres',
user='postgres',
password='password'
@anilktechie
anilktechie / mysql_wrapper.py
Created December 19, 2021 05:59 — forked from lovesh/mysql_wrapper.py
A MySQLdb wrapper class
import MySQLdb
import MySQLdb.cursors
from config import config
class Cursor(object):
def __init__(self, mysql_cursor):
    # Wrap a raw MySQLdb cursor; operations on this object delegate to it.
    # NOTE(review): presumably mysql_cursor is a MySQLdb.cursors cursor
    # instance — confirm against callers (class is truncated in this view).
    self.cursor = mysql_cursor
def __iter__(self):
@anilktechie
anilktechie / postgres.py
Created December 19, 2021 06:00 — forked from goldsborough/postgres.py
Python psycopg2 wrapper
###########################################################################
#
## @file postgres.py
#
###########################################################################
import psycopg2
###########################################################################
#