#!/usr/bin/env bash
# Run a GUI container (Firefox) on macOS by forwarding X11 over TCP.

# First non-loopback IPv4 address of this host.
get_ip() {
    ifconfig | grep "inet " | grep -Fv 127.0.0.1 | awk '{print $2}' | head -1
}

# Bridge TCP port 6000 to the local X server socket; run it in the background.
socat TCP-LISTEN:6000,reuseaddr,fork UNIX-CLIENT:"$DISPLAY" &

# Allow X connections from this host, then point the container's DISPLAY at en0.
xhost +"$(get_ip)"
xhost +"$(hostname)"
docker run --rm -it ${VOLUMES} -e DISPLAY="$(ipconfig getifaddr en0):0.0" jess/firefox
#!/usr/bin/env bash
set -e

SCRIPT_NAME=$(basename "$0")
USAGE="$SCRIPT_NAME <instance-name> <instance-type>\nFor example: $SCRIPT_NAME myserver t3.2xlarge"

if [ -z "$1" ] || [ -z "$2" ]; then
    echo -e "$USAGE"
    exit 1
fi
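The preview cuts off after the argument check. A minimal, hypothetical continuation (the AMI ID, key pair, and security group below are placeholders, not values from the original gist) might launch the instance with the AWS CLI like this:

INSTANCE_NAME="$1"
INSTANCE_TYPE="$2"

aws ec2 run-instances \
    --image-id ami-xxxxxxxx \
    --instance-type "$INSTANCE_TYPE" \
    --key-name my-key \
    --security-group-ids sg-xxxxxxxx \
    --tag-specifications "ResourceType=instance,Tags=[{Key=Name,Value=$INSTANCE_NAME}]"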
package main

import (
    "bytes"
    "fmt"
    "time"

    "github.com/sirupsen/logrus"
    "github.com/spf13/viper"
    model "github.com/weldpua2008/supraworker/model"
)
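The preview ends at the import block, so none of the gist's logic is visible. As a separate, self-contained sketch of how the two main dependencies are usually wired together (viper for configuration, logrus for logging; the setting name below is hypothetical and nothing here comes from supraworker itself):

package main

import (
    "time"

    "github.com/sirupsen/logrus"
    "github.com/spf13/viper"
)

func main() {
    // Hypothetical setting; the real configuration keys are not shown in the preview.
    viper.SetDefault("poll_interval", "5s")
    interval, err := time.ParseDuration(viper.GetString("poll_interval"))
    if err != nil {
        logrus.Fatalf("invalid poll_interval: %v", err)
    }
    logrus.WithField("interval", interval).Info("worker configured")
}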
weldpua2008 / temporary_creds.sh
Last active August 4, 2020 16:02
Prepare Hadoop S3A credentials from another AWS account
#!/bin/sh
# More info:
# https://hadoop.apache.org/docs/r3.0.3/hadoop-aws/tools/hadoop-aws/s3guard.html
# https://docs.aws.amazon.com/IAM/latest/UserGuide/tutorial_cross-account-with-roles.html
#
# Disable S3Guard for a single command via the per-bucket option:
# hdfs dfs -Dfs.s3a.bucket.s3a-bucket.metadatastore.impl=org.apache.hadoop.fs.s3a.s3guard.NullMetadataStore -ls s3a://s3a-bucket/
#
# Trust relationship for CrossAccountRole in account 222222 allows access from account 123456:
# {
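The preview breaks off inside the trust-policy JSON. The flow the description implies is: assume CrossAccountRole with `aws sts assume-role`, then hand the temporary keys to S3A's TemporaryAWSCredentialsProvider. A hedged sketch of that flow (the role ARN, session name, and bucket are placeholders, not the gist's actual values):

CREDS=$(aws sts assume-role \
    --role-arn arn:aws:iam::222222:role/CrossAccountRole \
    --role-session-name s3a-session \
    --query 'Credentials.[AccessKeyId,SecretAccessKey,SessionToken]' \
    --output text)
read -r AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN <<EOF
$CREDS
EOF

hdfs dfs \
    -Dfs.s3a.aws.credentials.provider=org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider \
    -Dfs.s3a.access.key="$AWS_ACCESS_KEY_ID" \
    -Dfs.s3a.secret.key="$AWS_SECRET_ACCESS_KEY" \
    -Dfs.s3a.session.token="$AWS_SESSION_TOKEN" \
    -ls s3a://s3a-bucket/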
from pyspark.sql.types import ArrayType, StructField, StructType, StringType, IntegerType
from pyspark.sql import SparkSession
# Create a Spark session
spark = SparkSession.builder \
    .appName('appName') \
    .getOrCreate()

# Sample rows: (category, count, description)
data = [('Category A', 100, "This is category A"),
        ('Category B', 120, "This is category B"),
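The preview is cut off mid-list. A likely continuation (the third row and the exact field names are guesses, not taken from the original) closes the list, defines a schema with the imported types, and builds the DataFrame:

        ('Category C', 150, "This is category C")]

schema = StructType([
    StructField('category', StringType(), True),
    StructField('count', IntegerType(), True),
    StructField('description', StringType(), True),
])
df = spark.createDataFrame(data, schema=schema)
df.show()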
weldpua2008 / migrate_data.py
Last active January 20, 2022 09:51 — forked from bnekolny/migtrate_data.py
MLflow migration script: move tracking data from the filesystem store to a database backend
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Migration From FileBased store of MLFlow 0.8 up to 1.8.0
Credits:
* Based on work of bnekolny https://gist.github.com/bnekolny/8ec96faa0d57913eea6bb7848c06b912
* Latest version https://gist.github.com/weldpua2008/7f0c4644d247bd0fc7ba9a83c2d337d5
requirements:
pyyaml version 5.1 was required
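Only the docstring survives in this preview. The general shape of such a migration, as a hedged reconstruction rather than the gist's actual code (read runs from the file store through the MLflow client API and re-log them against a database-backed tracking URI; both URIs below are placeholders):

from mlflow.tracking import MlflowClient

src = MlflowClient(tracking_uri="file:///path/to/mlruns")              # old filesystem store
dst = MlflowClient(tracking_uri="postgresql://user:pass@host/mlflow")  # new DB-backed store

for exp in src.list_experiments():
    dst_exp_id = dst.create_experiment(exp.name)
    for run in src.search_runs([exp.experiment_id]):
        new_run = dst.create_run(dst_exp_id)
        for key, value in run.data.params.items():
            dst.log_param(new_run.info.run_id, key, value)
        for key, value in run.data.metrics.items():
            dst.log_metric(new_run.info.run_id, key, value)
        dst.set_terminated(new_run.info.run_id)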
weldpua2008 / ping.go
Created March 27, 2020 21:18
Run ping from Go
package main

import (
    "bufio"
    "fmt"
    "io"
    "log"
    "os"
    "os/exec"
)
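The preview ends at the imports. A minimal sketch of running ping and streaming its output line by line (the target host and the -c count are placeholders; the gist's actual body may differ):

func main() {
    cmd := exec.Command("ping", "-c", "4", "8.8.8.8")

    stdout, err := cmd.StdoutPipe()
    if err != nil {
        log.Fatal(err)
    }
    stderr, err := cmd.StderrPipe()
    if err != nil {
        log.Fatal(err)
    }
    if err := cmd.Start(); err != nil {
        log.Fatal(err)
    }

    // Mirror ping's stderr and print each stdout line as it arrives.
    go io.Copy(os.Stderr, stderr)
    scanner := bufio.NewScanner(stdout)
    for scanner.Scan() {
        fmt.Println(scanner.Text())
    }
    if err := cmd.Wait(); err != nil {
        log.Fatal(err)
    }
}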
weldpua2008 / dump_sql_from_airflow.sh
Last active August 26, 2019 20:24
Get sql_alchemy_conn details from Airflow
#!/usr/bin/env bash
# Parse airflow.cfg for the MySQL connection details in sql_alchemy_conn.
export MYSQL_USER=$(grep sql_alchemy_conn /home/airflow/airflow.cfg | cut -d ":" -f 2 | sed 's,//,,g')
export MYSQL_PASSWORD=$(grep sql_alchemy_conn /home/airflow/airflow.cfg | cut -d ":" -f 3 | cut -d '@' -f 1)
export MYSQL_DATABASE=$(grep sql_alchemy_conn /home/airflow/airflow.cfg | cut -d ":" -f 3 | cut -d '@' -f 2 | cut -d '/' -f 2)
export MYSQL_HOST=$(grep sql_alchemy_conn /home/airflow/airflow.cfg | cut -d ":" -f 3 | cut -d '@' -f 2 | cut -d '/' -f 1)

# Querying data
docker run --rm -ti -v "$PWD/data":/data mysql:5 sh -c "exec mysql -u$MYSQL_USER -p\"$MYSQL_PASSWORD\" -h $MYSQL_HOST -D $MYSQL_DATABASE"

# Dumping data
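The dump command itself is missing from the preview; a plausible equivalent using the same container and variables (the output path under /data is my choice, not the gist's) would be:

docker run --rm -ti -v "$PWD/data":/data mysql:5 sh -c "exec mysqldump -u$MYSQL_USER -p\"$MYSQL_PASSWORD\" -h $MYSQL_HOST $MYSQL_DATABASE > /data/airflow_dump.sql"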
// Random alphanumeric string
def randomString(length: Int) = scala.util.Random.alphanumeric.take(length).mkString

// signum: compare against zero instead of converting to Long, so fractional
// values and arbitrarily large BigInts are handled correctly
def signum[T](v: T)(implicit num: Numeric[T]): Int = {
  val c = num.compare(v, num.zero)
  if (c > 0) 1 else if (c < 0) -1 else 0
}

print(signum(1.2))                  // 1
print(signum(BigInt(2).pow(1029)))  // 1
weldpua2008 / pySpark---WordCount.py
Created February 17, 2019 15:28
Count the words of a file using a German dictionary
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Counts words of the file with German dictionary."""
__author__ = "Valeriy Soloviov"
__copyright__ = "Copyright 2019"
# Prepare file:
# iconv -f ISO-8859-15 -t UTF-8 ~/anna_k.txt > ~/anna_k_utf8.txt
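The preview stops at the iconv step. A rough sketch of the counting itself (a hedged reconstruction; the German-dictionary lookup from the original is not shown, so this only covers the basic word count, and the input filename follows the iconv example above):

from pyspark.sql import SparkSession

spark = SparkSession.builder.appName('WordCount-DE').getOrCreate()

lines = spark.sparkContext.textFile('anna_k_utf8.txt')
counts = (lines.flatMap(lambda line: line.lower().split())
               .map(lambda word: (word, 1))
               .reduceByKey(lambda a, b: a + b)
               .sortBy(lambda kv: kv[1], ascending=False))

# Print the 20 most frequent words.
for word, count in counts.take(20):
    print(word, count)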