View web_service_to_google_big_query_etl.py
import requests
from lxml import etree
from lxml.etree import fromstring
import csv

username = ""
password = ""
count = 0
column = 0
list = []
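
The preview above cuts off before the extract logic. As a minimal, hypothetical sketch of how the extract step could continue with the imports shown (the URL and output file name below are placeholders, not taken from the original script):

# Hypothetical sketch only; the endpoint and file name are placeholders.
url = "https://example.com/service"
response = requests.get(url, auth=(username, password))
root = fromstring(response.content)   # parse the XML payload with lxml

with open("export.csv", "w") as out_file:
    writer = csv.writer(out_file)
    for record in root:
        # One CSV row per child element of the XML record.
        writer.writerow([child.text for child in record])

The resulting CSV can then be loaded into Google BigQuery, for example with the bq load command-line tool or the google-cloud-bigquery client library.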
View database dump
#!/bin/bash
mysqldump -u username -pxxxxx titandb1 --single-transaction --quick --lock-tables=false > titandb1-backup-$(date +%F).sql
View sort_dict.js
// Initialize a dict with key/value pairs. The values here happen to be arrays, but that can be changed.
var dict = {
    "x": [1, 3],
    "y": [6, 90],
    "z": [9, 16],
    "a": [5, 3],
    "b": [7, 9],
    "c": [11, 3],
    "d": [17, 3],
    "t": [3, 10]
View gist:af69ae79b30ee81bfc8a75c479e41f6c
server {
    listen 443;
    server_name goto.kla.xxx.com go;   # <== 2 domains
    root /var/www/html/docs/sslbeispieldata;
    ssl on;
    ssl_certificate /var/www/ssl/xx.crt;
    ssl_certificate_key /var/www/ssl/xx.key;
    return 301 https://goto.xxxxxxxx.com;
View delete_all_tweets.py
# -*- coding: utf-8 -*-
"""
This script was originally forked from Dave Jeffery. The original implementation
was very slow, deleting around 2 tweets per second; after making it multithreaded,
I am able to delete 30-50 tweets per second.
@author: vik-y | |
---------------------------------------------------------------------------- | |
This script will delete all of the tweets in the specified account. |
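
The rest of the docstring and the implementation are cut off in this preview. A rough sketch of the multithreaded approach described above, assuming Tweepy 3.x and a thread pool (this is not the original code, and the credential values are placeholders):

# Sketch only, not the original script; fill in your own API credentials.
import tweepy
from concurrent.futures import ThreadPoolExecutor

CONSUMER_KEY = ""
CONSUMER_SECRET = ""
ACCESS_TOKEN = ""
ACCESS_TOKEN_SECRET = ""

auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)

def delete_tweet(status_id):
    # Each worker deletes a single tweet by id.
    try:
        api.destroy_status(status_id)
    except Exception as exc:
        print("Could not delete %s: %s" % (status_id, exc))

# Collect the ids of the tweets on the timeline, then delete them concurrently.
tweet_ids = [status.id for status in tweepy.Cursor(api.user_timeline).items()]
with ThreadPoolExecutor(max_workers=16) as pool:
    pool.map(delete_tweet, tweet_ids)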