Dump existing data:
python3 manage.py dumpdata > datadump.json
Change the database backend in settings.py to PostgreSQL.
Make sure you can connect to PostgreSQL. Then:
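A minimal sketch of the rest, assuming the stock Django PostgreSQL backend; the database name and credentials below are placeholders:

# settings.py -- placeholder connection values
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'djangodb',
        'USER': 'django',
        'PASSWORD': 'secret',
        'HOST': '127.0.0.1',
        'PORT': '5432',
    }
}

With that in place, python3 manage.py migrate --run-syncdb creates the schema and python3 manage.py loaddata datadump.json reloads the dump; depending on the project you may need to clear the auto-created content types before loading.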
#!/usr/bin/env bash
# Fetch the list of locally received SMS messages from the device's XML CGI endpoint
FLAG="GET_RCV_SMS_LOCAL"
PAGE="1"
# POST the XML action request and capture the response
XMLRESPONSE=$(curl -sL -XPOST 'http://10.0.0.1/xml_action.cgi?method=set&module=duster&file=message' \
# https://stackoverflow.com/a/13530258/886938
import multiprocessing as mp
import time

fn = 'c:/temp/temp.txt'

def worker(arg, q):
    '''stupidly simulates long running process'''
    start = time.time()  # time.clock() was removed in Python 3.8
import gc
import gzip
import time
import json
import shutil
import os
import sys
import tldextract
import collections
import pandas as pd
from tqdm import tqdm
// https://medium.com/@mlowicki/http-s-proxy-in-golang-in-less-than-100-lines-of-code-6a51c2f2c38c
// #!/usr/bin/env bash
// case `uname -s` in
//   Linux*) sslConfig=/etc/ssl/openssl.cnf;;
//   Darwin*) sslConfig=/System/Library/OpenSSL/openssl.cnf;;
// esac
// openssl req \
//   -newkey rsa:2048 \
//   -x509 \
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import csv
import tempfile
import time
import logging
import optparse
import itertools
from bs4 import BeautifulSoup, NavigableString, Tag

def html_to_text(html):
    """Creates a formatted text email message as a string from a rendered html template (page)"""
    soup = BeautifulSoup(html, 'html.parser')
    # Ignore anything in head
    body, text = soup.body, []
    for element in body.descendants:
        # We use type and not isinstance since comments, cdata, etc. are subclasses that we don't want
        if type(element) == NavigableString:
# referencing:
# https://www.digitalocean.com/community/tutorials/how-to-install-apache-kafka-on-ubuntu-14-04
# https://chongyaorobin.wordpress.com/2015/07/08/step-by-step-of-install-apache-kafka-on-ubuntu-standalone-mode/
$ sudo useradd kafka -m
Fast bulk inserts into PostgreSQL:
- Use an UNLOGGED table. This reduces the amount of data written to persistent storage by up to 2x.
- Create the table WITH (autovacuum_enabled=false). This saves CPU time and IO bandwidth on useless vacuuming of the table (since we never DELETE or UPDATE the table).
- Load rows with COPY FROM STDIN. This is the fastest possible approach to insert rows into a table.
- A time timestamp with time zone column is enough.
- Add synchronous_commit = off to postgresql.conf.
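A minimal Python sketch of the first three points (psycopg2 is assumed; the events table, its columns, and the connection string are made up for illustration):

import io
import psycopg2

conn = psycopg2.connect('dbname=metrics')
with conn, conn.cursor() as cur:
    # UNLOGGED + autovacuum_enabled=false: less WAL traffic and no pointless vacuum work
    cur.execute("""
        CREATE UNLOGGED TABLE IF NOT EXISTS events (
            time    timestamp with time zone,
            payload text
        ) WITH (autovacuum_enabled = false)
    """)
    # COPY FROM STDIN: tab-separated rows streamed straight into the table
    rows = io.StringIO('2015-07-01 00:00:00+00\tfirst\n2015-07-01 00:00:01+00\tsecond\n')
    cur.copy_expert('COPY events (time, payload) FROM STDIN', rows)
conn.close()

synchronous_commit = off from the last point is a server setting (postgresql.conf), though it can also be toggled per session with SET synchronous_commit TO off.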
import re

domain_regex = r'(([\da-zA-Z])([_\w-]{,62})\.){,127}(([\da-zA-Z])[_\w-]{,61})?([\da-zA-Z]\.((xn\-\-[a-zA-Z\d]+)|([a-zA-Z\d]{2,})))'
# Python: anchor the pattern at the end of the string
domain_regex = '{0}$'.format(domain_regex)
valid_domain_name_regex = re.compile(domain_regex, re.IGNORECASE)

self.domain_name = self.domain_name.lower().strip()  # .encode('ascii') would give bytes and break matching against a str pattern on Python 3
if re.match(valid_domain_name_regex, self.domain_name):
    return True
else:
    return False
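For a standalone version of the same check (the function name and sample inputs below are illustrative):

import re

DOMAIN_REGEX = re.compile(
    r'(([\da-zA-Z])([_\w-]{,62})\.){,127}(([\da-zA-Z])[_\w-]{,61})?'
    r'([\da-zA-Z]\.((xn\-\-[a-zA-Z\d]+)|([a-zA-Z\d]{2,})))$',
    re.IGNORECASE)

def is_valid_domain_name(name):
    # Lower-case and trim, then require the anchored pattern to match the whole string
    return bool(DOMAIN_REGEX.match(name.lower().strip()))

print(is_valid_domain_name('example.com'))   # True
print(is_valid_domain_name('not a domain'))  # False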