Andrei Anton (NeuronQ)
employee_id,first_name,last_name,birth_date,salary,department,public_sgtoken
e458187f-1025-5623-8123-ddef4c303c7a,Clarence,Cortez,12/17/1989,25540,skunkworks3,gn7ha#s*Ky
89e322c2-b2e0-5d8b-b5e6-28e5cdd8165c,Nettie,Reeves,4/2/1965,13206,engineering,AaISTDXwt$
456374b8-2d66-5b37-8172-84cea2a4aa1c,Mittie,Paul,8/18/1970,46326,skunkworks3,VlQQ%F^yK9
1fe2fa68-06da-5edb-b8b3-b2335802cb57,Billy,Rodriquez,8/30/1966,37226,support,uu42WSsfqQ
205f3670-825e-50e3-94cc-697bceae0ee9,Victor,Goodman,10/14/1960,58031,marketing,#Bv]lHY2cR
63df23b9-2a42-5d75-8931-7e6c74c96c54,Jessie,Daniels,8/7/1989,30408,skunkworks3,SJmYw&Tf2G
40188dd5-9159-5d79-87e0-e90597742b28,Wayne,Christensen,12/13/1980,31541,r&d,tTROe1$O$[
370fa8f2-7d93-5387-97a5-dd99d2b8929f,Luis,Moody,10/12/1978,83540,support,E4k4R8C5Lg
b657d44f-5e43-5bec-a838-1e491418dd36,Sophie,Rios,6/28/1967,12605,sales,HQNmW@#AJE
NeuronQ / main.cpp
Created October 20, 2019 13:02
Version working on macOS 10.14.6 (Radeon 555X GPU), built with Xcode 10.3, GLEW 2.1.0, and GLFW 3.3
#include <iostream>
#include <GL/glew.h>
#include <GLFW/glfw3.h>

using namespace std;

// Compile one shader stage from GLSL source and return its GL object id.
static unsigned int CompileShader(unsigned int type, const string& source) {
    unsigned int id = glCreateShader(type);
    const char* src = source.c_str();
    // listing is truncated here; the lines below are an assumed minimal completion
    glShaderSource(id, 1, &src, nullptr);
    glCompileShader(id);
    return id;
}
# GStreamer (PyGObject, Gst 1.0) script; input video and placeholder image paths below
import sys
import traceback

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GObject  # noqa

INPUT_VIDEO_FILE_PATH = '/opt/app/data/video_file.mp4'
PLACEHOLDER_FILE_PATH = '/opt/app/data/tvlogo.png'
NeuronQ / django-models-cross-db-json-fields.py
Created July 19, 2019 10:39
Django models code that works identically with a Postgres JSONField and an SQLite TextField
## AUTHOR: Andrei Anton
## LICENSE: MIT
## USAGE EXAMPLE:
# @model_with_json_fields
# class MyThing(models.Model):
#     JSON_FIELDS = (
#         ['data', dict(default=dict, blank=True)],
#         ['errors', dict(null=True, blank=True)],
#     )
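The decorator's implementation is not visible in this listing; purely as a rough sketch of the idea (the backend check, the django.contrib.postgres.fields.JSONField import, and the add_to_class call are assumptions, not the gist's code), it could look something like this:

# Hypothetical sketch, not the gist's implementation: attach each field
# declared in JSON_FIELDS, using JSONField on Postgres and TextField elsewhere.
from django.conf import settings
from django.db import models

def model_with_json_fields(cls):
    engine = settings.DATABASES['default']['ENGINE']
    for name, kwargs in getattr(cls, 'JSON_FIELDS', ()):
        if 'postgresql' in engine:
            from django.contrib.postgres.fields import JSONField
            field = JSONField(**kwargs)
        else:
            # On SQLite the value lives in a plain text column; JSON
            # (de)serialization would be handled elsewhere (e.g. properties).
            field = models.TextField(**kwargs)
        cls.add_to_class(name, field)
    return cls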
# (imports of asyncio/time and the definitions of handle_url and urls live in the rest of the gist)
async def main_concurrent():
    t = time.perf_counter()
    tasks = [asyncio.create_task(handle_url(url))
             for url in urls]
    ## All of the variants below would wait for everything:
    # V1: this will wait as little as possible
    # (point of this is to show that tasks start executing when created)
    # for task in tasks:
    #     await task
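The remaining variants are cut off in this listing; as a hedged sketch only (reusing handle_url and urls from the snippet above), an asyncio.gather based variant that also waits for everything could look like:

# Sketch, not necessarily one of the gist's variants: gather awaits all
# tasks and returns their results in submission order.
async def main_concurrent_gather():
    t = time.perf_counter()
    tasks = [asyncio.create_task(handle_url(url)) for url in urls]
    results = await asyncio.gather(*tasks)
    print(f"total time: {time.perf_counter() - t:.2f}s")
    return results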
// async_scrape.js (tested with node 11.3)
const sleep = ts => new Promise(resolve => setTimeout(resolve, ts * 1000));

async function fetchUrl(url) {
  console.log(`~ executing fetchUrl(${url})`);
  console.time(`fetchUrl(${url})`);
  await sleep(1 + Math.random() * 4);
  console.timeEnd(`fetchUrl(${url})`);
  return `<em>fake</em> page html for ${url}`;
}
# async_scrape.py (requires Python 3.7+)
import asyncio, random, time

async def fetch_url(url):
    print(f"~ executing fetch_url({url})")
    t = time.perf_counter()
    await asyncio.sleep(random.randint(1, 5))
    print(f"time of fetch_url({url}): {time.perf_counter() - t:.2f}s")
    return f"<em>fake</em> page html for {url}"
// callbacks_async_scrape.js (tested with node 11.3)
const http = require('http');
const https = require('https');

const fetchUrl = (url, onSuccess, onError) => {
  console.time(`fetchUrl(${url})`);
  (url.indexOf('https') === 0 ? https : http).get(url, resp => {
    let html = '';
    resp.on('data', chunk => html += chunk);
    resp.on('end', () => {