Skip to content

Instantly share code, notes, and snippets.

Avatar
🎯
Focusing

Andrei Anton NeuronQ

🎯
Focusing
View GitHub Profile
@NeuronQ
NeuronQ / main.cpp
Created Oct 20, 2019
Version confirmed working on macOS 10.14.6 (GPU: Radeon 555X) with Xcode 10.3, GLEW 2.1.0, and GLFW 3.3
View main.cpp
#include <iostream>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
using namespace std;
static unsigned int CompileShader(unsigned int type, const string& source) {
unsigned int id = glCreateShader(type);
const char* src = source.c_str();
View test_pipeline.py
import sys
import traceback
import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GObject # noqa
# Source video fed into the GStreamer pipeline (path is inside the container/app image).
INPUT_VIDEO_FILE_PATH = '/opt/app/data/video_file.mp4'
# Still image used as a placeholder — presumably shown when the input is unavailable; TODO confirm against pipeline code
PLACEHOLDER_FILE_PATH = '/opt/app/data/tvlogo.png'
@NeuronQ
NeuronQ / django-models-cross-db-json-fields.py
Created Jul 19, 2019
Django models code that works identically with a Postgres JSONField and an SQLite TextField
View django-models-cross-db-json-fields.py
## AUTHOR: Andrei Anton
## LICENSE: MIT
## USAGE EXAMPLE:
# @model_with_json_fields
# class MyThing(models.Model):
# JSON_FIELDS = (
# ['data', dict(default=dict, blank=True)],
# ['errors', dict(null=True, blank=True)],
# )
View async_node_python-main_concurrent.py
async def main_concurrent():
t = time.perf_counter()
tasks = [asyncio.create_task(handle_url(url))
for url in urls]
## All of the variants below would wait for everything:
# V1: this will wait as little as possible
# (point of this is to show that tasks start executing when created)
# for task in tasks:
View async_scrape.js
// async_scrape.js (tested with node 11.3)
// Resolve after `ts` seconds (setTimeout takes milliseconds, hence the * 1000).
const sleep = function (ts) {
  return new Promise(function (resolve) {
    setTimeout(resolve, ts * 1000);
  });
};
// Simulated page fetch: logs start, waits a random 1-5 s, returns stub HTML for `url`.
async function fetchUrl(url) {
  const label = `fetchUrl(${url})`;
  console.log(`~ executing ${label}`);
  console.time(label);
  await sleep(1 + Math.random() * 4);
  console.timeEnd(label);
  return `<em>fake</em> page html for ${url}`;
}
View async_scrape.py
# async_scrape.py (requires Python 3.7+)
import asyncio, random, time
async def fetch_url(url):
    """Fake-fetch *url*: announce start, sleep a random 1-5 s, return stub HTML."""
    print(f"~ executing fetch_url({url})")
    started = time.perf_counter()
    await asyncio.sleep(random.randint(1, 5))
    elapsed = time.perf_counter() - started
    print(f"time of fetch_url({url}): {elapsed:.2f}s")
    return f"<em>fake</em> page html for {url}"
View async_scrape-head.js
// async_scrape.js (tested with node 11.3)
const sleep = ts => new Promise(resolve => setTimeout(resolve, ts * 1000));
// Pretend to download `url`: log, pause for a random 1-5 seconds, hand back fake HTML.
async function fetchUrl(url) {
  console.log(`~ executing fetchUrl(${url})`);
  console.time(`fetchUrl(${url})`);
  const delaySeconds = 1 + Math.random() * 4;
  await sleep(delaySeconds);
  console.timeEnd(`fetchUrl(${url})`);
  return `<em>fake</em> page html for ${url}`;
}
View async_scrape-head.py
# async_scrape.py (requires Python 3.7+)
import asyncio, random, time
async def fetch_url(url):
    """Simulated scraper coroutine: sleeps 1-5 s, then returns stub HTML for *url*."""
    print(f"~ executing fetch_url({url})")
    t0 = time.perf_counter()
    delay = random.randint(1, 5)  # random latency, whole seconds
    await asyncio.sleep(delay)
    print(f"time of fetch_url({url}): {time.perf_counter() - t0:.2f}s")
    return f"<em>fake</em> page html for {url}"
View callbacks_async_scrape.js
// callbacks_async_scrape.js (tested with node 11.3)
const http = require('http');
const https = require('https');
const fetchUrl = (url, onSuccess, onError) => {
console.time(`fetchUrl(${url})`);
(url.indexOf('https') === 0 ? https : http).get(url, resp => {
let html = '';
resp.on('data', chunk => html += chunk);
resp.on('end', () => {
View sync_scrape.js
// sync_scrape.js (tested with node 11.3)
const request = require("sync-request");
// Blocking fetch of `url` via sync-request; times the round-trip and returns the raw body.
const fetchUrl = url => {
  const label = `fetchUrl(${url})`;
  console.time(label);
  const html = request("GET", url).getBody();
  console.timeEnd(label);
  return html;
};