deliro (Tochka Bank, Russia)
🦀 Rust in action
import pickle as _pickle
import json
from functools import partial


class pickle:
    # Drop-in replacement for the pickle module that always serializes
    # with the highest available protocol; loads is the unchanged stdlib function.
    dumps = partial(_pickle.dumps, protocol=_pickle.HIGHEST_PROTOCOL)
    loads = _pickle.loads


import redis
from time import sleep
from random import randint

from redq import RedisDeque

r = redis.StrictRedis()
q = RedisDeque(r, "l")


def consumer():
    while True:
        # ... (the rest of the consumer loop is cut off in the gist preview)
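The pickle wrapper above keeps the familiar pickle.dumps / pickle.loads interface while forcing the highest protocol for everything pushed through the Redis-backed deque. A quick round-trip check of the wrapper (not part of the gist; only standard-library pickle behaviour is assumed):

# Round-trip through the wrapper: dumps always uses HIGHEST_PROTOCOL,
# loads is the unchanged stdlib function, so the payload comes back intact.
payload = {"task_id": 42, "args": [1, 2, 3]}
blob = pickle.dumps(payload)          # bytes, highest pickle protocol
assert pickle.loads(blob) == payload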
deliro / parse_wiki.py
Last active November 21, 2020 16:33
Async parsing of Wikipedia with a process pool
import asyncio
from concurrent.futures import ProcessPoolExecutor

import aiohttp
from loguru import logger as loguru
from lxml.html import fromstring

# CPU-bound HTML parsing is pushed into a process pool; the semaphore caps
# concurrent parsing jobs at the pool's worker count.
pool = ProcessPoolExecutor()
parser_sem = asyncio.Semaphore(pool._max_workers)
# ... (the rest of the gist is cut off in this preview)
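The preview stops before the fetch/parse coroutines. Below is a minimal sketch of how this pool-plus-semaphore setup is typically driven with aiohttp; extract_links, fetch_and_parse, main, and the start URL are illustrative assumptions, not the gist's own code.

def extract_links(html):
    # Runs inside a pool worker: parse the HTML and return only picklable
    # data (a list of hrefs), since lxml trees cannot cross process boundaries.
    tree = fromstring(html)
    return [a for a in tree.xpath("//a/@href") if a.startswith("/wiki/")]


async def fetch_and_parse(session, url):
    # Hypothetical helper: download a page, then hand the CPU-heavy parsing
    # to the process pool, gated by the semaphore.
    async with session.get(url) as resp:
        html = await resp.text()
    async with parser_sem:
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(pool, extract_links, html)


async def main():
    async with aiohttp.ClientSession() as session:
        links = await fetch_and_parse(session, "https://en.wikipedia.org/wiki/Main_Page")
        loguru.info("found {} wiki links", len(links))


# asyncio.run(main())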
deliro / wiki_parser.go
Created March 5, 2021 11:44
Walk through the Wikipedia link graph (close to BFS) and find a path from the main page to a page containing the search term
package main

import (
    "crypto/tls"
    "fmt"
    "io/ioutil"
    "log"
    "net/http"
    "regexp"
    "strings"
)

// ... (the rest of the gist is cut off in this preview)
// Promise-based counting semaphore (gist preview, truncated below).
class Semaphore {
    constructor(max = 1) {
        if (max < 1) { max = 1; }
        this.max = max;      // maximum number of concurrent holders
        this.count = 0;      // currently granted slots
        this.queue = [];     // resolvers waiting for a free slot
    }
    acquire() {
        let promise;
        if (this.count < this.max) {
            // ... (the rest of acquire() is cut off in this preview)
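The preview ends inside acquire(). Since the rest of the class is not shown, here is a minimal sketch of the same idea (a counting semaphore that hands out futures to waiters) written in Python on top of asyncio; it illustrates the pattern, it is not a reconstruction of the gist's JavaScript.

import asyncio

class FutureSemaphore:
    # Illustrative counterpart of the JS class above: acquire() resolves
    # immediately while slots remain, otherwise the caller waits on a future
    # that release() completes in FIFO order.
    def __init__(self, max_count=1):
        self.max = max(1, max_count)
        self.count = 0
        self.queue = []

    def acquire(self):
        fut = asyncio.get_running_loop().create_future()
        if self.count < self.max:
            self.count += 1
            fut.set_result(None)
        else:
            self.queue.append(fut)
        return fut                      # await this before entering the guarded section

    def release(self):
        if self.queue:
            self.queue.pop(0).set_result(None)   # hand the slot straight to a waiter
        else:
            self.count -= 1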