## With asyncio.TaskGroup
import asyncio

async def run_errands():
    async with asyncio.TaskGroup() as tg:
        for errand, (start_time, time_to_finish) in errandsDict.items():
            tg.create_task(errands_log(errand,
                                       start_time,
                                       time_to_finish))
(base) C:\Desktop>python async.py
(Pick Up Kids) starting at 6am
(Pick Up Kids) done at 7am
======;
(Return Packages) starting at 9am
(Return Packages) done at 10am
======;
(Grocery Shopping) starting at 11am
(Grocery Shopping) done at 13am
======;
## Errands: [start_time, duration]
errandsDict = {
    'Grocery Shopping': [11, 2],
    'Return Packages': [9, 1],
    'Pick Up Kids': [6, 1],
}

async def errands_log(task, start_time, time_to_finish):
    await asyncio.sleep(start_time)
    print(f"({task}) starting at {start_time}am")
    await asyncio.sleep(time_to_finish)
    print(f"({task}) done at {start_time + time_to_finish}am")
    print("======;")
from typing import Any, Type, TypeVar
from dataclasses import dataclass

curType = TypeVar('curType', bound='Rectangle')

class Rectangle:
    def __init__(self, length: float) -> None:
        self.length = length

    @classmethod
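    ## The decorated method is cut off in the gist; a plausible completion that uses the
    ## bound TypeVar so subclasses get instances of their own type back
    ## (the name new_rectangle is hypothetical):
    def new_rectangle(cls: Type[curType], length: float) -> curType:
        return cls(length)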
from bs4 import BeautifulSoup
from urllib.request import urlopen, Request
from collections import defaultdict
import requests

tagInfo = defaultdict(list)
## tags: list of Medium tag slugs to crawl (defined elsewhere)
for tag in tags:
    startLink = "https://medium.com/tag/" + tag
    response = requests.get(startLink, allow_redirects=True)
    page = response.content
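    ## The gist stops after fetching the page; a hedged sketch of a parsing step
    ## (the h2 selector and the tagInfo layout are assumptions, not from the original):
    soup = BeautifulSoup(page, 'html.parser')
    for heading in soup.find_all('h2'):
        tagInfo[tag].append(heading.get_text(strip=True))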
## Install: !pip install -U layer
import layer
df = layer.get_dataset('layer/wikitext/datasets/wikitext-103-train').to_pandas()
doc_set = df.sentence.str.lower().tolist()
# Install: !pip install sentence_transformers
from sentence_transformers import SentenceTransformer
model = SentenceTransformer('distilbert-base-nli-mean-tokens')

# Three documents from doc_set to compare
sentences = [doc_set[9234], doc_set[9239], doc_set[1131966]]
sentence_embeddings = model.encode(sentences)

# Cosine similarity (1 - cosine distance) of the first two embeddings
from scipy.spatial import distance
print(1 - distance.cosine(sentence_embeddings[0], sentence_embeddings[1]))
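## The same comparison can be run over every pair of the three embeddings
## (this small loop is an addition, not part of the original gist):
from itertools import combinations
for i, j in combinations(range(len(sentences)), 2):
    sim = 1 - distance.cosine(sentence_embeddings[i], sentence_embeddings[j])
    print(f"similarity({i}, {j}) = {sim:.3f}")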
##### Generate the wordcloud #####
import numpy as np
from PIL import Image

my_freq_grams = freq_grams                       # n-gram -> frequency mapping built earlier
curMask = np.array(Image.open(pathToYourPic))    # pathToYourPic: path to the mask image
wc = WordCloud(background_color='white',
               stopwords=set(curSW),             # stopword list defined in the next snippet
               mask=curMask,
               width=800,
               height=600,
               relative_scaling=.6,
               max_font_size=60)
wc.generate_from_frequencies(my_freq_grams)
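
## A hedged sketch of rendering the cloud in the mask image's own colors
## (the matplotlib display code below is assumed, not part of the original gist):
import matplotlib.pyplot as plt
from wordcloud import ImageColorGenerator

image_colors = ImageColorGenerator(curMask)
plt.figure(figsize=(10, 8))
plt.imshow(wc.recolor(color_func=image_colors), interpolation='bilinear')
plt.axis('off')
plt.show()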
from wordcloud import WordCloud, ImageColorGenerator
from nltk.corpus import stopwords
from nltk.util import ngrams
import nltk

## Substitution callback (e.g. for re.sub): returns the replacement for a matched token
def replace(match):
    return swMapping[match.group(0)]

## Define stopwords
curSW = stopwords.words('english')
curSW += ['unk']
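
## swMapping is never defined in the gist; a sketch of how replace() and curSW might be
## applied to the corpus before counting n-grams (the '@-@' mapping is inferred from the
## wikitext sample below; everything else here is an assumption):
import re

swMapping = {'@-@': '-'}
pattern = re.compile('|'.join(re.escape(k) for k in swMapping))
cleaned_docs = [pattern.sub(replace, doc) for doc in doc_set]
filtered_docs = [' '.join(w for w in doc.split() if w not in curSW) for doc in cleaned_docs]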
## Top 5 next words
['dishes', 'and', ',', '-', 'recipes']
## Comparing with the original text
falafel has become popular among vegetarians and vegans ,
as an alternative to meat @-@ laden street foods ,
and is now sold in packaged mixes in health @-@ food stores .
falafel 's versatility has allowed for the reformulating of recipes for meatloaf ,
sloppy joes and spaghetti and meatballs into vegetarian [dishes] .
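
## The code that produced the 'Top 5 next words' list is not included in the gist; a
## minimal frequency-based sketch using nltk.util.ngrams (the bigram scheme, the variable
## names, and the query word 'vegetarian' are assumptions based on the sample above):
from collections import defaultdict, Counter
from nltk.util import ngrams

next_word_counts = defaultdict(Counter)
for doc in doc_set:
    tokens = doc.split()
    for w1, w2 in ngrams(tokens, 2):
        next_word_counts[w1][w2] += 1

print([w for w, _ in next_word_counts['vegetarian'].most_common(5)])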