Install aspell first. For example, on a Debian-based system:
apt-get install aspell
Check all Markdown files inside the src directory:
import time | |
import signal | |
def busy_work(seconds):
    """Simulate a blocking task: print a start marker, sleep, print a stop marker.

    Args:
        seconds: how long to block, in seconds (passed straight to time.sleep).
    """
    print("Start busy_work")
    time.sleep(seconds)
    print("Stop busy_work")
from sanic import Sanic | |
from sanic import response | |
import asyncio | |
# Sanic application instance; a task registry is attached at startup below.
api = Sanic(__name__)


@api.listener("before_server_start")
async def initialize_tasks_set(api, loop):
    """Attach an empty task-registry set to the app before the server starts.

    Sanic invokes this listener with the app and its event loop; only the
    app is used here.
    """
    api.tasks = set()
import sys | |
# Colorize the token | |
def colorize(color: int, token: str) -> str:
    """Return *token* wrapped in ANSI 256-color escape codes for *color*.

    Args:
        color: ANSI 256-color palette index (0-255).
        token: text to wrap.

    Returns:
        The token preceded by the color escape and followed by the reset code.
    """
    template = '\033[38;5;{color}m{token}\033[0m'
    return template.format(color=color, token=token)
# Python Comment | |
def python_comment_syntax(color: int) -> None: |
function validemail(str) | |
if str == nil then return nil end | |
if (type(str) ~= 'string') then | |
error("Expected string") | |
return nil | |
end | |
local lastAt = str:find("[^%@]+$") | |
local localPart = str:sub(1, (lastAt - 2)) -- Returns the substring before '@' symbol | |
local domainPart = str:sub(lastAt, #str) -- Returns the substring after '@' symbol | |
-- we werent able to split the email properly |
def read_images(data_dir):
    """Build a shuffled TF1 filename queue over the PNG images in *data_dir*.

    Uses the legacy queue-runner input pipeline: a glob op matches every
    ``*.png`` file, and a string-input producer cycles through them for
    NUM_EPOCHS epochs in shuffled order.

    Args:
        data_dir: directory whose ``*.png`` files feed the pipeline.
    """
    glob_pattern = os.path.join(data_dir, '*.png')
    filenames = tf.train.match_filenames_once(glob_pattern, name='list_files')
    # NOTE(review): the queue is built but never returned in this snippet —
    # presumably the original function continues; confirm before reusing.
    queue = tf.train.string_input_producer(
        filenames,
        num_epochs=NUM_EPOCHS,
        shuffle=True,
        name='queue')
# Convolutional layer 1 | |
with tf.name_scope('conv1'): | |
W = tf.Variable( | |
tf.truncated_normal( | |
shape=( | |
CONV1_FILTER_SIZE, | |
CONV1_FILTER_SIZE, | |
NUM_CHANNELS, | |
CONV1_FILTER_COUNT), | |
dtype=tf.float32, |
(function() { | |
var width = 320; | |
var height = 0; | |
var streaming = false; | |
var video = null; | |
var canvas = null; | |
var photo = null; | |
var startbutton = null; |
import tensorflow as tf | |
import sys | |
import os | |
def create_graph(pattern):
    """Create a TF graph containing an explicit 'init' op for all globals.

    Args:
        pattern: opaque value; only echoed to stdout for debugging here.

    Returns:
        The constructed ``tf.Graph`` (the original built it but never
        returned it, which made the function useless to callers).
    """
    # Python 3 print call; the original used Python 2 print-statement
    # syntax, which is a SyntaxError on Python 3. Output is identical.
    print('pattern:', pattern)
    graph = tf.Graph()
    with graph.as_default():
        # Register an initializer op for every global variable under
        # the name 'init' so a session can run it by name.
        tf.variables_initializer(tf.global_variables(), name='init')
    return graph
// file: whitespace_tokenizer.rs | |
use std::env; | |
use std::process; | |
use std::fs::File; | |
use std::io::BufReader; | |
use std::io::Read; | |
fn main() { | |
let args: Vec<String> = env::args().collect(); | |
if args.len() != 2 { |