@nawb
nawb / getall.py
Created Sep 17, 2016
Downloads all course content from a URL
'''
Given a website URL, downloads all PDFs/PPTXs into the files/ folder.
For the future:
- download files as-is, keeping the site's directory structure
- create our own directory structure based on filenames (lec01 goes to /lec, q1/p1 to /p, etc.)
'''
SAVETODIR = "files/"
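The preview cuts off after SAVETODIR. A minimal sketch of the download loop the docstring describes, using requests and bs4 (both assumptions; the gist does not show which libraries the original script used):

import os
import requests
import bs4

def getall(url):
    # ensure the output folder exists
    os.makedirs(SAVETODIR, exist_ok=True)
    soup = bs4.BeautifulSoup(requests.get(url).text, "html.parser")
    for link in soup.find_all("a", href=True):
        href = link["href"]
        if href.lower().endswith((".pdf", ".pptx")):
            # resolve relative links against the page URL
            target = requests.compat.urljoin(url, href)
            name = os.path.join(SAVETODIR, os.path.basename(href))
            with open(name, "wb") as f:
                f.write(requests.get(target).content)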
nawb / sche
import processing.serial.*;
import java.io.*;
import java.util.*;

// A process record holding its scheduling window
class Node
{
  private int start_time;
  private int end_time;
  public Node(int a, int b)
  {
    start_time = a;  // assumed completion: the gist preview cuts off mid-constructor
    end_time = b;
  }
}
nawb / Scheduler.pde
int holding_rate = 3;  // priority growth rate for the holding queue
int accepted_rate = 2; // priority growth rate for the accepted queue
int[] accepted = {0, inf, inf, inf}; // accepted queue (inf, defined elsewhere in the sketch, marks an empty slot)
int[] holding = {0, 0, 0, 0};        // holding queue
/* The benefit of SRR is that processes already running do not take too long to complete. */
void selfishRoundRobin(int timeSlice) // in seconds
{
  init(0);
  reOrderPorts();
nawb / Scheduler.pde
void checkHoldingQueue() {
  // Check for procs in holding that have matured to the priority of the ones in accepted.
  // In essence we should only check the largest proc in the holding queue, but since we're not sorting it...
  for (int i = 0; i < devices; i++) {
    if (holding[i] == accepted[i]) {
      accepted[i] = holding[i]; // put it in the accepted queue (was `==`, a no-op comparison)
      holding[i] = -inf;        // take it out of the holding queue (was `==` as well)
    }
  }
}
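To make the scheme concrete: in selfish round robin, a waiting process's priority grows at holding_rate while an accepted process's grows at the slower accepted_rate, so newcomers eventually catch up and get promoted. A minimal Python sketch of that promotion rule (hypothetical names, not the Processing code above):

HOLDING_RATE = 3   # priority growth per tick in the holding queue
ACCEPTED_RATE = 2  # slower growth per tick in the accepted queue

def tick(holding, accepted):
    # Age every process, then promote any holding process whose
    # priority has caught up with the accepted queue's minimum.
    accepted[:] = [p + ACCEPTED_RATE for p in accepted]
    holding[:] = [p + HOLDING_RATE for p in holding]
    floor = min(accepted) if accepted else 0
    for p in list(holding):
        if p >= floor:
            holding.remove(p)
            accepted.append(p)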
@nawb
nawb / runner.py
Last active Aug 29, 2015 — forked from alecxe/runner.py
Self-contained minimum example script to run scrapy
import json
from scrapy.crawler import Crawler
from scrapy.contrib.loader import ItemLoader
from scrapy.contrib.loader.processor import Join, MapCompose, TakeFirst
from scrapy import log, signals, Spider, Item, Field
from scrapy.settings import Settings
from twisted.internet import reactor
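The preview stops at the imports. For context, a sketch of how they were typically wired together in the scrapy 0.24-era API this fork targets (spider is any Spider instance; this is that era's documented pattern, not necessarily the exact body of the gist):

def run_spider(spider):
    crawler = Crawler(Settings())
    # stop the reactor once the spider finishes
    crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
    crawler.configure()
    crawler.crawl(spider)
    crawler.start()
    log.start()
    reactor.run()  # blocks until spider_closed fires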
nawb / safas-scrapy.py
# to make requests to the website
import requests
# to scrape with BeautifulSoup
import bs4
# to use regex
import re
# to handle JSON
import json

response = requests.get('http://www.islamawareness.net/Dua/rabbana.html')
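The gist is truncated after the request. A sketch of how the four imports typically combine; the tag choice and the "Rabbana" pattern are assumptions about the page, not code from the original:

soup = bs4.BeautifulSoup(response.text, "html.parser")
verses = []
for td in soup.find_all("td"):
    text = td.get_text(" ", strip=True)
    if re.search(r"\bRabbana\b", text):  # keep only the du'a passages
        verses.append(text)
print(json.dumps(verses, ensure_ascii=False, indent=2))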
nawb / seeyouspacecowboy.sh
#!/usr/bin/env bash
# SEE YOU SPACE COWBOY by DANIEL REHN (danielrehn.com)
# Displays a timeless message in your terminal with cosmic color effects
# Usage: add "sh ~/seeyouspacecowboy.sh; sleep 2" to .bash_logout (or similar) in your home directory
# (adjust the sleep value to keep the message on screen longer)
# Cosmic color sequence
nawb / tmux.conf
# Adapted from negativetwelve/dotfiles/tmux/tmux.conf
# Ring the bell if any background window rang a bell
set -g bell-action any
# Automatically renumber windows when you close one.
set-option -g renumber-windows on
# Default termtype. If the rcfile sets $TERM, that overrides this value.
set -g default-terminal screen-256color
@nawb
nawb / silver-shoes
Created Jun 30, 2014
Opens a new Tumblr window in incognito
#!/bin/bash
# Click your heels together three times and say "There's no place like home."
chromium --incognito --new-window http://tumblr.com

# AND THIS PART GOES IN ~/.config/openbox/rc.xml, within the <keyboard> tag:
# <keybind key="A-F5">
#   <action name="Execute">
#     <startupnotify>
#       <enabled>true</enabled>
#     </startupnotify>
#     <command>sh ~/silver-shoes</command>  <!-- assumed completion: the preview cuts off; point this at wherever the script lives -->
#   </action>
# </keybind>
nawb / LinkFixerClone.py
import praw # simple interface to the reddit API, also handles rate limiting of requests
import re
from collections import deque
from time import sleep
USERNAME = "Your username here"
PASSWORD = "Your password here"
USERAGENT = "Your useragent string here. It should include your /u/username as a courtesy to reddit"
r = praw.Reddit(USERAGENT)
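The preview ends after the Reddit handle is created. A hedged sketch of how a praw 3-era bot like this usually continues, using the deque to avoid reprocessing comments (the method names existed in praw 3, but what the original bot did next is an assumption):

r.login(USERNAME, PASSWORD)      # praw 3-style login
seen = deque(maxlen=200)         # recently handled comment ids

while True:
    for comment in r.get_comments("all", limit=100):
        if comment.id in seen:
            continue
        seen.append(comment.id)
        # inspect comment.body with re here and reply when a broken link is found
    sleep(30)  # stay well inside reddit's rate limits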