TheLie0

// Pin assignments (Y1-Y3, X1-X3)
// Y1 13
// Y2 12
// Y3 11
// X1 10
// X2 9
// X3 8
unsigned long last_update = 0;  // time of the last state update
unsigned long last_frame = 0;   // time of the last frame
import math
import os
import time

clear = lambda: os.system('cls')  # clears the console (Windows 'cls')
base = 1.5

def rollercoaster():
    start = raw_input("Start: ")  # Python 2 input; prompt for the starting value
/**
 * Description
 * I hate Java
 * Top Kek m8
 * @version 1.0 from 30.09.2016
 * @author  Ich
 */
import java.util.Scanner;
import java.util.Arrays;
import java.util.Random;

public class Rechner {
  static Scanner sc = new Scanner(System.in); // new scanner
  static boolean var = false;
  static float ergebnis = 0;
  static float[] arrgebnis;

  public static void main(String[] args) {
    System.out.println("Guten Tag. Dies ist ein deutscher Taschenrechner/Sortierer."); // greeting: "Good day. This is a German calculator/sorter."
    Abfrage();
  } // end of main

  public static void Abfrage() {
    System.out.print("Was möchten Sie tun? (help für Hilfe)"); // asks "What would you like to do? (help for help)"
    String Operation = sc.nextLine();
    switch (Operation) { // dispatch on the input
      case "achterbahn": // (rollercoaster)
        ergebnis = Achterbahn(var);
        break;
      case "addition": // addition
        ergebnis = Addition(var, ergebnis);
        break;
      case "subtraktion": // subtraction
        ergebnis = Subtraktion(var);
        break;
      case "multiplikation": // multiplication
        ergebnis = Multiplikation(var);
        break;
"""Repeatedly generates a wordcloud from the PornMd live search. requires wordcloud, Pillow, and urllib2"""
import urllib2
import random
from wordcloud import WordCloud
checkList = ["anal","teen","lesbian","mom","amateur","compilation","college","celebrity","couch","cum","cock","handjob","creampie"]
while "my dick is large":
string = ""
dict = {}
for i in range(1000):
req = urllib2.Request("http://www.pornmd.com/randomwords?cache=" + str(random.random()))
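The preview cuts off before the response is read or anything is drawn. As a hedged, standalone sketch of how the wordcloud API typically turns counts like these into an image (the freqs dict and the words.png filename are illustrative, not from the gist):

from wordcloud import WordCloud

freqs = {"example": 12, "cloud": 7, "words": 3}  # illustrative counts; the gist builds its own
cloud = WordCloud(width=800, height=400).generate_from_frequencies(freqs)
cloud.to_file("words.png")  # renders the cloud through Pillow and writes it to disk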
import urllib2
from bs4 import BeautifulSoup  # the two-argument constructor below is the BeautifulSoup 4 API, not the old BeautifulSoup 3 package

url = 'https://www.explainxkcd.com/wiki/index.php?title=1:_Barrel_-_Part_1&oldid=129291'
req = urllib2.Request(url, headers={'User-Agent': "Magic Browser"})  # send a custom User-Agent instead of urllib2's default
response = urllib2.urlopen(req)
soup = BeautifulSoup(response, 'html.parser')
print soup.getText()  # dump the page's visible text
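urllib2 only exists on Python 2. For reference, a minimal sketch of the same fetch on Python 3's standard library (assuming beautifulsoup4 is installed) would be:

import urllib.request
from bs4 import BeautifulSoup

url = 'https://www.explainxkcd.com/wiki/index.php?title=1:_Barrel_-_Part_1&oldid=129291'
req = urllib.request.Request(url, headers={'User-Agent': 'Magic Browser'})
response = urllib.request.urlopen(req)
soup = BeautifulSoup(response, 'html.parser')
print(soup.get_text())  # same visible-text dump as above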
import os

def getFiles(path="."):
    """Collect the lines of every .py file in path, keyed by a running index."""
    counter = 0
    code = {}
    for file in os.listdir(path):
        if file.endswith(".py"):
            with open(os.path.join(path, file), "r") as myfile:  # join with path so directories other than the cwd work
                code[counter] = myfile.readlines()
            counter += 1
    return code
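A possible usage of the function as written above (the directory and the printed message are illustrative):

if __name__ == "__main__":
    snippets = getFiles(".")  # scan the current directory
    print("collected %d .py files" % len(snippets))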