__author__ = 'H1ac0k'
# demo1: demonstrates basic thread usage
# import threading
# import datetime
#
# class ThreadClass(threading.Thread):
#     def run(self):
#         now = datetime.datetime.now()
#         print("%s says Hello world at time: %s" % (self.name, now))
#
# for i in range(2):
#     t = ThreadClass()
#     t.start()
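# note: demo1 starts its threads but never waits for them; below is a minimal
# sketch of the same demo that keeps the thread objects and joins them, so the
# main thread blocks until both have printed (variable names are illustrative):
# import threading
# import datetime
#
# class ThreadClass(threading.Thread):
#     def run(self):
#         now = datetime.datetime.now()
#         print("%s says Hello world at time: %s" % (self.name, now))
#
# threads = [ThreadClass() for _ in range(2)]
# for t in threads:
#     t.start()
# for t in threads:
#     t.join()  # block until this thread's run() has returned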
# demo2-1: read the front pages of 4 sites with a single thread
# import urllib.request
# import time
#
# hosts = ['http://www.163.com', 'http://www.qq.com', 'http://www.sohu.com', 'http://www.ibm.com']
# start = time.time()
# for host in hosts:
#     url = urllib.request.urlopen(host)
#     print(url.read(10))  # print the first 10 bytes as a quick sanity check
# print("Elapsed Time: %s" % (time.time() - start))
# demo2-2: read the front pages of 4 sites with multiple threads
# import queue
# import threading
# import urllib.request
# import time
#
# hosts = ['http://www.163.com', 'http://www.qq.com', 'http://www.sohu.com', 'http://www.ibm.com']
# my_queue = queue.Queue()
#
# class ThreadUrl(threading.Thread):
#     # worker thread: pull a host from the queue, fetch it, mark the task done
#     def __init__(self, queue):
#         threading.Thread.__init__(self)
#         self.queue = queue
#
#     def run(self):
#         while True:
#             host = self.queue.get()
#             url = urllib.request.urlopen(host)
#             print(url.read(10))
#             self.queue.task_done()
#
# start = time.time()
#
# def main():
#     for i in range(4):
#         t = ThreadUrl(my_queue)
#         t.daemon = True  # daemon workers do not keep the process alive
#         t.start()
#
#     for host in hosts:
#         my_queue.put(host)
#     my_queue.join()  # block until task_done() has been called for every host
#
# main()
# print("Elapsed Time: %s " % (time.time() - start))
# demo2-3: multiple threads with multiple queues (fetch stage + parse stage)
# import queue
# import threading
# import urllib.request
# import time
# from bs4 import BeautifulSoup
#
# hosts = ['http://www.163.com', 'http://www.qq.com', 'http://www.sohu.com', 'http://www.ibm.com']
# in_queue = queue.Queue()
# out_queue = queue.Queue()
#
# class ThreadUrl(threading.Thread):
#     # stage 1: fetch a host from in_queue and put the raw page into out_queue
#     def __init__(self, in_queue, out_queue):
#         threading.Thread.__init__(self)
#         self.in_queue = in_queue
#         self.out_queue = out_queue
#
#     def run(self):
#         while True:
#             host = self.in_queue.get()
#             url = urllib.request.urlopen(host)
#             chunk = url.read()
#             self.out_queue.put(chunk)
#             self.in_queue.task_done()
#
# class DataMineThread(threading.Thread):
#     # stage 2: parse a page taken from out_queue and print its <title> tags
#     def __init__(self, out_queue):
#         threading.Thread.__init__(self)
#         self.out_queue = out_queue
#
#     def run(self):
#         while True:
#             chunk = self.out_queue.get()
#             soup = BeautifulSoup(chunk, 'html.parser')
#             print(soup.find_all('title'))
#             self.out_queue.task_done()
#
# start = time.time()
#
# def main():
#     for i in range(4):
#         t = ThreadUrl(in_queue, out_queue)
#         t.daemon = True
#         t.start()
#
#     for host in hosts:
#         in_queue.put(host)
#
#     for i in range(4):
#         dt = DataMineThread(out_queue)
#         dt.daemon = True
#         dt.start()
#
#     in_queue.join()
#     out_queue.join()
#
# main()
# print("Elapsed Time: %s" % (time.time() - start))