import threading
# Minimal usage example (placeholder names): `f` is the callable to run in the
# new thread and (a, b, c) are its positional arguments. `f`, `a`, `b`, `c`
# are NOT defined in this file — this line is illustrative, not runnable.
threading.Thread(target=f, args=(a,b,c)).start()
import queue
import threading
import urllib2
# Worker run by each thread: fetch *url* and push the raw response body
# (bytes) onto the shared queue *q*, where the main thread collects it.
# Any exception from urlopen propagates and kills only this worker thread.
def get_url(q, url):
    # `urllib2` is Python 2 only; the rest of this file targets Python 3
    # (`queue` module, `print()`), so use `urllib.request` instead.
    # Imported locally so this function works regardless of the (broken)
    # `import urllib2` at the top of the file.
    from urllib.request import urlopen
    q.put(urlopen(url).read())
theurls = ["http://google.com", "http://yahoo.com"]

# Shared queue: each worker thread deposits one page body here.
q = queue.Queue()

# Start ALL threads first so the downloads actually overlap...
threads = []
for u in theurls:
    t = threading.Thread(target=get_url, args=(q, u))
    t.daemon = True  # don't block interpreter exit if a fetch hangs forever
    t.start()
    threads.append(t)

# ...then join them all. Joining here — instead of calling t.join() right
# after each t.start(), which would serialize the fetches — keeps the
# downloads concurrent while still guaranteeing every result is in the
# queue before we read it. Without these joins the main thread races
# ahead: daemon threads are killed when the main thread exits, so
# `list(q.queue)` could be empty or partial.
for t in threads:
    t.join()

s = list(q.queue)
print(s)
# Thread pools (good resource): https://docs.python.org/3/library/concurrent.futures.html