
🐍 Python
Multiprocessing

Updated at 2014-01-18 11:48
import urllib2
from multiprocessing.dummy import Pool as ThreadPool
# The multiprocessing module works with processes, which do not share memory space.
# The multiprocessing.dummy module works with threads, which do share memory space.
# Both modules provide the same API.

urls = [
    'http://www.google.com',
    'http://www.yahoo.com',
    'http://www.reddit.com',
    'http://stackoverflow.com',
    'http://python.org']

# Make a pool of 4 workers; leave the argument out and it defaults to the number of CPU cores.
pool = ThreadPool(4)

# Open each URL in a worker thread;
# results are returned in the same order as the input URLs.
results = pool.map(urllib2.urlopen, urls)

# Close the pool and wait for the work to finish.
pool.close()
pool.join()

for result in results:
    print("%s : %d" % (result.geturl(), result.getcode()))
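
Because both modules expose the same Pool API, switching to real processes for CPU-bound work only means changing the import. A minimal sketch, using a made-up square() function and inputs that are not from the original:

from multiprocessing import Pool

def square(n):
    return n * n

# The __main__ guard matters on platforms that spawn fresh processes
# (e.g. Windows), so workers don't re-execute the module's top level.
if __name__ == '__main__':
    pool = Pool(4)

    # Processes do not share memory; arguments and results are pickled
    # and passed between the parent process and the workers.
    results = pool.map(square, range(10))

    pool.close()
    pool.join()

    print(results)  # [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]

The close/join pattern is identical to the threaded version; only the import changes.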
