from multiprocessing import Pool


def product(a, b):
    """Print and return the product of a and b.

    The original only printed (Python 2 print statement); returning the
    value as well is backward compatible and lets Pool.map collect results.
    """
    print(a * b)
    return a * b


def product_helper(args):
    """Auxiliary function to make Pool.map work with multiple arguments.

    Pool.map passes each work item as a single argument, so two-argument
    calls must arrive as a tuple and be star-unpacked here.
    """
    return product(*args)


def parallel_product(list_a, list_b, processes=5):
    """Compute element-wise products of two sequences using a process pool.

    Args:
        list_a: first sequence of numbers.
        list_b: second sequence of numbers; items are paired positionally
            with list_a (extra items in the longer sequence are ignored).
        processes: number of worker processes (default 5, as in the
            original hard-coded Pool(5)).

    Returns:
        List of element-wise products, in input order.
    """
    # Pair matching items into argument tuples for the helper.
    job_args = list(zip(list_a, list_b))
    # Context manager guarantees the pool is closed and joined;
    # the original created Pool(5) and never released its workers.
    with Pool(processes) as p:
        return p.map(product_helper, job_args)


if __name__ == "__main__":
    # Guard is required for multiprocessing: under the 'spawn' start
    # method (Windows/macOS default) workers re-import this module, and
    # unguarded top-level code would recursively create pools.
    exp_a = range(1000)
    exp_b = range(1000)
    parallel_product(exp_a, exp_b)