Multiprocessing jobs with a worker pool in Python
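A short snippet that fans a list of OpenTSDB metrics out to a pool of ten worker processes; each worker runs a tsdb scan export, filters it through grep, and gzips the result to disk.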

# Multiprocessing jobs with a worker pool in Python

from multiprocessing import Pool
from subprocess import Popen, PIPE

def proc(metric):
    print("Working on %s" % metric)
    cmd1 = "/usr/bin/tsdb scan --import 2012/04/10 sum %s" % metric
    cmd2 = "grep %s" % metric

    # Build the pipeline: tsdb scan | grep <metric> | gzip > <metric>.dat.gz
    with open("%s.dat.gz" % metric, 'wb') as logfile:
        p1 = Popen(cmd1.split(), stdout=PIPE)
        p2 = Popen(cmd2.split(), stdin=p1.stdout, stdout=PIPE)
        p1.stdout.close()  # let p1 receive SIGPIPE if p2 exits early
        p3 = Popen(['gzip'], stdin=p2.stdout, stdout=logfile)
        p2.stdout.close()  # likewise for p2 if gzip exits early
        p3.communicate()   # wait for gzip to finish writing

if __name__ == '__main__':
    # Read one metric name per line.
    with open('file_list_of_metrics.txt') as f:
        metrics = [line.rstrip() for line in f]
    pool = Pool(processes=10)
    # chunksize=1: hand each worker one metric at a time, since every job is long-running.
    pool.map(proc, metrics, 1)
    pool.close()
    pool.join()
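Note that pool.map blocks until every metric is done and returns results in input order; the third argument (1 here) is the chunksize, so each worker pulls one metric at a time, which suits long-running jobs like these. If you would rather see progress as jobs finish, imap_unordered is a drop-in alternative. A minimal sketch, assuming proc() is modified to return the metric it processed:

# A minimal sketch, assuming proc() is changed to return its metric name
# so finished jobs can be reported as they complete.
from multiprocessing import Pool

def proc(metric):
    # ... run the same tsdb | grep | gzip pipeline as above ...
    return metric

if __name__ == '__main__':
    with open('file_list_of_metrics.txt') as f:
        metrics = [line.rstrip() for line in f]
    with Pool(processes=10) as pool:
        # imap_unordered yields results in completion order,
        # so one slow metric doesn't hold up progress reporting.
        for done in pool.imap_unordered(proc, metrics, chunksize=1):
            print("Finished %s" % done)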
