Wenn es möglich ist, Jobs zu parallelisieren, kann man Multiprocessing unter Python verwenden.
#!/usr/bin/env python
import os
from multiprocessing import Pool
def worker(job):
    """Compute x ** y for a single (x, y) job tuple and log process info.

    Runs inside a multiprocessing pool worker; prints the parent and
    worker process ids plus the result, then returns the result so it
    is also available to callers that want it (apply_async callers may
    simply ignore it).

    :param job: two-tuple (base, exponent)
    :return: base ** exponent
    """
    x, y = job
    result = x ** y
    # os.getppid historically existed only on POSIX; the guard keeps
    # this example portable to platforms without it.
    if hasattr(os, 'getppid'):
        print("parent process pid:", os.getppid())
    print("process pid:", os.getpid())
    print("result is: ", result)
    print("---")
    return result
if __name__ == '__main__':
    # Exponent pairs that the pool workers will evaluate concurrently.
    jobs = [
        (1, 2), (3, 4), (5, 6),
        (11, 12), (13, 14), (15, 16),
        (21, 22), (23, 24), (25, 26),
    ]
    # Five worker processes; apply_async hands each task off without
    # blocking, so submission order != completion order.
    pool = Pool(processes=5)
    for task in jobs:
        pool.apply_async(worker, args=(task,))
    pool.close()  # no further tasks will be submitted
    pool.join()   # wait for every queued task to finish
Result:
max@cmkdev:~$ python mp.py
parent process pid: 19599
process pid: 19600
result is:  1
---
parent process pid: 19599
process pid: 19601
result is:  81
---
parent process pid: 19599
process pid: 19602
result is:  15625
---
parent process pid: 19599
process pid: 19602
result is:  3138428376721
---
parent process pid: 19599
process pid: 19600
result is:  6568408355712890625
---
parent process pid: 19599
process pid: 19600
result is:  122694327386105632949003612841
---
parent process pid: 19599
process pid: 19600
result is:  480250763996501976790165756943041
---
parent process pid: 19599
process pid: 19602
result is:  2220446049250313080847263336181640625
---
parent process pid: 19599
process pid: 19604
result is:  3937376385699289
---