-
Notifications
You must be signed in to change notification settings - Fork 6
/
parallel_fetch.py
41 lines (36 loc) · 962 Bytes
/
parallel_fetch.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
import time
from multiprocessing import Pool, Process

import dataio as dio
def get_model(seqLen=100, numBalls=2):
    """Construct a DataSaver and fetch one batch of images from it.

    Args:
        seqLen: used for both the min and max sequence length, so every
            sequence has exactly this length.
        numBalls: number of balls in the generated scene.

    Returns:
        Whatever ``DataSaver.fetch()`` produces (presumably images —
        confirm against the dataio module).
    """
    # NOTE(review): purpose of this delay is not visible here — it looks
    # like an artificial stagger for the multiprocessing experiments.
    time.sleep(3)
    saver = dio.DataSaver(wThick=30, isRect=True,
                          mnSeqLen=seqLen, mxSeqLen=seqLen,
                          numBalls=numBalls,
                          mnBallSz=25, mxBallSz=25)
    return saver.fetch()
def run_parallel(numW=4, numJobs=10):
    """Fetch numJobs batches concurrently using a worker pool.

    Args:
        numW: number of pool worker processes.
        numJobs: number of fetch jobs to submit.

    Returns:
        List of the numJobs fetch results, in submission order.
    """
    pool = Pool(numW)
    try:
        handles = []
        for _ in range(numJobs):
            # A fresh DataSaver per job so each worker fetches independently.
            ds = dio.DataSaver(wThick=30, isRect=True,
                               mnSeqLen=100, mxSeqLen=100,
                               numBalls=2, mnBallSz=25, mxBallSz=25)
            handles.append(pool.apply_async(ds.fetch))
        print('All Processes launched')
        # .get() blocks until each job completes (and re-raises worker errors).
        res = [h.get() for h in handles]
    finally:
        # Original leaked the pool; always release the worker processes.
        pool.close()
        pool.join()
    return res
def run_process(numJobs=10):
    """Launch numJobs bare Processes running get_model and wait for them.

    NOTE(review): a multiprocessing.Process cannot return the target's
    result to the parent, so unlike run_parallel this cannot yield the
    fetched images. The original body referenced undefined names
    (``res``, ``r``) and never joined the children; fixed to join each
    process and return its exit code instead.

    Args:
        numJobs: number of child processes to spawn.

    Returns:
        List of the numJobs process exit codes (0 on success).
    """
    prcs = []
    for _ in range(numJobs):
        p = Process(target=get_model)
        p.start()
        prcs.append(p)
    print('All Processes launched')
    res = []
    for p in prcs:
        p.join()  # reap the child; exitcode is only valid after join
        res.append(p.exitcode)
    return res
def run_serial(numJobs=100):
    """Fetch batches one at a time in the current process.

    Generalized from a hard-coded 100 iterations; the default preserves
    the original behavior. The original also discarded every fetched
    batch — results are now collected and returned (callers that ignored
    the old None return are unaffected).

    Args:
        numJobs: number of sequential fetches to perform.

    Returns:
        List of the numJobs fetch results.
    """
    return [get_model() for _ in range(numJobs)]