1 """
2 'parallel.py' is a set of functions that make parallelizing functions much
3 easier. Namely, a 'pmap' function that will use the single-threaded version if
4 parallelization isn't available.
5 """
6 import math
7 import multiprocessing as mp
8 import sys
9
10 from bpm import conf
11
12
# Total number of abstract "steps" the whole program is expected to take;
# the shared counter below is measured against this to compute a percentage
# for the progress bar.
steps = 10000000


# Relative cost weights of the program's major phases, used to apportion
# `steps` across phases.  NOTE(review): only 'load_genes' is visible here --
# presumably other phases contribute entries elsewhere; confirm.
costs = { 'load_genes': 50,
        }


# Process-shared integer counter ('i' typecode, initialized to 0) that tracks
# completed work; shared across worker processes via multiprocessing.Value.
counter = mp.Value('i', 0)
21
def pmap(*args, **kargs):
    '''
    A convenient wrapper that parallelizes a map if the capability
    exists (more than one worker process configured), and degrades to the
    regular single-threaded builtin `map` otherwise.

    All positional and keyword arguments are forwarded unchanged to either
    `multiprocessing.Pool.map` or `map`, so the calling convention is
    exactly that of `map(func, iterable, ...)`.
    '''
    if conf.processes > 1:
        pool = mp.Pool(processes=conf.processes)
        try:
            # Pool.map blocks until all results are ready, so the pool can
            # be torn down immediately afterwards.  The original created the
            # pool inline and never closed it, leaking worker processes.
            return pool.map(*args, **kargs)
        finally:
            pool.close()
            pool.join()
    else:
        return map(*args, **kargs)
31
def print_progress(final=False):
    '''
    Prints a nice little progress bar to stderr.  It is reasonably, but not
    perfectly, accurate: it compares the shared step counter against the
    module-level `steps` total and should give a rough idea of how much
    longer the program needs to run.

    :param final: when True, terminate the bar with a newline so subsequent
                  output starts on a fresh line.

    Does nothing unless progress output is enabled in the configuration.
    NOTE(review): the original `def` line was lost in extraction; the name
    and `final=False` signature are reconstructed from the body.
    '''
    if not conf.progress:
        return

    # Width of the bar in characters.
    spaces = 60

    if counter.value == steps:
        # Exactly done: avoid any float rounding artifacts at 100%.
        progress = spaces
        blanks = 0
        pnumber = 100
    else:
        percent = float(counter.value) / float(steps)
        progress = int(math.ceil(percent * spaces))
        blanks = spaces - progress
        pnumber = math.ceil(percent * 100)

    # '\r' rewinds to the start of the line so the bar redraws in place;
    # the trailing comma suppresses the newline (Python 2 print statement).
    print >> sys.stderr, \
        '\r[%s%s] %d%%' % ('#' * progress, ' ' * blanks, pnumber),

    if final:
        print >> sys.stderr
    sys.stderr.flush()
59
def inc_progress(incby=1):
    '''
    Each unit this counter is increased by represents a "step" in the
    program.  It is then used to show a progress bar.

    :param incby: number of steps to add to the shared counter (default 1).

    NOTE(review): the original `def` line was lost in extraction; the name
    and `incby=1` signature are reconstructed from the body.
    '''
    # `+=` on a multiprocessing.Value is a read-modify-write and is NOT
    # atomic across processes; take the Value's lock so concurrent workers
    # cannot lose increments.
    with counter.get_lock():
        counter.value += incby
66
def get_progress():
    '''
    Simple accessor: returns the current value of the shared progress
    counter.

    NOTE(review): the original `def` line was lost in extraction; the
    zero-argument signature is reconstructed from the body.
    '''
    return counter.value
72