from __future__ import division
import os
import math
import logging
import re
import xml.dom.minidom as minidom

logger = logging.getLogger('madevent.stdout')

pjoin = os.path.join
try:
    import madgraph
except ImportError:
    import internal.cluster as cluster
    import internal.misc as misc
    from internal import MadGraph5Error
else:
    import madgraph.various.cluster as cluster
    import madgraph.various.misc as misc
    from madgraph import MadGraph5Error

37 """ A class to store statistics about a MadEvent run. """
38
40 """ Initialize the run dictionary. For now, the same as a regular
41 dictionary, except that we specify some default statistics. """
42
43 madloop_statistics = {
44 'unknown_stability' : 0,
45 'stable_points' : 0,
46 'unstable_points' : 0,
47 'exceptional_points' : 0,
48 'DP_usage' : 0,
49 'QP_usage' : 0,
50 'DP_init_usage' : 0,
51 'QP_init_usage' : 0,
52 'CutTools_DP_usage' : 0,
53 'CutTools_QP_usage' : 0,
54 'PJFry_usage' : 0,
55 'Golem_usage' : 0,
56 'IREGI_usage' : 0,
57 'Samurai_usage' : 0,
58 'Ninja_usage' : 0,
59 'Ninja_QP_usage' : 0,
60 'COLLIER_usage' : 0,
61 'max_precision' : 1.0e99,
62 'min_precision' : 0.0,
63 'averaged_timing' : 0.0,
64 'n_madloop_calls' : 0,
65 'cumulative_timing' : 0.0,
66 'skipped_subchannel' : 0
67
68 }
69
70 for key, value in madloop_statistics.items():
71 self[key] = value
72
73 super(dict,self).__init__(*args, **opts)
74
76 """ Update the current statitistics with the new_stats specified."""
77
78 if isinstance(new_stats,RunStatistics):
79 new_stats = [new_stats, ]
80 elif isinstance(new_stats,list):
81 if any(not isinstance(_,RunStatistics) for _ in new_stats):
82 raise MadGraph5Error, "The 'new_stats' argument of the function "+\
83 "'updtate_statistics' must be a (possibly list of) "+\
84 "RunStatistics instance."
85
86 keys = set([])
87 for stat in [self,]+new_stats:
88 keys |= set(stat.keys())
89
90 new_stats = new_stats+[self,]
91 for key in keys:
92
93 if key=='max_precision':
94
95 self[key] = min( _[key] for _ in new_stats if key in _)
96 elif key=='min_precision':
97
98 self[key] = max( _[key] for _ in new_stats if key in _)
99 elif key=='averaged_timing':
100 n_madloop_calls = sum(_['n_madloop_calls'] for _ in new_stats if
101 'n_madloop_calls' in _)
102 if n_madloop_calls > 0 :
103 self[key] = sum(_[key]*_['n_madloop_calls'] for _ in
104 new_stats if (key in _ and 'n_madloop_calls' in _) )/n_madloop_calls
105 else:
106
107 self[key] = sum(_[key] for _ in new_stats if key in _)
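
    # Aggregation example (invented numbers): merging a RunStatistics with 100
    # MadLoop calls and an averaged_timing of 0.002 into one with 300 calls at
    # 0.004 yields n_madloop_calls = 400 and averaged_timing =
    # (100*0.002 + 300*0.004)/400 = 0.0035, while 'max_precision' keeps the
    # smallest of the two stored values and 'min_precision' the largest.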

    def load_statistics(self, xml_node):
        """ Load the statistics from an xml node. """

        def getData(Node):
            return Node.childNodes[0].data

        u_return_code = xml_node.getElementsByTagName('u_return_code')
        u_codes = [int(_) for _ in getData(u_return_code[0]).split(',')]
        self['CutTools_DP_usage'] = u_codes[1]
        self['PJFry_usage'] = u_codes[2]
        self['IREGI_usage'] = u_codes[3]
        self['Golem_usage'] = u_codes[4]
        self['Samurai_usage'] = u_codes[5]
        self['Ninja_usage'] = u_codes[6]
        self['COLLIER_usage'] = u_codes[7]
        self['Ninja_QP_usage'] = u_codes[8]
        self['CutTools_QP_usage'] = u_codes[9]
        t_return_code = xml_node.getElementsByTagName('t_return_code')
        t_codes = [int(_) for _ in getData(t_return_code[0]).split(',')]
        self['DP_usage'] = t_codes[1]
        self['QP_usage'] = t_codes[2]
        self['DP_init_usage'] = t_codes[3]
        self['QP_init_usage'] = t_codes[4]
        h_return_code = xml_node.getElementsByTagName('h_return_code')
        h_codes = [int(_) for _ in getData(h_return_code[0]).split(',')]
        self['unknown_stability'] = h_codes[1]
        self['stable_points'] = h_codes[2]
        self['unstable_points'] = h_codes[3]
        self['exceptional_points'] = h_codes[4]
        average_time = xml_node.getElementsByTagName('average_time')
        avg_time = float(getData(average_time[0]))
        self['averaged_timing'] = avg_time
        cumulated_time = xml_node.getElementsByTagName('cumulated_time')
        cumul_time = float(getData(cumulated_time[0]))
        self['cumulative_timing'] = cumul_time
        max_prec = xml_node.getElementsByTagName('max_prec')
        max_prec = float(getData(max_prec[0]))
        # The largest value of the MadLoop precision estimator is the worst
        # precision reached, so it is stored as 'min_precision'.
        self['min_precision'] = max_prec
        min_prec = xml_node.getElementsByTagName('min_prec')
        min_prec = float(getData(min_prec[0]))
        # Conversely, the smallest value is the best precision reached.
        self['max_precision'] = min_prec
        n_evals = xml_node.getElementsByTagName('n_evals')
        n_evals = int(getData(n_evals[0]))
        self['n_madloop_calls'] = n_evals
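
    # A sketch of the kind of <run_statistics> node expected by the parser
    # above (the tag names are the ones looked up in the code; the numbers are
    # made up and the first entry of each comma-separated list is ignored):
    #
    #   <run_statistics>
    #     <u_return_code>0,120,0,0,0,0,0,3,0,2</u_return_code>
    #     <t_return_code>0,118,7,118,7</t_return_code>
    #     <h_return_code>0,0,120,4,1</h_return_code>
    #     <average_time>0.0123</average_time>
    #     <cumulated_time>1.54</cumulated_time>
    #     <max_prec>1e-3</max_prec>
    #     <min_prec>1e-9</min_prec>
    #     <n_evals>125</n_evals>
    #   </run_statistics>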

    def nice_output(self, G, no_warning=False):
        """Return a short string summarizing the run statistics gathered for
        the channel G."""

        if self['n_madloop_calls'] == 0:
            return ''

        stability = [
          ('tot#',   self['n_madloop_calls']),
          ('unkwn#', self['unknown_stability']),
          ('UPS%',   float(self['unstable_points'])/self['n_madloop_calls']),
          ('EPS#',   self['exceptional_points'])]

        stability = [_ for _ in stability if _[1] > 0 or _[0] in ['UPS%', 'EPS#']]
        stability = [(_[0], '%i' % _[1]) if isinstance(_[1], int) else
                     (_[0], '%.3g' % (100.0*_[1])) for _ in stability]

        tools_used = [
          ('CT_DP',    float(self['CutTools_DP_usage'])/self['n_madloop_calls']),
          ('CT_QP',    float(self['CutTools_QP_usage'])/self['n_madloop_calls']),
          ('PJFry',    float(self['PJFry_usage'])/self['n_madloop_calls']),
          ('Golem',    float(self['Golem_usage'])/self['n_madloop_calls']),
          ('IREGI',    float(self['IREGI_usage'])/self['n_madloop_calls']),
          ('Samurai',  float(self['Samurai_usage'])/self['n_madloop_calls']),
          ('COLLIER',  float(self['COLLIER_usage'])/self['n_madloop_calls']),
          ('Ninja_DP', float(self['Ninja_usage'])/self['n_madloop_calls']),
          ('Ninja_QP', float(self['Ninja_QP_usage'])/self['n_madloop_calls'])]

        tools_used = [(_[0], '%.3g' % (100.0*_[1])) for _ in tools_used
                                                                if _[1] > 0.0]

        to_print = [
            ('%s statistics:' % (G if isinstance(G, str) else str(os.path.join(*G)))
             + (' %s,' % misc.format_time(int(self['cumulative_timing'])) if
                                 int(self['cumulative_timing']) > 0 else '')
             + ((' Avg. ML timing = %i ms' % int(1.0e3*self['averaged_timing'])) if
                  self['averaged_timing'] > 0.001 else
                (' Avg. ML timing = %i mus' % int(1.0e6*self['averaged_timing'])))
             + ', Min precision = %.2e' % self['min_precision']),
            ' -> Stability %s' % dict(stability),
            ' -> Red. tools usage in %% %s' % dict(tools_used)
        ]

        if self['skipped_subchannel'] > 0 and not no_warning:
            to_print.append("WARNING: Some events with large weights have been "+\
                "discarded. This happened %s times." % self['skipped_subchannel'])

        return ('\n'.join(to_print)).replace("'", " ")
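
    # The summary returned above looks roughly like the following (illustrative
    # only: dictionary ordering and the exact misc.format_time output may vary):
    #
    #   P1_qq_ttx/G1 statistics: <total time>, Avg. ML timing = 12 ms, Min precision = 1.00e-03
    #    -> Stability { tot# :  1000 ,  UPS% :  0.5 ,  EPS# :  1 }
    #    -> Red. tools usage in % { CT_DP :  95 ,  Ninja_QP :  5 }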

    def has_warning(self):
        """Return True if any statistic needs to be reported as a warning.
        When this is True, get_warning_text should return the corresponding
        warning message.
        """

        if self['n_madloop_calls'] > 0:
            fraction = self['exceptional_points']/float(self['n_madloop_calls'])
        else:
            fraction = 0.0

        if self['skipped_subchannel'] > 0:
            return True
        elif fraction > 1.0e-4:
            return True
        else:
            return False

    def get_warning_text(self):
        """Return a string with all the identified warnings."""

        to_print = []
        if self['skipped_subchannel'] > 0:
            to_print.append("Some events with large weights have been discarded."+\
                " This happened %s times." % self['skipped_subchannel'])
        if self['n_madloop_calls'] > 0:
            fraction = self['exceptional_points']/float(self['n_madloop_calls'])
            if fraction > 1.0e-4:
                to_print.append("Some phase-space points with numerical instabilities "+\
                    "have been set to a zero matrix-element (%.3g%%)" % (100.0*fraction))

        return ('\n'.join(to_print)).replace("'", " ")


class OneResult(object):

    def __init__(self, name):
        """Initialize all data."""

        self.run_statistics = RunStatistics()
        self.name = name
        self.parent_name = ''
        self.axsec = 0
        self.xsec = 0
        self.xerru = 0
        self.xerrc = 0
        self.nevents = 0
        self.nw = 0
        self.maxit = 0
        self.nunwgt = 0
        self.luminosity = 0
        self.mfactor = 1
        self.ysec_iter = []
        self.yerr_iter = []
        self.yasec_iter = []
        self.eff_iter = []
        self.maxwgt_iter = []
        self.maxwgt = 0
        self.th_maxwgt = 0
        self.th_nunwgt = 0
        self.wgt = 0
        return

    def read_results(self, filepath):
        """Read results.dat and fill in the information."""

        if isinstance(filepath, str):
            finput = open(filepath)
        elif isinstance(filepath, file):
            finput = filepath
        else:
            raise Exception, "filepath should be a path or a file descriptor"

        i = 0
        found_xsec_line = False
        for line in finput:
            # Stop at the xml part of the file, which is parsed separately.
            if '<' in line:
                break
            i += 1
            if i == 1:
                def secure_float(d):
                    try:
                        return float(d)
                    except ValueError:
                        # Handle Fortran-style exponents with a missing 'E'
                        # (e.g. '1.234-123').
                        m = re.search(r'''([+-]?[\d.]*)([+-]\d*)''', d)
                        if m:
                            return float(m.group(1))*10**(float(m.group(2)))
                        return

                data = [secure_float(d) for d in line.split()]
                try:
                    self.axsec, self.xerru, self.xerrc, self.nevents, self.nw,\
                        self.maxit, self.nunwgt, self.luminosity, self.wgt, \
                        self.xsec = data[:10]
                except ValueError:
                    log = pjoin(os.path.dirname(filepath), 'log.txt')
                    if os.path.exists(log):
                        if 'end code not correct' in line:
                            error_code = data[4]
                            raise Exception, "Reported error: End code %s \n Full associated log: \n%s"\
                                % (error_code, open(log).read())
                        else:
                            raise Exception, "Wrong formatting in results.dat: %s \n Full associated log: \n%s"\
                                % (line, open(log).read())
                if len(data) > 10:
                    self.maxwgt = data[10]
                if len(data) > 12:
                    self.th_maxwgt, self.th_nunwgt = data[11:13]
                if self.mfactor > 1:
                    self.luminosity /= self.mfactor
                continue
            try:
                l, sec, err, eff, maxwgt, asec = line.split()
                found_xsec_line = True
            except:
                break
            self.ysec_iter.append(secure_float(sec))
            self.yerr_iter.append(secure_float(err))
            self.yasec_iter.append(secure_float(asec))
            self.eff_iter.append(secure_float(eff))
            self.maxwgt_iter.append(secure_float(maxwgt))

        finput.seek(0)
        xml = []
        for line in finput:
            if re.match('^.*<.*>', line):
                xml.append(line)
                break
        for line in finput:
            xml.append(line)

        if xml:
            self.parse_xml_results('\n'.join(xml))

        # Fall back on the 'nevts' file when results.dat does not report any
        # events.
        if self.nevents == 0 and self.nunwgt == 0 and isinstance(filepath, str) and \
              os.path.exists(pjoin(os.path.split(filepath)[0], 'nevts')):
            nevts = int((open(pjoin(os.path.split(filepath)[0], 'nevts')).read()).split()[0])
            self.nevents = nevts
            self.nunwgt = nevts
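
    # For reference, the first (space-separated) line parsed above provides, in
    # order: axsec xerru xerrc nevents nw maxit nunwgt luminosity wgt xsec,
    # optionally followed by maxwgt and then by th_maxwgt th_nunwgt. Each
    # following line describes one iteration as
    # "<iter> <sec> <err> <eff> <maxwgt> <asec>" (see write_results_dat below).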

    def parse_xml_results(self, xml):
        """ Parse the xml part of the results.dat file."""

        dom = minidom.parseString(xml)

        statistics_node = dom.getElementsByTagName("run_statistics")

        if statistics_node:
            try:
                self.run_statistics.load_statistics(statistics_node[0])
            except (ValueError, IndexError):
                logger.warning('Failed to read run statistics from results.dat')

    def set_mfactor(self, value):
        self.mfactor = int(value)

    def change_iterations_number(self, nb_iter):
        """Change the number of iterations for this process."""

        if nb_iter < 1 or len(self.ysec_iter) <= nb_iter:
            return

        # Merge the first (nb_to_rm + 1) iterations into a single one so that
        # exactly nb_iter entries remain.
        nb_to_rm = len(self.ysec_iter) - nb_iter
        ysec = [0]
        yerr = [0]
        for i in range(nb_to_rm + 1):
            ysec[0] += self.ysec_iter[i]
            yerr[0] += self.yerr_iter[i]**2
        ysec[0] /= (nb_to_rm + 1)
        yerr[0] = math.sqrt(yerr[0]) / (nb_to_rm + 1)

        for i in range(1, nb_iter):
            ysec.append(self.ysec_iter[nb_to_rm + i])
            yerr.append(self.yerr_iter[nb_to_rm + i])

        self.ysec_iter = ysec
        self.yerr_iter = yerr
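
    # Illustration with made-up numbers: for ysec_iter = [1.0, 2.0, 3.0, 4.0]
    # and yerr_iter = [0.1, 0.1, 0.1, 0.1], change_iterations_number(3) merges
    # the two first iterations into one, giving ysec_iter = [1.5, 3.0, 4.0] and
    # yerr_iter = [sqrt(0.02)/2, 0.1, 0.1].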

    def get(self, name):

        if name in ['xsec', 'xerru', 'xerrc']:
            return getattr(self, name) * self.mfactor
        elif name in ['luminosity']:
            return getattr(self, name)
        elif name == 'eff':
            return self.get('xerr')*math.sqrt(self.nevents/(self.xsec+1e-99))
        elif name == 'xerr':
            return math.sqrt(self.xerru**2 + self.xerrc**2)
        elif name == 'name':
            return pjoin(self.parent_name, self.name)
        else:
            return getattr(self, name)
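
    # For instance, get('xerr') combines the uncorrelated and correlated errors
    # in quadrature: with xerru = 0.3 and xerrc = 0.4 it returns 0.5, while
    # get('xsec') returns the stored cross-section scaled by mfactor.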


class Combine_results(list, OneResult):

    def __init__(self, name):

        list.__init__(self)
        OneResult.__init__(self, name)

    def add_results(self, name, filepath, mfactor=1):
        """Read the data in the file."""
        try:
            oneresult = OneResult(name)
            oneresult.set_mfactor(mfactor)
            oneresult.read_results(filepath)
            oneresult.parent_name = self.name
            self.append(oneresult)
            return oneresult
        except Exception:
            logger.critical("Error when reading %s" % filepath)
            raise

    def compute_values(self, update_statistics=False):
        """Compute the values associated to this combination."""

        self.compute_iterations()
        self.axsec = sum([one.axsec for one in self])
        self.xsec = sum([one.xsec for one in self])
        self.xerrc = sum([one.xerrc for one in self])
        self.xerru = math.sqrt(sum([one.xerru**2 for one in self]))

        self.nevents = sum([one.nevents for one in self])
        self.nw = sum([one.nw for one in self])
        self.maxit = len(self.yerr_iter)
        self.nunwgt = sum([one.nunwgt for one in self])
        self.wgt = 0
        self.luminosity = min([0]+[one.luminosity for one in self])
        if update_statistics:
            self.run_statistics.aggregate_statistics([_.run_statistics for _ in self])

    def compute_average(self, error=None):
        """Compute the values for this combination by averaging over the jobs."""

        nbjobs = len(self)
        if not nbjobs:
            return
        max_xsec = max(one.xsec for one in self)
        min_xsec = min(one.xsec for one in self)
        self.axsec = sum([one.axsec for one in self]) / nbjobs
        self.xsec = sum([one.xsec for one in self]) / nbjobs
        self.xerrc = sum([one.xerrc for one in self]) / nbjobs
        self.xerru = math.sqrt(sum([one.xerru**2 for one in self])) / nbjobs
        if error:
            self.xerrc = error
            self.xerru = error

        self.nevents = sum([one.nevents for one in self])
        self.nw = 0
        self.maxit = 0
        self.nunwgt = sum([one.nunwgt for one in self])
        self.wgt = 0
        self.luminosity = sum([one.luminosity for one in self])
        self.ysec_iter = []
        self.yerr_iter = []
        self.th_maxwgt = 0.0
        self.th_nunwgt = 0
        for result in self:
            self.ysec_iter += result.ysec_iter
            self.yerr_iter += result.yerr_iter
            self.yasec_iter += result.yasec_iter
            self.eff_iter += result.eff_iter
            self.maxwgt_iter += result.maxwgt_iter

        # Discard jobs whose cross-section is inconsistent with the average and
        # recompute the average without them.
        onefail = False
        for one in list(self):
            if one.xsec < (self.xsec - 25 * one.xerru):
                if not onefail:
                    logger.debug('multi-run results are inconsistent: %s < %s - 25 * %s: assign error %s',
                                 one.xsec, self.xsec, one.xerru,
                                 error if error else max_xsec - min_xsec)
                onefail = True
                self.remove(one)
        if onefail:
            if error:
                return self.compute_average(error)
            else:
                return self.compute_average((max_xsec - min_xsec) / 2.)

    def compute_iterations(self):
        """Compute the iterations so that a chi-square test on the stability of
        the integral can be performed."""

        # Align every result on the smallest number of iterations available.
        nb_iter = min([len(a.ysec_iter) for a in self]) if self else 0

        for oneresult in self:
            oneresult.change_iterations_number(nb_iter)

        # Sum the per-iteration cross-sections and combine the errors in
        # quadrature.
        for i in range(nb_iter):
            value = [one.ysec_iter[i] for one in self]
            error = [one.yerr_iter[i]**2 for one in self]
            self.ysec_iter.append(sum(value))
            self.yerr_iter.append(math.sqrt(sum(error)))

    template_file = \
"""
%(diagram_link)s
<BR>
<b>s= %(cross).5g &#177; %(error).3g (%(unit)s)</b><br><br>
<table class="sortable" id='tablesort'>
<tr><th>Graph</th>
    <th> %(result_type)s</th>
    <th>Error</th>
    <th>Events (K)</th>
    <th>Unwgt</th>
    <th>Luminosity</th>
</tr>
%(table_lines)s
</table>
</center>
<br><br><br>
"""
    table_line_template = \
"""
<tr><td align=right>%(P_title)s</td>
    <td align=right><a id="%(P_link)s" href=%(P_link)s onClick="check_link('%(P_link)s','%(mod_P_link)s','%(P_link)s')"> %(cross)s </a> </td>
    <td align=right> %(error)s</td>
    <td align=right> %(events)s</td>
    <td align=right> %(unweighted)s</td>
    <td align=right> %(luminosity)s</td>
</tr>
"""

    def get_html(self, run, unit, me_dir=[]):
        """Return the html output for this combination of results."""

        # Cross-sections of the channels are accumulated per P directory.
        P_grouping = {}

        tables_line = ''
        for oneresult in self:
            if oneresult.name.startswith('P'):
                title = '<a href=../../SubProcesses/%(P)s/diagrams.html>%(P)s</a>' \
                                                         % {'P': oneresult.name}
                P = oneresult.name.split('_', 1)[0]
                if P in P_grouping:
                    P_grouping[P] += float(oneresult.xsec)
                else:
                    P_grouping[P] = float(oneresult.xsec)
            else:
                title = oneresult.name

            if not isinstance(oneresult, Combine_results):
                # This is a channel G: point to its log file.
                if os.path.exists(pjoin(me_dir, 'Events', run, 'alllogs_1.html')):
                    link = '../../Events/%(R)s/alllogs_1.html#/%(P)s/%(G)s' % \
                                         {'P': os.path.basename(self.name),
                                          'G': oneresult.name,
                                          'R': run}
                    mod_link = link
                elif os.path.exists(pjoin(me_dir, 'Events', run, 'alllogs_0.html')):
                    link = '../../Events/%(R)s/alllogs_0.html#/%(P)s/%(G)s' % \
                                         {'P': os.path.basename(self.name),
                                          'G': oneresult.name,
                                          'R': run}
                    mod_link = link
                else:
                    link = '../../SubProcesses/%(P)s/%(G)s/%(R)s_log.txt' % \
                                         {'P': os.path.basename(self.name),
                                          'G': oneresult.name,
                                          'R': run}
                    mod_link = '../../SubProcesses/%(P)s/%(G)s/log.txt' % \
                                         {'P': os.path.basename(self.name),
                                          'G': oneresult.name}
                if not os.path.exists(link) and not os.path.exists(mod_link):
                    P = os.path.basename(self.name)
                    base = pjoin(me_dir, 'SubProcesses', P, os.path.dirname(link))
                    pos = [pjoin(base, c) for c in os.listdir(base) if c.endswith('.log')]
                    if len(pos) == 1:
                        link = pos[0]
            else:
                # This is itself a Combine_results (a P directory): link to its
                # anchor further down the same page.
                link = '#%s' % oneresult.name
                mod_link = link

            dico = {'P_title': title,
                    'P_link': link,
                    'mod_P_link': mod_link,
                    'cross': '%.4g' % oneresult.xsec,
                    'error': '%.3g' % oneresult.xerru,
                    'events': oneresult.nevents/1000.0,
                    'unweighted': oneresult.nunwgt,
                    'luminosity': '%.3g' % oneresult.luminosity
                   }

            tables_line += self.table_line_template % dico

        for P_name, cross in P_grouping.items():
            dico = {'P_title': '%s sum' % P_name,
                    'P_link': './results.html',
                    'mod_P_link': '',
                    'cross': cross,
                    'error': '',
                    'events': '',
                    'unweighted': '',
                    'luminosity': ''
                   }
            tables_line += self.table_line_template % dico

        if self.name.startswith('P'):
            title = '<dt><a name=%(P)s href=../../SubProcesses/%(P)s/diagrams.html>%(P)s</a></dt><dd>' \
                                                              % {'P': self.name}
        else:
            title = ''

        dico = {'cross': self.xsec,
                'abscross': self.axsec,
                'error': self.xerru,
                'unit': unit,
                'result_type': 'Cross-Section',
                'table_lines': tables_line,
                'diagram_link': title
               }

        html_text = self.template_file % dico
        return html_text

    def write_results_dat(self, output_path):
        """Write a correctly formatted results.dat."""

        def fstr(nb):
            data = '%E' % nb
            if data == 'NAN':
                nb, power = 0, 0
            else:
                nb, power = data.split('E')
                nb = float(nb) / 10
                power = int(power) + 1
            return '%.5fE%+03i' % (nb, power)

        line = '%s %s %s %i %i %i %i %s %s %s %s %s %i\n' % (fstr(self.axsec), fstr(self.xerru),
                fstr(self.xerrc), self.nevents, self.nw, self.maxit, self.nunwgt,
                fstr(self.luminosity), fstr(self.wgt), fstr(self.xsec), fstr(self.maxwgt),
                fstr(self.th_maxwgt), self.th_nunwgt)
        fsock = open(output_path, 'w')
        fsock.writelines(line)
        for i in range(len(self.ysec_iter)):
            line = '%s %s %s %s %s %s\n' % (i+1, self.ysec_iter[i], self.yerr_iter[i],
                    self.eff_iter[i], self.maxwgt_iter[i], self.yasec_iter[i])
            fsock.writelines(line)
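
    # A worked example of the formatting above: '%E' % 1234.5 == '1.234500E+03',
    # which fstr renormalises to a leading-zero mantissa, so
    # fstr(1234.5) == '0.12345E+04' (and fstr(0.0) == '0.00000E+01').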


results_header = """
<head>
    <title>Process results</title>
    <script type="text/javascript" src="../sortable.js"></script>
    <link rel=stylesheet href="../mgstyle.css" type="text/css">
</head>
<body>
<script type="text/javascript">
function UrlExists(url) {
  var http = new XMLHttpRequest();
  http.open('HEAD', url, false);
  try{
     http.send()
  }
  catch(err){
   return 1==2;
  }
  return http.status!=404;
}
function check_link(url,alt, id){
    var obj = document.getElementById(id);
    if ( ! UrlExists(url)){
       if ( ! UrlExists(alt)){
         obj.href = alt;
         return true;
       }
       obj.href = alt;
       return false;
    }
    obj.href = url;
    return 1==1;
}
</script>
"""

def collect_result(cmd, folder_names=None, jobs=None, main_dir=None):
    """Collect the results.dat of all channels into a Combine_results object."""

    run = cmd.results.current['run_name']
    all = Combine_results(run)

    for Pdir in cmd.get_Pdir():
        P_comb = Combine_results(Pdir)

        if jobs:
            for job in filter(lambda j: j['p_dir'] in Pdir, jobs):
                P_comb.add_results(os.path.basename(job['dirname']),
                                   pjoin(job['dirname'], 'results.dat'))
        elif folder_names:
            try:
                for line in open(pjoin(Pdir, 'symfact.dat')):
                    name, mfactor = line.split()
                    if float(mfactor) < 0:
                        continue
                    if os.path.exists(pjoin(Pdir, 'ajob.no_ps.log')):
                        continue

                    for folder in folder_names:
                        if 'G' in folder:
                            dir = folder.replace('*', name)
                        else:
                            dir = folder.replace('*', '_G' + name)
                        P_comb.add_results(dir, pjoin(Pdir, dir, 'results.dat'), mfactor)
                if jobs:
                    for job in filter(lambda j: j['p_dir'] == Pdir, jobs):
                        P_comb.add_results(os.path.basename(job['dirname']),
                                           pjoin(job['dirname'], 'results.dat'))
            except IOError:
                continue
        else:
            G_dir, mfactors = cmd.get_Gdir(Pdir, symfact=True)
            for G in G_dir:
                if not folder_names:
                    if main_dir:
                        path = pjoin(main_dir, os.path.basename(Pdir),
                                     os.path.basename(G), 'results.dat')
                    else:
                        path = pjoin(G, 'results.dat')
                    P_comb.add_results(os.path.basename(G), path, mfactors[G])

        P_comb.compute_values()
        all.append(P_comb)
    all.compute_values()

    return all


def make_all_html_results(cmd, folder_names=[], jobs=[]):
    """folder_names and jobs have been added for the aMC@NLO runs."""
    run = cmd.results.current['run_name']
    if not os.path.exists(pjoin(cmd.me_dir, 'HTML', run)):
        os.mkdir(pjoin(cmd.me_dir, 'HTML', run))

    unit = cmd.results.unit
    P_text = ""
    Presults = collect_result(cmd, folder_names=folder_names, jobs=jobs)

    for P_comb in Presults:
        P_text += P_comb.get_html(run, unit, cmd.me_dir)
        P_comb.compute_values()
        if cmd.proc_characteristics['ninitial'] == 1:
            P_comb.write_results_dat(pjoin(cmd.me_dir, 'SubProcesses', P_comb.name,
                                           '%s_results.dat' % run))

    Presults.write_results_dat(pjoin(cmd.me_dir, 'SubProcesses', 'results.dat'))

    fsock = open(pjoin(cmd.me_dir, 'HTML', run, 'results.html'), 'w')
    fsock.write(results_header)
    fsock.write('%s <dl>' % Presults.get_html(run, unit, cmd.me_dir))
    fsock.write('%s </dl></body>' % P_text)

    return Presults.xsec, Presults.xerru
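
# A minimal usage sketch (the paths are hypothetical; the usual entry point is
# make_all_html_results(cmd) with a madevent command-interface object providing
# cmd.me_dir, cmd.results and cmd.get_Pdir()):
#
#     result = OneResult('G1')
#     result.read_results(pjoin('SubProcesses', 'P1_qq_ttx', 'G1', 'results.dat'))
#     print result.get('xsec'), '+-', result.get('xerr')
#     print result.run_statistics.nice_output(('P1_qq_ttx', 'G1'))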
783