Package madgraph :: Package madevent :: Module sum_html
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.madevent.sum_html

  1  ################################################################################ 
  2  # 
  3  # Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors 
  4  # 
  5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
  6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
  7  # high-energy processes in the Standard Model and beyond. 
  8  # 
  9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
 10  # distribution. 
 11  # 
 12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
 13  # 
 14  ################################################################################ 
 15  from __future__ import division 
 16  import os 
 17  import math 
 18  import logging 
 19  import re 
 20  import xml.dom.minidom as minidom 
 21   
 22  logger = logging.getLogger('madevent.stdout') # -> stdout 
 23   
 24  pjoin = os.path.join 
 25  try: 
 26      import madgraph 
 27  except ImportError: 
 28      import internal.cluster as cluster 
 29      import internal.misc as misc 
 30      from internal import MadGraph5Error 
 31  else: 
 32      import madgraph.various.cluster as cluster 
 33      import madgraph.various.misc as misc 
 34      from madgraph import MadGraph5Error 
 35   
class RunStatistics(dict):
    """ A class to store statistics about a MadEvent run. """

    def __init__(self, *args, **opts):
        """ Initialize the run dictionary. For now, the same as a regular
        dictionary, except that we specify some default statistics. """

        madloop_statistics = {
            'unknown_stability'  : 0,
            'stable_points'      : 0,
            'unstable_points'    : 0,
            'exceptional_points' : 0,
            'DP_usage'           : 0,
            'QP_usage'           : 0,
            'DP_init_usage'      : 0,
            'QP_init_usage'      : 0,
            'CutTools_DP_usage'  : 0,
            'CutTools_QP_usage'  : 0,
            'PJFry_usage'        : 0,
            'Golem_usage'        : 0,
            'IREGI_usage'        : 0,
            'Samurai_usage'      : 0,
            'Ninja_usage'        : 0,
            'Ninja_QP_usage'     : 0,
            'COLLIER_usage'      : 0,
            'max_precision'      : 1.0e99,
            'min_precision'      : 0.0,
            'averaged_timing'    : 0.0,
            'n_madloop_calls'    : 0,
            'cumulative_timing'  : 0.0,
            'skipped_subchannel' : 0  # number of times that a computation have been
                                      # discarded due to abnormal weight.
        }

        for key, value in madloop_statistics.items():
            self[key] = value

        # BUG FIX: this was 'super(dict, self).__init__(*args, **opts)', which
        # resolves PAST dict in the MRO (to object), so the constructor
        # arguments were never forwarded to dict itself.
        super(RunStatistics, self).__init__(*args, **opts)
74
75 - def aggregate_statistics(self, new_stats):
76 """ Update the current statitistics with the new_stats specified.""" 77 78 if isinstance(new_stats,RunStatistics): 79 new_stats = [new_stats, ] 80 elif isinstance(new_stats,list): 81 if any(not isinstance(_,RunStatistics) for _ in new_stats): 82 raise MadGraph5Error, "The 'new_stats' argument of the function "+\ 83 "'updtate_statistics' must be a (possibly list of) "+\ 84 "RunStatistics instance." 85 86 keys = set([]) 87 for stat in [self,]+new_stats: 88 keys |= set(stat.keys()) 89 90 new_stats = new_stats+[self,] 91 for key in keys: 92 # Define special rules 93 if key=='max_precision': 94 # The minimal precision corresponds to the maximal value for PREC 95 self[key] = min( _[key] for _ in new_stats if key in _) 96 elif key=='min_precision': 97 # The maximal precision corresponds to the minimal value for PREC 98 self[key] = max( _[key] for _ in new_stats if key in _) 99 elif key=='averaged_timing': 100 n_madloop_calls = sum(_['n_madloop_calls'] for _ in new_stats if 101 'n_madloop_calls' in _) 102 if n_madloop_calls > 0 : 103 self[key] = sum(_[key]*_['n_madloop_calls'] for _ in 104 new_stats if (key in _ and 'n_madloop_calls' in _) )/n_madloop_calls 105 else: 106 # Now assume all other quantities are cumulative 107 self[key] = sum(_[key] for _ in new_stats if key in _)
108
109 - def load_statistics(self, xml_node):
110 """ Load the statistics from an xml node. """ 111 112 def getData(Node): 113 return Node.childNodes[0].data
114 115 u_return_code = xml_node.getElementsByTagName('u_return_code') 116 u_codes = [int(_) for _ in getData(u_return_code[0]).split(',')] 117 self['CutTools_DP_usage'] = u_codes[1] 118 self['PJFry_usage'] = u_codes[2] 119 self['IREGI_usage'] = u_codes[3] 120 self['Golem_usage'] = u_codes[4] 121 self['Samurai_usage'] = u_codes[5] 122 self['Ninja_usage'] = u_codes[6] 123 self['COLLIER_usage'] = u_codes[7] 124 self['Ninja_QP_usage'] = u_codes[8] 125 self['CutTools_QP_usage'] = u_codes[9] 126 t_return_code = xml_node.getElementsByTagName('t_return_code') 127 t_codes = [int(_) for _ in getData(t_return_code[0]).split(',')] 128 self['DP_usage'] = t_codes[1] 129 self['QP_usage'] = t_codes[2] 130 self['DP_init_usage'] = t_codes[3] 131 self['DP_init_usage'] = t_codes[4] 132 h_return_code = xml_node.getElementsByTagName('h_return_code') 133 h_codes = [int(_) for _ in getData(h_return_code[0]).split(',')] 134 self['unknown_stability'] = h_codes[1] 135 self['stable_points'] = h_codes[2] 136 self['unstable_points'] = h_codes[3] 137 self['exceptional_points'] = h_codes[4] 138 average_time = xml_node.getElementsByTagName('average_time') 139 avg_time = float(getData(average_time[0])) 140 self['averaged_timing'] = avg_time 141 cumulated_time = xml_node.getElementsByTagName('cumulated_time') 142 cumul_time = float(getData(cumulated_time[0])) 143 self['cumulative_timing'] = cumul_time 144 max_prec = xml_node.getElementsByTagName('max_prec') 145 max_prec = float(getData(max_prec[0])) 146 # The minimal precision corresponds to the maximal value for PREC 147 self['min_precision'] = max_prec 148 min_prec = xml_node.getElementsByTagName('min_prec') 149 min_prec = float(getData(min_prec[0])) 150 # The maximal precision corresponds to the minimal value for PREC 151 self['max_precision'] = min_prec 152 n_evals = xml_node.getElementsByTagName('n_evals') 153 n_evals = int(getData(n_evals[0])) 154 self['n_madloop_calls'] = n_evals
155
156 - def nice_output(self,G, no_warning=False):
157 """Returns a one-line string summarizing the run statistics 158 gathered for the channel G.""" 159 160 # Do not return anythign for now if there is no madloop calls. This can 161 # change of course if more statistics are gathered, unrelated to MadLoop. 162 if self['n_madloop_calls']==0: 163 return '' 164 165 stability = [ 166 ('tot#',self['n_madloop_calls']), 167 ('unkwn#',self['unknown_stability']), 168 ('UPS%',float(self['unstable_points'])/self['n_madloop_calls']), 169 ('EPS#',self['exceptional_points'])] 170 171 stability = [_ for _ in stability if _[1] > 0 or _[0] in ['UPS%','EPS#']] 172 stability = [(_[0],'%i'%_[1]) if isinstance(_[1], int) else 173 (_[0],'%.3g'%(100.0*_[1])) for _ in stability] 174 175 tools_used = [ 176 ('CT_DP',float(self['CutTools_DP_usage'])/self['n_madloop_calls']), 177 ('CT_QP',float(self['CutTools_QP_usage'])/self['n_madloop_calls']), 178 ('PJFry',float(self['PJFry_usage'])/self['n_madloop_calls']), 179 ('Golem',float(self['Golem_usage'])/self['n_madloop_calls']), 180 ('IREGI',float(self['IREGI_usage'])/self['n_madloop_calls']), 181 ('Samurai',float(self['Samurai_usage'])/self['n_madloop_calls']), 182 ('COLLIER',float(self['COLLIER_usage'])/self['n_madloop_calls']), 183 ('Ninja_DP',float(self['Ninja_usage'])/self['n_madloop_calls']), 184 ('Ninja_QP',float(self['Ninja_QP_usage'])/self['n_madloop_calls'])] 185 186 tools_used = [(_[0],'%.3g'%(100.0*_[1])) for _ in tools_used if _[1] > 0.0 ] 187 188 to_print = [('%s statistics:'%(G if isinstance(G,str) else 189 str(os.path.join(list(G))))\ 190 +(' %s,'%misc.format_time(int(self['cumulative_timing'])) if 191 int(self['cumulative_timing']) > 0 else '') 192 +((' Avg. ML timing = %i ms'%int(1.0e3*self['averaged_timing'])) if 193 self['averaged_timing'] > 0.001 else 194 (' Avg. ML timing = %i mus'%int(1.0e6*self['averaged_timing']))) \ 195 +', Min precision = %.2e'%self['min_precision']) 196 ,' -> Stability %s'%dict(stability) 197 ,' -> Red. 
tools usage in %% %s'%dict(tools_used) 198 # I like the display above better after all 199 # ,'Stability %s'%(str([_[0] for _ in stability]), 200 # str([_[1] for _ in stability])) 201 # ,'Red. tools usage in %% %s'%(str([_[0] for _ in tools_used]), 202 # str([_[1] for _ in tools_used])) 203 ] 204 205 if self['skipped_subchannel'] > 0 and not no_warning: 206 to_print.append("WARNING: Some event with large weight have been "+\ 207 "discarded. This happened %s times." % self['skipped_subchannel']) 208 209 return ('\n'.join(to_print)).replace("'"," ")
210
211 - def has_warning(self):
212 """return if any stat needs to be reported as a warning 213 When this is True, the print_warning doit retourner un warning 214 """ 215 216 if self['n_madloop_calls'] > 0: 217 fraction = self['exceptional_points']/float(self['n_madloop_calls']) 218 else: 219 fraction = 0.0 220 221 if self['skipped_subchannel'] > 0: 222 return True 223 elif fraction > 1.0e-4: 224 return True 225 else: 226 return False
227
228 - def get_warning_text(self):
229 """get a string with all the identified warning""" 230 231 to_print = [] 232 if self['skipped_subchannel'] > 0: 233 to_print.append("Some event with large weight have been discarded."+\ 234 " This happens %s times." % self['skipped_subchannel']) 235 if self['n_madloop_calls'] > 0: 236 fraction = self['exceptional_points']/float(self['n_madloop_calls']) 237 if fraction > 1.0e-4: 238 to_print.append("Some PS with numerical instability have been set "+\ 239 "to a zero matrix-element (%.3g%%)" % (100.0*fraction)) 240 241 return ('\n'.join(to_print)).replace("'"," ")
242
class OneResult(object):
    """Integration result of a single channel, as read from a results.dat."""

    def __init__(self, name):
        """Initialize all data """

        self.run_statistics = RunStatistics()
        self.name = name
        self.parent_name = ''
        # --- integrated quantities -------------------------------------
        self.axsec = 0       # Absolute cross section = Sum(abs(wgt))
        self.xsec = 0        # Real cross section = Sum(wgt)
        self.xerru = 0       # uncorrelated error
        self.xerrc = 0       # correlated error
        # --- event bookkeeping -----------------------------------------
        self.nevents = 0
        self.nw = 0          # number of events after the primary unweighting
        self.maxit = 0
        self.nunwgt = 0      # number of unweighted events
        self.luminosity = 0
        self.mfactor = 1     # number of times that this channel occur (due to symmetry)
        # --- per-iteration records -------------------------------------
        self.ysec_iter = []
        self.yerr_iter = []
        self.yasec_iter = []
        self.eff_iter = []
        self.maxwgt_iter = []
        # --- secondary unweighting -------------------------------------
        self.maxwgt = 0      # weight used for the secondary unweighting.
        self.th_maxwgt = 0   # weight that should have been use for secondary
                             # unweighting (can differ if we force maxweight)
        self.th_nunwgt = 0   # associated number of event with th_maxwgt
                             # (theoretical, does not correspond to written events)
273 274 #@cluster.multiple_try(nb_try=5,sleep=20)
275 - def read_results(self, filepath):
276 """read results.dat and fullfill information""" 277 278 if isinstance(filepath, str): 279 finput = open(filepath) 280 elif isinstance(filepath, file): 281 finput = filepath 282 else: 283 raise Exception, "filepath should be a path or a file descriptor" 284 285 i=0 286 found_xsec_line = False 287 for line in finput: 288 # Exit as soon as we hit the xml part. Not elegant, but the part 289 # below should eventually be xml anyway. 290 if '<' in line: 291 break 292 i+=1 293 if i == 1: 294 def secure_float(d): 295 try: 296 return float(d) 297 except ValueError: 298 m=re.search(r'''([+-]?[\d.]*)([+-]\d*)''', d) 299 if m: 300 return float(m.group(1))*10**(float(m.group(2))) 301 return
302 303 data = [secure_float(d) for d in line.split()] 304 self.axsec, self.xerru, self.xerrc, self.nevents, self.nw,\ 305 self.maxit, self.nunwgt, self.luminosity, self.wgt, \ 306 self.xsec = data[:10] 307 if len(data) > 10: 308 self.maxwgt = data[10] 309 if len(data) >12: 310 self.th_maxwgt, self.th_nunwgt = data[11:13] 311 if self.mfactor > 1: 312 self.luminosity /= self.mfactor 313 continue 314 try: 315 l, sec, err, eff, maxwgt, asec = line.split() 316 found_xsec_line = True 317 except: 318 break 319 self.ysec_iter.append(secure_float(sec)) 320 self.yerr_iter.append(secure_float(err)) 321 self.yasec_iter.append(secure_float(asec)) 322 self.eff_iter.append(secure_float(eff)) 323 self.maxwgt_iter.append(secure_float(maxwgt)) 324 325 finput.seek(0) 326 xml = [] 327 for line in finput: 328 if re.match('^.*<.*>',line): 329 xml.append(line) 330 break 331 for line in finput: 332 xml.append(line) 333 334 if xml: 335 self.parse_xml_results('\n'.join(xml)) 336 337 # this is for amcatnlo: the number of events has to be read from another file 338 if self.nevents == 0 and self.nunwgt == 0 and isinstance(filepath, str) and \ 339 os.path.exists(pjoin(os.path.split(filepath)[0], 'nevts')): 340 nevts = int((open(pjoin(os.path.split(filepath)[0], 'nevts')).read()).split()[0]) 341 self.nevents = nevts 342 self.nunwgt = nevts
343
344 - def parse_xml_results(self, xml):
345 """ Parse the xml part of the results.dat file.""" 346 347 dom = minidom.parseString(xml) 348 349 statistics_node = dom.getElementsByTagName("run_statistics") 350 351 if statistics_node: 352 try: 353 self.run_statistics.load_statistics(statistics_node[0]) 354 except ValueError, IndexError: 355 logger.warning('Fail to read run statistics from results.dat')
356
357 - def set_mfactor(self, value):
358 self.mfactor = int(value)
359
360 - def change_iterations_number(self, nb_iter):
361 """Change the number of iterations for this process""" 362 363 if len(self.ysec_iter) <= nb_iter: 364 return 365 366 # Combine the first iterations into a single bin 367 nb_to_rm = len(self.ysec_iter) - nb_iter 368 ysec = [0] 369 yerr = [0] 370 for i in range(nb_to_rm): 371 ysec[0] += self.ysec_iter[i] 372 yerr[0] += self.yerr_iter[i]**2 373 ysec[0] /= (nb_to_rm+1) 374 yerr[0] = math.sqrt(yerr[0]) / (nb_to_rm + 1) 375 376 for i in range(1, nb_iter): 377 ysec[i] = self.ysec_iter[nb_to_rm + i] 378 yerr[i] = self.yerr_iter[nb_to_rm + i] 379 380 self.ysec_iter = ysec 381 self.yerr_iter = yerr
382
383 - def get(self, name):
384 385 if name in ['xsec', 'xerru','xerrc']: 386 return getattr(self, name) * self.mfactor 387 elif name in ['luminosity']: 388 #misc.sprint("use unsafe luminosity definition") 389 #raise Exception 390 return getattr(self, name) #/ self.mfactor 391 elif (name == 'eff'): 392 return self.xerr*math.sqrt(self.nevents/(self.xsec+1e-99)) 393 elif name == 'xerr': 394 return math.sqrt(self.xerru**2+self.xerrc**2) 395 elif name == 'name': 396 return pjoin(self.parent_name, self.name) 397 else: 398 return getattr(self, name)
399
class Combine_results(list, OneResult):
    # Aggregates several OneResult objects (held as the list content) while
    # itself exposing the OneResult interface for the combined quantities.

    def __init__(self, name):
        # Initialize both bases explicitly (no cooperative super() here):
        # the list part starts empty, the OneResult part sets up the
        # counters and the RunStatistics container under the given name.
        list.__init__(self)
        OneResult.__init__(self, name)
406
407 - def add_results(self, name, filepath, mfactor=1):
408 """read the data in the file""" 409 try: 410 oneresult = OneResult(name) 411 oneresult.set_mfactor(mfactor) 412 oneresult.read_results(filepath) 413 oneresult.parent_name = self.name 414 self.append(oneresult) 415 return oneresult 416 except Exception: 417 logger.critical("Error when reading %s" % filepath) 418 raise
419 420
421 - def compute_values(self, update_statistics=False):
422 """compute the value associate to this combination""" 423 424 self.compute_iterations() 425 self.axsec = sum([one.axsec for one in self]) 426 self.xsec = sum([one.xsec for one in self]) 427 self.xerrc = sum([one.xerrc for one in self]) 428 self.xerru = math.sqrt(sum([one.xerru**2 for one in self])) 429 430 self.nevents = sum([one.nevents for one in self]) 431 self.nw = sum([one.nw for one in self]) 432 self.maxit = len(self.yerr_iter) # 433 self.nunwgt = sum([one.nunwgt for one in self]) 434 self.wgt = 0 435 self.luminosity = min([0]+[one.luminosity for one in self]) 436 if update_statistics: 437 self.run_statistics.aggregate_statistics([_.run_statistics for _ in self])
438
    def compute_average(self, error=None):
        """compute the value associate to this combination"""
        # NOTE(review): unlike compute_values, this AVERAGES over the stored
        # results (presumably identical jobs run several times); entries whose
        # cross section undershoots the mean by more than 25 sigma are dropped
        # and the average is recomputed recursively with a forced error.

        nbjobs = len(self)
        if not nbjobs:
            return
        max_xsec = max(one.xsec for one in self)
        min_xsec = min(one.xsec for one in self)
        self.axsec = sum([one.axsec for one in self]) / nbjobs
        self.xsec = sum([one.xsec for one in self]) / nbjobs
        self.xerrc = sum([one.xerrc for one in self]) / nbjobs
        self.xerru = math.sqrt(sum([one.xerru**2 for one in self])) / nbjobs
        if error:
            # Caller-imposed (or outlier-derived) error overrides the
            # statistical combination.
            self.xerrc = error
            self.xerru = error

        self.nevents = sum([one.nevents for one in self])
        self.nw = 0  #sum([one.nw for one in self])
        self.maxit = 0  #len(self.yerr_iter) #
        self.nunwgt = sum([one.nunwgt for one in self])
        self.wgt = 0
        self.luminosity = sum([one.luminosity for one in self])
        self.ysec_iter = []
        self.yerr_iter = []
        self.th_maxwgt = 0.0
        self.th_nunwgt = 0
        # Concatenate the per-iteration records of all jobs.
        # NOTE(review): yasec_iter/eff_iter/maxwgt_iter are NOT reset above,
        # unlike ysec_iter/yerr_iter, so repeated calls keep accumulating
        # into them -- confirm this asymmetry is intended.
        for result in self:
            self.ysec_iter += result.ysec_iter
            self.yerr_iter += result.yerr_iter
            self.yasec_iter += result.yasec_iter
            self.eff_iter += result.eff_iter
            self.maxwgt_iter += result.maxwgt_iter

        #check full consistency
        # Iterate over a copy since inconsistent entries are removed in place.
        onefail = False
        for one in list(self):
            if one.xsec < (self.xsec - 25 * one.xerru):
                if not onefail:
                    logger.debug('multi run are inconsistent: %s < %s - 25* %s: assign error %s', one.xsec, self.xsec, one.xerru, error if error else max_xsec - min_xsec)
                onefail = True
                self.remove(one)
        if onefail:
            # Redo the average without the outliers, forcing the error to the
            # spread of the original sample (or the caller-provided one).
            if error:
                return self.compute_average(error)
            else:
                return self.compute_average((max_xsec - min_xsec) / 2.)
488 - def compute_iterations(self):
489 """Compute iterations to have a chi-square on the stability of the 490 integral""" 491 492 nb_iter = min([len(a.ysec_iter) for a in self], 0) 493 # syncronize all iterations to a single one 494 for oneresult in self: 495 oneresult.change_iterations_number(nb_iter) 496 497 # compute value error for each iteration 498 for i in range(nb_iter): 499 value = [one.ysec_iter[i] for one in self] 500 error = [one.yerr_iter[i]**2 for one in self] 501 502 # store the value for the iteration 503 self.ysec_iter.append(sum(value)) 504 self.yerr_iter.append(math.sqrt(sum(error)))
    # HTML skeletons used by get_html(): 'template_file' is the page-level
    # summary (header line + sortable table), 'table_line_template' is one
    # table row per channel. Both are filled with the %-dictionaries built
    # in get_html().
    template_file = \
"""
%(diagram_link)s
<BR>
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<b>s= %(cross).5g &#177 %(error).3g (%(unit)s)</b><br><br>
<table class="sortable" id='tablesort'>
<tr><th>Graph</th>
    <th> %(result_type)s</th>
    <th>Error</th>
    <th>Events (K)</th>
    <th>Unwgt</th>
    <th>Luminosity</th>
</tr>
%(table_lines)s
</table>
</center>
<br><br><br>
"""
    table_line_template = \
"""
<tr><td align=right>%(P_title)s</td>
<td align=right><a id="%(P_link)s" href=%(P_link)s onClick="check_link('%(P_link)s','%(mod_P_link)s','%(P_link)s')"> %(cross)s </a> </td>
<td align=right>  %(error)s</td>
<td align=right>  %(events)s</td>
<td align=right>  %(unweighted)s</td>
<td align=right>  %(luminosity)s</td>
</tr>
"""
    def get_html(self, run, unit, me_dir=[]):
        """write html output"""
        # NOTE(review): 'me_dir' has a mutable default ([]) and is fed to
        # os.path.join below; callers are expected to pass the real MadEvent
        # directory -- the default would not yield a usable path. Confirm no
        # caller relies on the default.

        # store value for global cross-section
        P_grouping = {}

        tables_line = ''
        for oneresult in self:
            if oneresult.name.startswith('P'):
                # Link P* subprocess entries to their diagrams page and
                # accumulate their cross section per P-directory prefix.
                title = '<a href=../../SubProcesses/%(P)s/diagrams.html>%(P)s</a>' \
                                                     % {'P': oneresult.name}
                P = oneresult.name.split('_', 1)[0]
                if P in P_grouping:
                    P_grouping[P] += float(oneresult.xsec)
                else:
                    P_grouping[P] = float(oneresult.xsec)
            else:
                title = oneresult.name

            if not isinstance(oneresult, Combine_results):
                # this is for the (aMC@)NLO logs
                if os.path.exists(pjoin(me_dir, 'Events', run, 'alllogs_1.html')):
                    link = '../../Events/%(R)s/alllogs_1.html#/%(P)s/%(G)s' % \
                                       {'P': os.path.basename(self.name),
                                        'G': oneresult.name,
                                        'R': run}
                    mod_link = link
                elif os.path.exists(pjoin(me_dir, 'Events', run, 'alllogs_0.html')):
                    link = '../../Events/%(R)s/alllogs_0.html#/%(P)s/%(G)s' % \
                                       {'P': os.path.basename(self.name),
                                        'G': oneresult.name,
                                        'R': run}
                    mod_link = link
                else:
                    # this is for madevent runs
                    link = '../../SubProcesses/%(P)s/%(G)s/%(R)s_log.txt' % \
                                       {'P': os.path.basename(self.name),
                                        'G': oneresult.name,
                                        'R': run}
                    mod_link = '../../SubProcesses/%(P)s/%(G)s/log.txt' % \
                                       {'P': os.path.basename(self.name),
                                        'G': oneresult.name}
                    # NOTE(review): os.path.exists on these RELATIVE links is
                    # evaluated from the current working directory, not from
                    # the HTML output directory -- confirm this is intended.
                    if not os.path.exists(link) and not os.path.exists(mod_link):
                        P = os.path.basename(self.name)
                        base = pjoin(me_dir, 'SubProcesses', P, os.path.dirname(link))
                        # Fall back to a uniquely-named .log file if one exists.
                        pos = [pjoin(base, c) for c in os.listdir(base) if c.endswith('.log')]
                        if len(pos) == 1:
                            link = pos[0]
            else:
                # Sub-combinations get a same-page anchor instead of a file.
                link = '#%s' % oneresult.name
                mod_link = link

            dico = {'P_title': title,
                    'P_link': link,
                    'mod_P_link': mod_link,
                    'cross': '%.4g' % oneresult.xsec,
                    'error': '%.3g' % oneresult.xerru,
                    'events': oneresult.nevents/1000.0,
                    'unweighted': oneresult.nunwgt,
                    'luminosity': '%.3g' % oneresult.luminosity
                   }

            tables_line += self.table_line_template % dico

        # One summary row per P-directory group.
        for P_name, cross in P_grouping.items():
            dico = {'P_title': '%s sum' % P_name,
                    'P_link': './results.html',
                    'mod_P_link': '',
                    'cross': cross,
                    'error': '',
                    'events': '',
                    'unweighted': '',
                    'luminosity': ''
                   }
            tables_line += self.table_line_template % dico

        if self.name.startswith('P'):
            title = '<dt><a name=%(P)s href=../../SubProcesses/%(P)s/diagrams.html>%(P)s</a></dt><dd>' \
                                                     % {'P': self.name}
        else:
            title = ''

        dico = {'cross': self.xsec,
                'abscross': self.axsec,
                'error': self.xerru,
                'unit': unit,
                'result_type': 'Cross-Section',
                'table_lines': tables_line,
                'diagram_link': title
               }

        html_text = self.template_file % dico
        return html_text
629
630 - def write_results_dat(self, output_path):
631 """write a correctly formatted results.dat""" 632 633 def fstr(nb): 634 data = '%E' % nb 635 if data == 'NAN': 636 nb, power = 0,0 637 else: 638 nb, power = data.split('E') 639 nb = float(nb) /10 640 power = int(power) + 1 641 return '%.5fE%+03i' %(nb,power)
642 643 line = '%s %s %s %i %i %i %i %s %s %s %s %s %i\n' % (fstr(self.axsec), fstr(self.xerru), 644 fstr(self.xerrc), self.nevents, self.nw, self.maxit, self.nunwgt, 645 fstr(self.luminosity), fstr(self.wgt), fstr(self.xsec), fstr(self.maxwgt), 646 fstr(self.th_maxwgt), self.th_nunwgt) 647 fsock = open(output_path,'w') 648 fsock.writelines(line) 649 for i in range(len(self.ysec_iter)): 650 line = '%s %s %s %s %s %s\n' % (i+1, self.ysec_iter[i], self.yerr_iter[i], 651 self.eff_iter[i], self.maxwgt_iter[i], self.yasec_iter[i]) 652 fsock.writelines(line)
# HTML header (including the javascript helpers used by the per-channel
# links) prepended to the results.html page written by
# make_all_html_results(). check_link(url, alt, id) points the anchor 'id'
# at 'url' when it exists, otherwise at the fallback 'alt'.
# NOTE(review): the inner 'if (!UrlExists(alt))' branch sets obj.href = alt
# and returns true even though 'alt' was just found missing -- looks
# suspicious; confirm the intended fallback behaviour before changing it.
results_header = """
<head>
    <title>Process results</title>
    <script type="text/javascript" src="../sortable.js"></script>
    <link rel=stylesheet href="../mgstyle.css" type="text/css">
</head>
<body>
<script type="text/javascript">
function UrlExists(url) {
  var http = new XMLHttpRequest();
  http.open('HEAD', url, false);
  try{
     http.send()
     }
  catch(err){
   return 1==2;
  }
  return http.status!=404;
}
function check_link(url,alt, id){
    var obj = document.getElementById(id);
    if ( ! UrlExists(url)){
       if ( ! UrlExists(alt)){
         obj.href = alt;
         return true;
        }
       obj.href = alt;
       return false;
    }
    obj.href = url;
    return 1==1;
}
</script>
"""
def collect_result(cmd, folder_names=[], jobs=None, main_dir=None):
    """Combine the results.dat of every channel of every P* subprocess
    directory of the current run into a single Combine_results tree.

    cmd          : the madevent/amcatnlo command interface (provides
                   results.current, get_Pdir, get_Gdir, ...)
    folder_names : channel-directory patterns ('*' is replaced by the
                   channel name), used for the aMC@NLO layout
    jobs         : optional list of job dictionaries with 'p_dir' and
                   'dirname' entries; takes precedence over folder_names
    main_dir     : optional alternative root for the results files
    """

    run = cmd.results.current['run_name']
    all_results = Combine_results(run)

    for Pdir in cmd.get_Pdir():
        P_comb = Combine_results(Pdir)

        if jobs:
            for job in filter(lambda j: j['p_dir'] in Pdir, jobs):
                P_comb.add_results(os.path.basename(job['dirname']),
                                   pjoin(job['dirname'], 'results.dat'))
        elif folder_names:
            try:
                for line in open(pjoin(Pdir, 'symfact.dat')):
                    name, mfactor = line.split()
                    # Negative multiplicity flags a channel handled elsewhere.
                    if float(mfactor) < 0:
                        continue
                    if os.path.exists(pjoin(Pdir, 'ajob.no_ps.log')):
                        continue

                    for folder in folder_names:
                        if 'G' in folder:
                            subdir = folder.replace('*', name)
                        else:
                            subdir = folder.replace('*', '_G' + name)
                        P_comb.add_results(subdir, pjoin(Pdir, subdir, 'results.dat'),
                                           mfactor)
                # BUG FIX (dead code removed): a second "if jobs:" loop used
                # to live here, but this branch is only reached when 'jobs'
                # is falsy, so it could never execute.
            except IOError:
                # No symfact.dat for this P-directory: skip it entirely.
                continue
        else:
            G_dir, mfactors = cmd.get_Gdir(Pdir, symfact=True)
            for G in G_dir:
                if main_dir:
                    path = pjoin(main_dir, os.path.basename(Pdir),
                                 os.path.basename(G), 'results.dat')
                else:
                    path = pjoin(G, 'results.dat')
                P_comb.add_results(os.path.basename(G), path, mfactors[G])

        P_comb.compute_values()
        all_results.append(P_comb)
    all_results.compute_values()

    return all_results
743 744
def make_all_html_results(cmd, folder_names=[], jobs=[]):
    """ folder_names and jobs have been added for the amcatnlo runs

    Collect every subprocess result of the current run, write the combined
    results.dat files and the HTML summary page, and return the total cross
    section and its uncorrelated error."""
    run = cmd.results.current['run_name']
    if not os.path.exists(pjoin(cmd.me_dir, 'HTML', run)):
        os.mkdir(pjoin(cmd.me_dir, 'HTML', run))

    unit = cmd.results.unit
    P_text = ""
    Presults = collect_result(cmd, folder_names=folder_names, jobs=jobs)

    for P_comb in Presults:
        P_text += P_comb.get_html(run, unit, cmd.me_dir)
        P_comb.compute_values()
        # Decay processes (one initial particle) keep a per-P results file.
        if cmd.proc_characteristics['ninitial'] == 1:
            P_comb.write_results_dat(pjoin(cmd.me_dir, 'SubProcesses', P_comb.name,
                                           '%s_results.dat' % run))

    Presults.write_results_dat(pjoin(cmd.me_dir, 'SubProcesses', 'results.dat'))

    # BUG FIX: the html file handle was opened but never closed; use a
    # context manager so the page is flushed to disk deterministically.
    with open(pjoin(cmd.me_dir, 'HTML', run, 'results.html'), 'w') as fsock:
        fsock.write(results_header)
        fsock.write('%s <dl>' % Presults.get_html(run, unit, cmd.me_dir))
        fsock.write('%s </dl></body>' % P_text)

    return Presults.xsec, Presults.xerru
770