
Source Code for Module madgraph.interface.amcatnlo_run_interface

   1   ################################################################################ 
   2  # 
   3  # Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """A user-friendly command line interface to access MadGraph5_aMC@NLO features. 
  16     Uses the cmd package for command interpretation and tab completion. 
  17  """ 
  18  from __future__ import division 
  19   
  20  import atexit 
  21  import glob 
  22  import logging 
  23  import math 
  24  import optparse 
  25  import os 
  26  import pydoc 
  27  import random 
  28  import re 
  29  import shutil 
  30  import subprocess 
  31  import sys 
  32  import traceback 
  33  import time 
  34  import signal 
  35  import tarfile 
  36  import copy 
  37  import datetime 
  38  import tarfile 
  39  import traceback 
  40  import StringIO 
  41  try: 
  42      import cPickle as pickle 
  43  except: 
  44      import pickle 
  45   
  46  try: 
  47      import readline 
  48      GNU_SPLITTING = ('GNU' in readline.__doc__) 
  49  except: 
  50      GNU_SPLITTING = True 
  51   
  52  root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0] 
  53  root_path = os.path.split(root_path)[0] 
  54  sys.path.insert(0, os.path.join(root_path,'bin')) 
  55   
  56  # useful shortcut 
  57  pjoin = os.path.join 
  58  # Special logger for the Cmd Interface 
  59  logger = logging.getLogger('madgraph.stdout') # -> stdout 
  60  logger_stderr = logging.getLogger('madgraph.stderr') # ->stderr 
  61    
  62  try: 
  63      import madgraph 
  64  except ImportError:  
  65      aMCatNLO = True  
  66      import internal.extended_cmd as cmd 
  67      import internal.common_run_interface as common_run 
  68      import internal.banner as banner_mod 
  69      import internal.misc as misc     
  70      from internal import InvalidCmd, MadGraph5Error 
  71      import internal.files as files 
  72      import internal.cluster as cluster 
  73      import internal.save_load_object as save_load_object 
  74      import internal.gen_crossxhtml as gen_crossxhtml 
  75      import internal.sum_html as sum_html 
  76      import internal.shower_card as shower_card 
  77      import internal.FO_analyse_card as analyse_card  
  78      import internal.lhe_parser as lhe_parser 
  79  else: 
  80      # import from madgraph directory 
  81      aMCatNLO = False 
  82      import madgraph.interface.extended_cmd as cmd 
  83      import madgraph.interface.common_run_interface as common_run 
  84      import madgraph.iolibs.files as files 
  85      import madgraph.iolibs.save_load_object as save_load_object 
  86      import madgraph.madevent.gen_crossxhtml as gen_crossxhtml 
  87      import madgraph.madevent.sum_html as sum_html 
  88      import madgraph.various.banner as banner_mod 
  89      import madgraph.various.cluster as cluster 
  90      import madgraph.various.misc as misc 
  91      import madgraph.various.shower_card as shower_card 
  92      import madgraph.various.FO_analyse_card as analyse_card 
  93      import madgraph.various.lhe_parser as lhe_parser 
  94      from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error,MG5DIR 
  95 
  96  class aMCatNLOError(Exception): 
  97      pass 
98
  99 
 100  def compile_dir(*arguments): 
 101      """compile the directory p_dir 
 102      arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode) 
 103      this function must not be a class method, so that the 
 104      compilation can run on multiple cores""" 
 105 
 106      if len(arguments) == 1: 
 107          (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0] 
 108      elif len(arguments) == 7: 
 109          (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments 
 110      else: 
 111          raise aMCatNLOError, 'wrong number of arguments' 
 112      logger.info(' Compiling %s...' % p_dir) 
 113 
 114      this_dir = pjoin(me_dir, 'SubProcesses', p_dir) 
 115 
 116      try: 
 117          #compile everything 
 118          # compile and run tests 
 119          for test in tests: 
 120              # skip check_poles for LOonly dirs 
 121              if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')): 
 122                  continue 
 123              if test == 'test_ME' or test == 'test_MC': 
 124                  test_exe = 'test_soft_col_limits' 
 125              else: 
 126                  test_exe = test 
 127              misc.compile([test_exe], cwd=this_dir, job_specs=False) 
 128              input = pjoin(me_dir, '%s_input.txt' % test) 
 129              #this can be improved/better written to handle the output 
 130              misc.call(['./%s' % (test_exe)], cwd=this_dir, 
 131                        stdin=open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'), 
 132                        close_fds=True) 
 133              if test == 'check_poles' and os.path.exists(pjoin(this_dir,'MadLoop5_resources')): 
 134                  tf = tarfile.open(pjoin(this_dir,'MadLoop5_resources.tar.gz'),'w:gz', 
 135                                    dereference=True) 
 136                  tf.add(pjoin(this_dir,'MadLoop5_resources'),arcname='MadLoop5_resources') 
 137                  tf.close() 
 138 
 139          if not options['reweightonly']: 
 140              misc.compile(['gensym'], cwd=this_dir, job_specs=False) 
 141              misc.call(['./gensym'], cwd=this_dir, 
 142                        stdout=open(pjoin(this_dir, 'gensym.log'), 'w'), 
 143                        close_fds=True) 
 144          #compile madevent_mintMC/mintFO 
 145          misc.compile([exe], cwd=this_dir, job_specs=False) 
 146          if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 
 147              misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs=False) 
 148 
 149          logger.info('    %s done.' % p_dir) 
 150          return 0 
 151      except MadGraph5Error, msg: 
 152          return msg 
153
 154 
 155  def check_compiler(options, block=False): 
 156      """check that the current fortran compiler is gfortran 4.6 or later. 
 157      If block, stop the execution, otherwise just print a warning""" 
 158 
 159      msg = 'In order to be able to run MadGraph5_aMC@NLO at NLO, you need to have ' + \ 
 160            'gfortran 4.6 or later installed.\n%s has been detected.\n'+\ 
 161            'Note that you can still run all MadEvent runs without any problem!' 
 162      #first check that gfortran is installed 
 163      if options['fortran_compiler']: 
 164          compiler = options['fortran_compiler'] 
 165      elif misc.which('gfortran'): 
 166          compiler = 'gfortran' 
 167      else: 
 168          compiler = '' 
 169 
 170      if 'gfortran' not in compiler: 
 171          if block: 
 172              raise aMCatNLOError(msg % compiler) 
 173          else: 
 174              logger.warning(msg % compiler) 
 175      else: 
 176          curr_version = misc.get_gfortran_version(compiler) 
 177          if [int(v) for v in curr_version.split('.')[:2]] < [4, 6]: # numeric compare: a plain string compare misorders gfortran >= 10 
 178              if block: 
 179                  raise aMCatNLOError(msg % (compiler + ' ' + curr_version)) 
 180              else: 
 181                  logger.warning(msg % (compiler + ' ' + curr_version)) 
182
 183 
 184 
 185  #=============================================================================== 
 186  # CmdExtended 
 187  #=============================================================================== 
 188  class CmdExtended(common_run.CommonRunCmd): 
 189      """Particularisation of the cmd command for aMCatNLO""" 
 190 
 191      #suggested list of commands 
 192      next_possibility = { 
 193          'start': [], 
 194      } 
 195 
 196      debug_output = 'ME5_debug' 
 197      error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n' 
 198      error_debug += 'More information is found in \'%(debug)s\'.\n' 
 199      error_debug += 'Please attach this file to your report.' 
 200 
 201      config_debug = 'If you need help with this issue, please contact us on https://answers.launchpad.net/mg5amcnlo\n' 
 202 
 203 
 204      keyboard_stop_msg = """stopping all operations 
 205              in order to quit MadGraph5_aMC@NLO please enter exit""" 
 206 
 207      # Define the Error 
 208      InvalidCmd = InvalidCmd 
 209      ConfigurationError = aMCatNLOError 
 210 
 211      def __init__(self, me_dir, options, *arg, **opt): 
 212          """Init history and line continuation""" 
 213 
 214          # Tag allowing/forbidding question 
 215          self.force = False 
 216 
 217          # If possible, build an info line with current version number 
 218          # and date, from the VERSION text file 
 219          info = misc.get_pkg_info() 
 220          info_line = "" 
 221          if info and info.has_key('version') and info.has_key('date'): 
 222              len_version = len(info['version']) 
 223              len_date = len(info['date']) 
 224              if len_version + len_date < 30: 
 225                  info_line = "#*         VERSION %s %s %s         *\n" % \ 
 226                              (info['version'], 
 227                              (30 - len_version - len_date) * ' ', 
 228                              info['date']) 
 229          else: 
 230              version = open(pjoin(root_path,'MGMEVersion.txt')).readline().strip() 
 231              info_line = "#*         VERSION %s %s                *\n" % \ 
 232                          (version, (24 - len(version)) * ' ') 
 233 
 234          # Create a header for the history file. 
 235          # Remember to fill in time at writeout time! 
 236          self.history_header = \ 
 237          '#************************************************************\n' + \ 
 238          '#*                  MadGraph5_aMC@NLO                       *\n' + \ 
 239          '#*                                                          *\n' + \ 
 240          "#*                *                       *                 *\n" + \ 
 241          "#*                  *        * *        *                   *\n" + \ 
 242          "#*                    * * * * 5 * * * *                     *\n" + \ 
 243          "#*                  *        * *        *                   *\n" + \ 
 244          "#*                *                       *                 *\n" + \ 
 245          "#*                                                          *\n" + \ 
 246          "#*                                                          *\n" + \ 
 247          info_line + \ 
 248          "#*                                                          *\n" + \ 
 249          "#*    The MadGraph5_aMC@NLO Development Team - Find us at   *\n" + \ 
 250          "#*    https://server06.fynu.ucl.ac.be/projects/madgraph     *\n" + \ 
 251          "#*                          and                             *\n" + \ 
 252          "#*                http://amcatnlo.cern.ch                   *\n" + \ 
 253          '#*                                                          *\n' + \ 
 254          '#************************************************************\n' + \ 
 255          '#*                                                          *\n' + \ 
 256          '#*               Command File for aMCatNLO                  *\n' + \ 
 257          '#*                                                          *\n' + \ 
 258          '#*     run as ./bin/aMCatNLO.py filename                    *\n' + \ 
 259          '#*                                                          *\n' + \ 
 260          '#************************************************************\n' 
 261 
 262          if info_line: 
 263              info_line = info_line[1:] 
 264 
 265          logger.info(\ 
 266          "************************************************************\n" + \ 
 267          "*                                                          *\n" + \ 
 268          "*           W E L C O M E  to  M A D G R A P H 5           *\n" + \ 
 269          "*                      a M C @ N L O                       *\n" + \ 
 270          "*                                                          *\n" + \ 
 271          "*                *                       *                 *\n" + \ 
 272          "*                  *        * *        *                   *\n" + \ 
 273          "*                    * * * * 5 * * * *                     *\n" + \ 
 274          "*                  *        * *        *                   *\n" + \ 
 275          "*                *                       *                 *\n" + \ 
 276          "*                                                          *\n" + \ 
 277          info_line + \ 
 278          "*                                                          *\n" + \ 
 279          "*    The MadGraph5_aMC@NLO Development Team - Find us at   *\n" + \ 
 280          "*                http://amcatnlo.cern.ch                   *\n" + \ 
 281          "*                                                          *\n" + \ 
 282          "*               Type 'help' for in-line help.              *\n" + \ 
 283          "*                                                          *\n" + \ 
 284          "************************************************************") 
 285          super(CmdExtended, self).__init__(me_dir, options, *arg, **opt) 
 286 
 287 
 288      def get_history_header(self): 
 289          """return the history header""" 
 290          return self.history_header % misc.get_time_info() 
 291 
 292      def stop_on_keyboard_stop(self): 
 293          """action to perform to close nicely on a keyboard interrupt""" 
 294          try: 
 295              if hasattr(self, 'cluster'): 
 296                  logger.info('rm jobs on queue') 
 297                  self.cluster.remove() 
 298              if hasattr(self, 'results'): 
 299                  self.update_status('Stop by the user', level=None, makehtml=True, error=True) 
 300                  self.add_error_log_in_html(KeyboardInterrupt) 
 301          except: 
 302              pass 
 303 
 304      def postcmd(self, stop, line): 
 305          """ Update the status of the run for finishing interactive command """ 
 306 
 307          # relaxing the tag forbidding question 
 308          self.force = False 
 309 
 310          if not self.use_rawinput: 
 311              return stop 
 312 
 313 
 314          arg = line.split() 
 315          if len(arg) == 0: 
 316              return stop 
 317          elif str(arg[0]) in ['exit','quit','EOF']: 
 318              return stop 
 319 
 320          try: 
 321              self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0], 
 322                                 level=None, error=True) 
 323          except Exception: 
 324              misc.sprint('self.update_status fails', log=logger) 
 325              pass 
 326 
 327      def nice_user_error(self, error, line): 
 328          """If a ME run is currently running, add a link in the html output""" 
 329 
 330          self.add_error_log_in_html() 
 331          cmd.Cmd.nice_user_error(self, error, line) 
 332 
 333      def nice_config_error(self, error, line): 
 334          """If a ME run is currently running, add a link in the html output""" 
 335 
 336          self.add_error_log_in_html() 
 337          cmd.Cmd.nice_config_error(self, error, line) 
 338 
 339      def nice_error_handling(self, error, line): 
 340          """If a ME run is currently running, add a link in the html output""" 
 341 
 342          self.add_error_log_in_html() 
 343          cmd.Cmd.nice_error_handling(self, error, line) 
 344 
 345 
 346 
 347  #=============================================================================== 
 348  # HelpToCmd 
 349  #=============================================================================== 
 350  class HelpToCmd(object): 
 351      """ The series of help routines for the aMCatNLOCmd""" 
 352 
 353      def help_launch(self): 
 354          """help for the launch command""" 
 355          _launch_parser.print_help() 
 356 
 357      def help_banner_run(self): 
 358          logger.info("syntax: banner_run Path|RUN [--run_options]") 
 359          logger.info("-- Reproduce a run following a given banner") 
 360          logger.info("   One of the following arguments is required:") 
 361          logger.info("   Path should be the path of a valid banner.") 
 362          logger.info("   RUN should be the name of a run of the current directory") 
 363          self.run_options_help([('-f','answer all questions by default'), 
 364                                 ('--name=X', 'Define the name associated with the new run')]) 
 365 
 366 
 367      def help_compile(self): 
 368          """help for the compile command""" 
 369          _compile_parser.print_help() 
 370 
 371      def help_generate_events(self): 
 372          """help for the generate_events command: 
 373          just calls help_launch""" 
 374          _generate_events_parser.print_help() 
 375 
 376 
 377      def help_calculate_xsect(self): 
 378          """help for the calculate_xsect command""" 
 379          _calculate_xsect_parser.print_help() 
 380 
 381      def help_shower(self): 
 382          """help for the shower command""" 
 383          _shower_parser.print_help() 
 384 
 385 
 386      def help_open(self): 
 387          logger.info("syntax: open FILE") 
 388          logger.info("-- open a file with the appropriate editor.") 
 389          logger.info("   If FILE is one of index.html, param_card.dat, run_card.dat,") 
 390          logger.info("   the path to the last created/used directory is used") 
 391 
 392      def run_options_help(self, data): 
 393          if data: 
 394              logger.info('-- local options:') 
 395              for name, info in data: 
 396                  logger.info('    %s : %s' % (name, info)) 
 397 
 398          logger.info("-- session options:") 
 399          logger.info("   Note that these options will be kept for the current session") 
 400          logger.info("   --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type']) 
 401          logger.info("   --multicore : Run in multi-core configuration") 
 402          logger.info("   --nb_core=X : limit the number of cores to use to X.") 
 403 
 404 
 405 
 406 
 407  #=============================================================================== 
 408  # CheckValidForCmd 
 409  #=============================================================================== 
 410  class CheckValidForCmd(object): 
 411      """ The series of check routines for the aMCatNLOCmd""" 
 412 
 413      def check_shower(self, args, options): 
 414          """Check the validity of the line. args[0] is the run_directory""" 
 415 
 416          if options['force']: 
 417              self.force = True 
 418 
 419          if len(args) == 0: 
 420              self.help_shower() 
 421              raise self.InvalidCmd, 'Invalid syntax, please specify the run name' 
 422          if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])): 
 423              raise self.InvalidCmd, 'Directory %s does not exist' % \ 
 424                  pjoin(os.getcwd(), 'Events', args[0]) 
 425 
 426          self.set_run_name(args[0], level='shower') 
 427          args[0] = pjoin(self.me_dir, 'Events', args[0]) 
 428 
 429      def check_plot(self, args): 
 430          """Check the arguments for the plot command 
 431          plot run_name modes""" 
 432 
 433 
 434          madir = self.options['madanalysis_path'] 
 435          td = self.options['td_path'] 
 436 
 437          if not madir or not td: 
 438              logger.info('Retrying to read the configuration file to find madanalysis/td') 
 439              self.set_configuration() 
 440 
 441          madir = self.options['madanalysis_path'] 
 442          td = self.options['td_path'] 
 443 
 444          if not madir: 
 445              error_msg = 'No MadAnalysis path correctly set. ' 
 446              error_msg += 'Please use the set command to define the path and retry. ' 
 447              error_msg += 'You can also define it in the configuration file.' 
 448              raise self.InvalidCmd(error_msg) 
 449          if not td: 
 450              error_msg = 'No path to the td directory correctly set. ' 
 451              error_msg += 'Please use the set command to define the path and retry. ' 
 452              error_msg += 'You can also define it in the configuration file.' 
 453              raise self.InvalidCmd(error_msg) 
 454 
 455          if len(args) == 0: 
 456              if not hasattr(self, 'run_name') or not self.run_name: 
 457                  self.help_plot() 
 458                  raise self.InvalidCmd('No run name currently defined. Please add this information.') 
 459              args.append('all') 
 460              return 
 461 
 462 
 463          if args[0] not in self._plot_mode: 
 464              self.set_run_name(args[0], level='plot') 
 465              del args[0] 
 466              if len(args) == 0: 
 467                  args.append('all') 
 468          elif not self.run_name: 
 469              self.help_plot() 
 470              raise self.InvalidCmd('No run name currently defined. Please add this information.') 
 471 
 472          for arg in args: 
 473              if arg not in self._plot_mode and arg != self.run_name: 
 474                  self.help_plot() 
 475                  raise self.InvalidCmd('unknown option %s' % arg) 
 476 
 477      def check_pgs(self, arg): 
 478          """Check the arguments for the pgs command 
 479          syntax: pgs [NAME] 
 480          Note that other options have already been removed at this point 
 481          """ 
 482 
 483          # If no pythia-pgs path 
 484          if not self.options['pythia-pgs_path']: 
 485              logger.info('Retrying to read the configuration file to find the pythia-pgs path') 
 486              self.set_configuration() 
 487 
 488          if not self.options['pythia-pgs_path'] or not \ 
 489              os.path.exists(pjoin(self.options['pythia-pgs_path'],'src')): 
 490              error_msg = 'No pythia-pgs path correctly set. ' 
 491              error_msg += 'Please use the set command to define the path and retry. ' 
 492              error_msg += 'You can also define it in the configuration file.' 
 493              raise self.InvalidCmd(error_msg) 
 494 
 495          tag = [a for a in arg if a.startswith('--tag=')] 
 496          if tag: 
 497              arg.remove(tag[0]) 
 498              tag = tag[0][6:] 
 499 
 500 
 501          if len(arg) == 0 and not self.run_name: 
 502              if self.results.lastrun: 
 503                  arg.insert(0, self.results.lastrun) 
 504              else: 
 505                  raise self.InvalidCmd('No run name currently defined. Please add this information.') 
 506 
 507          if len(arg) == 1 and self.run_name == arg[0]: 
 508              arg.pop(0) 
 509 
 510          if not len(arg) and \ 
 511              not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')): 
 512              self.help_pgs() 
 513              raise self.InvalidCmd('''No file pythia_events.hep currently available. 
 514          Please specify a valid run_name''') 
 515 
 516          lock = None 
 517          if len(arg) == 1: 
 518              prev_tag = self.set_run_name(arg[0], tag, 'pgs') 
 519              filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name)) 
 520 
 521              if not filenames: 
 522                  raise self.InvalidCmd('No events file corresponding to %s run with tag %s. ' % (self.run_name, prev_tag)) 
 523              else: 
 524                  input_file = filenames[0] 
 525                  output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep') 
 526                  lock = cluster.asyncrone_launch('gunzip', stdout=open(output_file,'w'), 
 527                                                  argument=['-c', input_file], 
 528                                                  close_fds=True) 
 529          else: 
 530              if tag: 
 531                  self.run_card['run_tag'] = tag 
 532              self.set_run_name(self.run_name, tag, 'pgs') 
 533 
 534          return lock 
 535 
 536 
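#  Note (sketch of the intent): cluster.asyncrone_launch starts gunzip in
#  the background and returns a handle; check_pgs hands this 'lock' back to
#  its caller so the decompression can complete while the run continues.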
 537      def check_delphes(self, arg): 
 538          """Check the arguments for the delphes command 
 539          syntax: delphes [NAME] 
 540          Note that other options have already been removed at this point 
 541          """ 
 542 
 543          # If no delphes path 
 544          if not self.options['delphes_path']: 
 545              logger.info('Retrying to read the configuration file to find the delphes path') 
 546              self.set_configuration() 
 547 
 548          if not self.options['delphes_path']: 
 549              error_msg = 'No delphes path correctly set. ' 
 550              error_msg += 'Please use the set command to define the path and retry. ' 
 551              error_msg += 'You can also define it in the configuration file.' 
 552              raise self.InvalidCmd(error_msg) 
 553 
 554          tag = [a for a in arg if a.startswith('--tag=')] 
 555          if tag: 
 556              arg.remove(tag[0]) 
 557              tag = tag[0][6:] 
 558 
 559 
 560          if len(arg) == 0 and not self.run_name: 
 561              if self.results.lastrun: 
 562                  arg.insert(0, self.results.lastrun) 
 563              else: 
 564                  raise self.InvalidCmd('No run name currently defined. Please add this information.') 
 565 
 566          if len(arg) == 1 and self.run_name == arg[0]: 
 567              arg.pop(0) 
 568 
 569          if not len(arg) and \ 
 570              not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')): 
 571              self.help_pgs() 
 572              raise self.InvalidCmd('''No file pythia_events.hep currently available. 
 573          Please specify a valid run_name''') 
 574 
 575          if len(arg) == 1: 
 576              prev_tag = self.set_run_name(arg[0], tag, 'delphes') 
 577              filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name)) 
 578 
 579 
 580              if not filenames: 
 581                  raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\ 
 582                      % (self.run_name, prev_tag, 
 583                         pjoin(self.me_dir,'Events',self.run_name, '%s_pythia_events.hep.gz' % prev_tag))) 
 584              else: 
 585                  input_file = filenames[0] 
 586                  output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep') 
 587                  lock = cluster.asyncrone_launch('gunzip', stdout=open(output_file,'w'), 
 588                                                  argument=['-c', input_file], 
 589                                                  close_fds=True) 
 590          else: 
 591              if tag: 
 592                  self.run_card['run_tag'] = tag 
 593              self.set_run_name(self.run_name, tag, 'delphes') 
 594 
 595      def check_calculate_xsect(self, args, options): 
 596          """check the validity of the line. args is ORDER, 
 597          ORDER being LO or NLO. If no mode is passed, NLO is used""" 
 598          # modify args in order to be DIR 
 599          # mode being either standalone or madevent 
 600 
 601          if options['force']: 
 602              self.force = True 
 603 
 604          if not args: 
 605              args.append('NLO') 
 606              return 
 607 
 608          if len(args) > 1: 
 609              self.help_calculate_xsect() 
 610              raise self.InvalidCmd, 'Invalid syntax: too many arguments' 
 611 
 612          elif len(args) == 1: 
 613              if not args[0] in ['NLO', 'LO']: 
 614                  raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[0] 
 615              mode = args[0] 
 616 
 617          # check for incompatible options/modes 
 618          if options['multicore'] and options['cluster']: 
 619              raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \ 
 620                  ' are not compatible. Please choose one.' 
 621 
 622 
 623      def check_generate_events(self, args, options): 
 624          """check the validity of the line. args is ORDER, 
 625          ORDER being LO or NLO. If no mode is passed, NLO is used""" 
 626          # modify args in order to be DIR 
 627          # mode being either standalone or madevent 
 628 
 629          if not args: 
 630              args.append('NLO') 
 631              return 
 632 
 633          if len(args) > 1: 
 634              self.help_generate_events() 
 635              raise self.InvalidCmd, 'Invalid syntax: too many arguments' 
 636 
 637          elif len(args) == 1: 
 638              if not args[0] in ['NLO', 'LO']: 
 639                  raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[0] 
 640              mode = args[0] 
 641 
 642          # check for incompatible options/modes 
 643          if options['multicore'] and options['cluster']: 
 644              raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \ 
 645                  ' are not compatible. Please choose one.' 
 646 
 647      def check_banner_run(self, args): 
 648          """check the validity of the line""" 
 649 
 650          if len(args) == 0: 
 651              self.help_banner_run() 
 652              raise self.InvalidCmd('banner_run requires at least one argument.') 
 653 
 654          tag = [a[6:] for a in args if a.startswith('--tag=')] 
 655 
 656 
 657          if os.path.exists(args[0]): 
 658              type = 'banner' 
 659              format = self.detect_card_type(args[0]) 
 660              if format != 'banner': 
 661                  raise self.InvalidCmd('The file is not a valid banner.') 
 662          elif tag: 
 663              args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \ 
 664                              (args[0], tag)) 
 665              if not os.path.exists(args[0]): 
 666                  raise self.InvalidCmd('No banner associated with this name and tag.') 
 667          else: 
 668              name = args[0] 
 669              type = 'run' 
 670              banners = misc.glob('*_banner.txt', pjoin(self.me_dir,'Events', args[0])) 
 671              if not banners: 
 672                  raise self.InvalidCmd('No banner associated with this name.') 
 673              elif len(banners) == 1: 
 674                  args[0] = banners[0] 
 675              else: 
 676                  #list the tags and propose them to the user 
 677                  tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners] 
 678                  tag = self.ask('which tag do you want to use?', tags[0], tags) 
 679                  args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \ 
 680                                  (args[0], tag)) 
 681 
 682          run_name = [arg[7:] for arg in args if arg.startswith('--name=')] 
 683          if run_name: 
 684              try: 
 685                  self.exec_cmd('remove %s all banner -f' % run_name) 
 686              except Exception: 
 687                  pass 
 688              self.set_run_name(args[0], tag=None, level='parton', reload_card=True) 
 689          elif type == 'banner': 
 690              self.set_run_name(self.find_available_run_name(self.me_dir)) 
 691          elif type == 'run': 
 692              if not self.results[name].is_empty(): 
 693                  run_name = self.find_available_run_name(self.me_dir) 
 694                  logger.info('Run %s is not empty so will use run_name: %s' % \ 
 695                              (name, run_name)) 
 696                  self.set_run_name(run_name) 
 697              else: 
 698                  try: 
 699                      self.exec_cmd('remove %s all banner -f' % run_name) 
 700                  except Exception: 
 701                      pass 
 702                  self.set_run_name(name) 
 703 
 704 
 705 
 706      def check_launch(self, args, options): 
 707          """check the validity of the line. args is MODE 
 708          MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used""" 
 709          # modify args in order to be DIR 
 710          # mode being either standalone or madevent 
 711 
 712          if options['force']: 
 713              self.force = True 
 714 
 715 
 716          if not args: 
 717              args.append('auto') 
 718              return 
 719 
 720          if len(args) > 1: 
 721              self.help_launch() 
 722              raise self.InvalidCmd, 'Invalid syntax: too many arguments' 
 723 
 724          elif len(args) == 1: 
 725              if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO','auto']: 
 726                  raise self.InvalidCmd, '%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0] 
 727              mode = args[0] 
 728 
 729          # check for incompatible options/modes 
 730          if options['multicore'] and options['cluster']: 
 731              raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \ 
 732                  ' are not compatible. Please choose one.' 
 733          if mode == 'NLO' and options['reweightonly']: 
 734              raise self.InvalidCmd, 'option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"' 
 735 
 736 
 737      def check_compile(self, args, options): 
 738          """check the validity of the line. args is MODE 
 739          MODE being FO or MC. If no mode is passed, MC is used""" 
 740          # modify args in order to be DIR 
 741          # mode being either standalone or madevent 
 742 
 743          if options['force']: 
 744              self.force = True 
 745 
 746          if not args: 
 747              args.append('MC') 
 748              return 
 749 
 750          if len(args) > 1: 
 751              self.help_compile() 
 752              raise self.InvalidCmd, 'Invalid syntax: too many arguments' 
 753 
 754          elif len(args) == 1: 
 755              if not args[0] in ['MC', 'FO']: 
 756                  raise self.InvalidCmd, '%s is not a valid mode, please use "FO" or "MC"' % args[0] 
 757              mode = args[0] 
 758 
 759          # check for incompatible options/modes 
 760 
 761 
 762  #=============================================================================== 
 763  # CompleteForCmd 
 764  #=============================================================================== 
 765  class CompleteForCmd(CheckValidForCmd): 
 766      """ The series of completion routines for the MadGraphCmd""" 
 767 
 768      def complete_launch(self, text, line, begidx, endidx): 
 769          """auto-completion for the launch command""" 
 770 
 771          args = self.split_arg(line[0:begidx]) 
 772          if len(args) == 1: 
 773              #return mode 
 774              return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line) 
 775          elif len(args) == 2 and line[begidx-1] == '@': 
 776              return self.list_completion(text,['LO','NLO'],line) 
 777          else: 
 778              opts = [] 
 779              for opt in _launch_parser.option_list: 
 780                  opts += opt._long_opts + opt._short_opts 
 781              return self.list_completion(text, opts, line) 
 782 
 783      def complete_banner_run(self, text, line, begidx, endidx, formatting=True): 
 784          "Complete the banner_run command" 
 785          try: 
 786 
 787 
 788              args = self.split_arg(line[0:begidx], error=False) 
 789 
 790              if args[-1].endswith(os.path.sep): 
 791                  return self.path_completion(text, 
 792                          os.path.join('.',*[a for a in args \ 
 793                              if a.endswith(os.path.sep)])) 
 794 
 795 
 796              if len(args) > 1: 
 797                  # only options are possible 
 798                  tags = misc.glob('%s_*_banner.txt' % args[1], pjoin(self.me_dir, 'Events', args[1])) 
 799                  tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags] 
 800 
 801                  if args[-1] != '--tag=': 
 802                      tags = ['--tag=%s' % t for t in tags] 
 803                  else: 
 804                      return self.list_completion(text, tags) 
 805                  return self.list_completion(text, tags + ['--name=','-f'], line) 
 806 
 807              # First argument 
 808              possibilites = {} 
 809 
 810              comp = self.path_completion(text, os.path.join('.',*[a for a in args \ 
 811                                          if a.endswith(os.path.sep)])) 
 812              if os.path.sep in line: 
 813                  return comp 
 814              else: 
 815                  possibilites['Path from ./'] = comp 
 816 
 817              run_list = misc.glob(pjoin('*','*_banner.txt'), pjoin(self.me_dir, 'Events')) 
 818              run_list = [n.rsplit('/',2)[1] for n in run_list] 
 819              possibilites['RUN Name'] = self.list_completion(text, run_list) 
 820 
 821              return self.deal_multiple_categories(possibilites, formatting) 
 822 
 823 
 824          except Exception, error: 
 825              print error 
 826 
 827 
 828      def complete_compile(self, text, line, begidx, endidx): 
 829          """auto-completion for the compile command""" 
 830 
 831          args = self.split_arg(line[0:begidx]) 
 832          if len(args) == 1: 
 833              #return mode 
 834              return self.list_completion(text,['FO','MC'],line) 
 835          else: 
 836              opts = [] 
 837              for opt in _compile_parser.option_list: 
 838                  opts += opt._long_opts + opt._short_opts 
 839              return self.list_completion(text, opts, line) 
 840 
 841      def complete_calculate_xsect(self, text, line, begidx, endidx): 
 842          """auto-completion for the calculate_xsect command""" 
 843 
 844          args = self.split_arg(line[0:begidx]) 
 845          if len(args) == 1: 
 846              #return mode 
 847              return self.list_completion(text,['LO','NLO'],line) 
 848          else: 
 849              opts = [] 
 850              for opt in _calculate_xsect_parser.option_list: 
 851                  opts += opt._long_opts + opt._short_opts 
 852              return self.list_completion(text, opts, line) 
 853 
 854      def complete_generate_events(self, text, line, begidx, endidx): 
 855          """auto-completion for the generate_events command: 
 856          just calls the completion for launch""" 
 857          return self.complete_launch(text, line, begidx, endidx) 
 858 
 859 
 860      def complete_shower(self, text, line, begidx, endidx): 
 861          args = self.split_arg(line[0:begidx]) 
 862          if len(args) == 1: 
 863              #return valid run_name 
 864              data = misc.glob(pjoin('*','events.lhe.gz'), pjoin(self.me_dir, 'Events')) 
 865              data = [n.rsplit('/',2)[1] for n in data] 
 866              tmp1 = self.list_completion(text, data) 
 867              if not self.run_name: 
 868                  return tmp1 
 869 
 870      def complete_plot(self, text, line, begidx, endidx): 
 871          """ Complete the plot command """ 
 872 
 873          args = self.split_arg(line[0:begidx], error=False) 
 874 
 875          if len(args) == 1: 
 876              #return valid run_name 
 877              data = misc.glob(pjoin('*','events.lhe*'), pjoin(self.me_dir, 'Events')) 
 878              data = [n.rsplit('/',2)[1] for n in data] 
 879              tmp1 = self.list_completion(text, data) 
 880              if not self.run_name: 
 881                  return tmp1 
 882 
 883          if len(args) > 1: 
 884              return self.list_completion(text, self._plot_mode) 
 885 
 886      def complete_pgs(self, text, line, begidx, endidx): 
 887          "Complete the pgs command" 
 888          args = self.split_arg(line[0:begidx], error=False) 
 889          if len(args) == 1: 
 890              #return valid run_name 
 891              data = misc.glob(pjoin('*', 'events_*.hep.gz'), 
 892                               pjoin(self.me_dir, 'Events')) 
 893              data = [n.rsplit('/',2)[1] for n in data] 
 894              tmp1 = self.list_completion(text, data) 
 895              if not self.run_name: 
 896                  return tmp1 
 897              else: 
 898                  tmp2 = self.list_completion(text, self._run_options + ['-f', 
 899                                              '--tag=' ,'--no_default'], line) 
 900                  return tmp1 + tmp2 
 901          else: 
 902              return self.list_completion(text, self._run_options + ['-f', 
 903                                          '--tag=','--no_default'], line) 
 904 
 905      complete_delphes = complete_pgs 
906
 907  class aMCatNLOAlreadyRunning(InvalidCmd): 
 908      pass 
909
 910  class AskRunNLO(cmd.ControlSwitch): 
 911 
 912      to_control = [('order', 'Type of perturbative computation'), 
 913                    ('fixed_order', 'No MC@[N]LO matching / event generation'), 
 914                    ('shower', 'Shower the generated events'), 
 915                    ('madspin', 'Decay onshell particles'), 
 916                    ('reweight', 'Add weights to events for new hypotheses'), 
 917                    ('madanalysis','Run MadAnalysis5 on the events generated')] 
 918 
 919      quit_on = cmd.ControlSwitch.quit_on + ['onlyshower'] 
 920 
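#  Example (illustrative): the switches above drive the interactive question
#  shown at launch time. Each key can be set directly, e.g.
#      > shower=PYTHIA8
#      > madspin=OFF
#  or via the shortcut handlers defined below: typing "aMC@NLO" amounts to
#  order=NLO, fixed_order=OFF, shower=ON, while "noshower" keeps order=NLO
#  but switches the shower off.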
 921      def __init__(self, question, line_args=[], mode=None, force=False, 
 922                   *args, **opt): 
 923 
 924          self.check_available_module(opt['mother_interface'].options) 
 925          self.me_dir = opt['mother_interface'].me_dir 
 926          self.last_mode = opt['mother_interface'].last_mode 
 927          self.proc_characteristics = opt['mother_interface'].proc_characteristics 
 928          self.run_card = banner_mod.RunCard(pjoin(self.me_dir,'Cards', 'run_card.dat')) 
 929          super(AskRunNLO,self).__init__(self.to_control, opt['mother_interface'], 
 930                                         *args, **opt) 
 931 
 932      @property 
 933      def answer(self): 
 934 
 935          out = super(AskRunNLO, self).answer 
 936          if out['shower'] == 'HERWIG7': 
 937              out['shower'] = 'HERWIGPP' 
 938 
 939          if out['shower'] not in self.get_allowed('shower') or out['shower'] == 'OFF': 
 940              out['runshower'] = False 
 941          else: 
 942              out['runshower'] = True 
 943          return out 
 944 
 945 
 946      def check_available_module(self, options): 
 947 
 948          self.available_module = set() 
 949          if options['madanalysis5_path']: 
 950              self.available_module.add('MA5') 
 951          if not aMCatNLO or ('mg5_path' in options and options['mg5_path']): 
 952 
 953              self.available_module.add('MadSpin') 
 954              if misc.has_f2py() or options['f2py_compiler']: 
 955                  self.available_module.add('reweight') 
 956          if options['pythia8_path']: 
 957              self.available_module.add('PY8') 
 958          if options['hwpp_path'] and options['thepeg_path'] and options['hepmc_path']: 
 959              self.available_module.add('HW7') 
 960      # 
 961      # shortcut 
 962      # 
 963      def ans_lo(self, value): 
 964          """ function called if the user types lo=value, or lo (then value is None)""" 
 965 
 966          if value is None: 
 967              self.switch['order'] = 'LO' 
 968              self.switch['fixed_order'] = 'ON' 
 969              self.set_switch('shower', 'OFF') 
 970          else: 
 971              logger.warning('Invalid command: lo=%s' % value) 
 972 
 973      def ans_nlo(self, value): 
 974          if value is None: 
 975              self.switch['order'] = 'NLO' 
 976              self.switch['fixed_order'] = 'ON' 
 977              self.set_switch('shower', 'OFF') 
 978          else: 
 979              logger.warning('Invalid command: nlo=%s' % value) 
 980 
 981      def ans_amc__at__nlo(self, value): 
 982          if value is None: 
 983              self.switch['order'] = 'NLO' 
 984              self.switch['fixed_order'] = 'OFF' 
 985              self.set_switch('shower', 'ON') 
 986          else: 
 987              logger.warning('Invalid command: aMC@NLO=%s' % value) 
 988 
 989      def ans_amc__at__lo(self, value): 
 990          if value is None: 
 991              self.switch['order'] = 'LO' 
 992              self.switch['fixed_order'] = 'OFF' 
 993              self.set_switch('shower', 'ON') 
 994          else: 
 995              logger.warning('Invalid command: aMC@LO=%s' % value) 
 996 
 997      def ans_noshower(self, value): 
 998          if value is None: 
 999              self.switch['order'] = 'NLO' 
1000              self.switch['fixed_order'] = 'OFF' 
1001              self.set_switch('shower', 'OFF') 
1002          else: 
1003              logger.warning('Invalid command: noshower=%s' % value) 
1004 
1005      def ans_onlyshower(self, value): 
1006          if value is None: 
1007              self.switch['mode'] = 'onlyshower' 
1008              self.switch['madspin'] = 'OFF' 
1009              self.switch['reweight'] = 'OFF' 
1010          else: 
1011              logger.warning('Invalid command: onlyshower=%s' % value) 
1012 
1013      def ans_noshowerlo(self, value): 
1014          if value is None: 
1015              self.switch['order'] = 'LO' 
1016              self.switch['fixed_order'] = 'OFF' 
1017              self.set_switch('shower', 'OFF') 
1018          else: 
1019              logger.warning('Invalid command: noshowerlo=%s' % value) 
1020 
1021      def ans_madanalysis5(self, value): 
1022          """ shortcut madanalysis5 -> madanalysis """ 
1023 
1024          if value is None: 
1025              return self.onecmd('madanalysis') 
1026          else: 
1027              self.set_switch('madanalysis', value) 
1028      # 
1029      # ORDER 
1030      # 
1031      def get_allowed_order(self): 
1032          return ["LO", "NLO"] 
1033 
1034      def set_default_order(self): 
1035          self.switch['order'] = 'NLO' 
1036          if self.last_mode in ['LO', 'aMC@LO', 'noshowerLO']: 
1037              self.switch['order'] = 'LO' 
1038 
1039 
1040      def set_switch_off_order(self): 
1041          return 
1042      # 
1043      # Fixed order 
1044      # 
1045      def get_allowed_fixed_order(self): 
1046          """ """ 
1047          if self.proc_characteristics['ninitial'] == 1: 
1048              return ['ON'] 
1049          else: 
1050              return ['ON', 'OFF'] 
1051 
1052      def set_default_fixed_order(self): 
1053 
1054          if self.last_mode in ['LO', 'NLO']: 
1055              self.switch['fixed_order'] = 'ON' 
1056          elif self.proc_characteristics['ninitial'] == 1: 
1057              self.switch['fixed_order'] = 'ON' 
1058          else: 
1059              self.switch['fixed_order'] = 'OFF' 
1060 
1061      def color_for_fixed_order(self, switch_value): 
1062 
1063          if switch_value in ['OFF']: 
1064              return self.green % switch_value 
1065          else: 
1066              return self.red % switch_value 
1067 
1068      def color_for_shower(self, switch_value): 
1069 
1070          if switch_value in ['ON']: 
1071              return self.green % switch_value 
1072          elif switch_value in self.get_allowed('shower'): 
1073              return self.green % switch_value 
1074          else: 
1075              return self.red % switch_value 
1076 
1077      def consistency_fixed_order_shower(self, vfix, vshower): 
1078          """ consistency_XX_YY(val_XX, val_YY) 
1079              -> XX is the new key set by the user to a new value val_XX 
1080              -> YY is another key set by the user. 
1081              -> the return value should be None or the replacement value for YY 
1082          """ 
1083 
1084          if vfix == 'ON' and vshower != 'OFF': 
1085              return 'OFF' 
1086          return None 
1087 
1088      consistency_fixed_order_madspin = consistency_fixed_order_shower 
1089      consistency_fixed_order_reweight = consistency_fixed_order_shower 
1090 
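#  Worked example of the consistency protocol (illustrative): if the user
#  sets fixed_order=ON while shower is PYTHIA8, the controller calls
#  consistency_fixed_order_shower('ON', 'PYTHIA8'), which returns 'OFF',
#  so shower is replaced by OFF; a return value of None leaves the other
#  key untouched.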
1091      def consistency_fixed_order_madanalysis(self, vfix, vma5): 
1092 
1093          if vfix == 'ON' and vma5 == 'ON': 
1094              return 'OFF' 
1095          return None 
1096 
1097 
1098      def consistency_shower_fixed_order(self, vshower, vfix): 
1099          """ consistency_XX_YY(val_XX, val_YY) 
1100              -> XX is the new key set by the user to a new value val_XX 
1101              -> YY is another key set by the user. 
1102              -> the return value should be None or the replacement value for YY 
1103          """ 
1104 
1105          if vshower != 'OFF' and vfix == 'ON': 
1106              return 'OFF' 
1107          return None 
1108 
1109      consistency_madspin_fixed_order = consistency_shower_fixed_order 
1110      consistency_reweight_fixed_order = consistency_shower_fixed_order 
1111      consistency_madanalysis_fixed_order = consistency_shower_fixed_order 
1112 
1113 
1114      # 
1115      # Shower 
1116      # 
1117      def get_allowed_shower(self): 
1118          """ """ 
1119 
1120          if hasattr(self, 'allowed_shower'): 
1121              return self.allowed_shower 
1122 
1123          if not misc.which('bc'): 
1124              return ['OFF'] 
1125 
1126          if self.proc_characteristics['ninitial'] == 1: 
1127              self.allowed_shower = ['OFF'] 
1128              return ['OFF'] 
1129          else: 
1130              allowed = ['HERWIG6','OFF', 'PYTHIA6Q', 'PYTHIA6PT', ] 
1131              if 'PY8' in self.available_module: 
1132                  allowed.append('PYTHIA8') 
1133              if 'HW7' in self.available_module: 
1134                  allowed.append('HERWIGPP') 
1135 
1136              self.allowed_shower = allowed 
1137 
1138          return allowed 
1139 
1140      def check_value_shower(self, value): 
1141          """ """ 
1142 
1143          if value.upper() in self.get_allowed_shower(): 
1144              return True 
1145          if value.upper() in ['PYTHIA8', 'HERWIGPP']: 
1146              return True 
1147          if value.upper() == 'ON': 
1148              return self.run_card['parton_shower'] 
1149          if value.upper() in ['P8','PY8','PYTHIA_8']: 
1150              return 'PYTHIA8' 
1151          if value.upper() in ['PY6','P6','PY6PT', 'PYTHIA_6', 'PYTHIA_6PT','PYTHIA6PT','PYTHIA6_PT']: 
1152              return 'PYTHIA6PT' 
1153          if value.upper() in ['PY6Q', 'PYTHIA_6Q','PYTHIA6Q', 'PYTHIA6_Q']: 
1154              return 'PYTHIA6Q' 
1155          if value.upper() in ['HW7', 'HERWIG7']: 
1156              return 'HERWIG7' 
1157          if value.upper() in ['HW++', 'HWPP', 'HERWIG++']: 
1158              return 'HERWIGPP' 
1159          if value.upper() in ['HW6', 'HERWIG_6']: 
1160              return 'HERWIG6' 
1161 
1162      def set_default_shower(self): 
1163 
1164          if self.last_mode in ['LO', 'NLO', 'noshower', 'noshowerLO']: 
1165              self.switch['shower'] = 'OFF' 
1166              return 
1167 
1168          if self.proc_characteristics['ninitial'] == 1: 
1169              self.switch['shower'] = 'OFF' 
1170              return 
1171 
1172          if not misc.which('bc'): 
1173              logger.warning('bc command not available: the shower cannot be run. Please install it if you want to run the shower (e.g. sudo apt-get install bc).') 
1174              self.switch['shower'] = 'OFF' 
1175              return 
1176 
1177          if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')): 
1178              self.switch['shower'] = self.run_card['parton_shower'] 
1179              #self.switch['shower'] = 'ON' 
1180              self.switch['fixed_order'] = "OFF" 
1181          else: 
1182              self.switch['shower'] = 'OFF' 
1183 
1184      def consistency_shower_madanalysis(self, vshower, vma5): 
1185          """ MA5 is only possible with (N)LO+PS, i.e. if the shower is run""" 
1186 
1187          if vshower == 'OFF' and vma5 == 'ON': 
1188              return 'OFF' 
1189          return None 
1190 
1191      def consistency_madanalysis_shower(self, vma5, vshower): 
1192 
1193          if vma5 == 'ON' and vshower == 'OFF': 
1194              return 'ON' 
1195          return None 
1196 
1197      def get_cardcmd_for_shower(self, value): 
1198          """ adapt the run_card to this setup. return the list of cmd to run""" 
1199 
1200          if value != 'OFF': 
1201              return ['set parton_shower %s' % self.switch['shower']] 
1202          return [] 
1203 
1204      # 
1205      # madspin 
1206      # 
1207      def get_allowed_madspin(self): 
1208          """ """ 
1209 
1210          if hasattr(self, 'allowed_madspin'): 
1211              return self.allowed_madspin 
1212 
1213          self.allowed_madspin = [] 
1214 
1215 
1216          if 'MadSpin' not in self.available_module: 
1217              return self.allowed_madspin 
1218          if self.proc_characteristics['ninitial'] == 1: 
1219              self.available_module.remove('MadSpin') 
1220              self.allowed_madspin = ['OFF'] 
1221              return self.allowed_madspin 
1222          else: 
1223              self.allowed_madspin = ['OFF', 'ON', 'onshell'] 
1224              return self.allowed_madspin 
1225 
1226      def check_value_madspin(self, value): 
1227          """handle aliases and valid options not present in get_allowed_madspin; 
1228          remember that this mode should always be OFF for 1>N ('ON' is not an allowed value)""" 
1229 
1230          if value.upper() in self.get_allowed_madspin(): 
1231              if value == value.upper(): 
1232                  return True 
1233              else: 
1234                  return value.upper() 
1235          elif value.lower() in self.get_allowed_madspin(): 
1236              if value == value.lower(): 
1237                  return True 
1238              else: 
1239                  return value.lower() 
1240 
1241          if 'MadSpin' not in self.available_module or \ 
1242              'ON' not in self.get_allowed_madspin(): 
1243              return False 
1244 
1245          if value.lower() in ['madspin', 'full']: 
1246              return 'full' 
1247          elif value.lower() in ['none']: 
1248              return 'none' 
1249 
1250      def set_default_madspin(self): 
1251 
1252          if 'MadSpin' in self.available_module: 
1253              if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')): 
1254                  self.switch['madspin'] = 'ON' 
1255              else: 
1256                  self.switch['madspin'] = 'OFF' 
1257          else: 
1258              self.switch['madspin'] = 'Not Avail.' 
1259 
1260      def get_cardcmd_for_madspin(self, value): 
1261          """set some commands to run before allowing the user to modify the cards.""" 
1262 
1263          if value == 'onshell': 
1264              return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode onshell"] 
1265          elif value in ['full', 'madspin']: 
1266              return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode madspin"] 
1267          elif value == 'none': 
1268              return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode none"] 
1269          else: 
1270              return [] 
1271 
1272      # 
1273      # reweight 
1274      # 
1275      def get_allowed_reweight(self): 
1276          """set the valid (visible) options for reweight""" 
1277 
1278          if hasattr(self, 'allowed_reweight'): 
1279              return getattr(self, 'allowed_reweight') 
1280 
1281          self.allowed_reweight = [] 
1282          if 'reweight' not in self.available_module: 
1283              return self.allowed_reweight 
1284          if self.proc_characteristics['ninitial'] == 1: 
1285              self.available_module.remove('reweight') 
1286              self.allowed_reweight.append('OFF') 
1287              return self.allowed_reweight 
1288          else: 
1289              self.allowed_reweight = ['OFF', 'ON', 'NLO', 'NLO_TREE','LO'] 
1290              return self.allowed_reweight 
1291 
1292      def set_default_reweight(self): 
1293          """initialise the switch for reweight""" 
1294 
1295          if 'reweight' in self.available_module: 
1296              if os.path.exists(pjoin(self.me_dir,'Cards','reweight_card.dat')): 
1297                  self.switch['reweight'] = 'ON' 
1298              else: 
1299                  self.switch['reweight'] = 'OFF' 
1300          else: 
1301              self.switch['reweight'] = 'Not Avail.' 
1302 
1303      def get_cardcmd_for_reweight(self, value): 
1304          """ adapt the run_card to this setup. return the list of cmd to run""" 
1305 
1306          if value == 'LO': 
1307              return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode LO"] 
1308          elif value == 'NLO': 
1309              return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO", 
1310                      "set store_rwgt_info T"] 
1311          elif value == 'NLO_TREE': 
1312              return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO_tree", 
1313                      "set store_rwgt_info T"] 
1314          return [] 
1315 
1316      # 
1317      # MadAnalysis5 
1318      # 
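#  Example (illustrative): get_cardcmd_for_reweight('NLO') returns the two
#  commands replayed before the cards are opened,
#      edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO
#      set store_rwgt_info T
#  where the second one sets the run_card flag used when reweighting NLO
#  events.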
1319      def get_allowed_madanalysis(self): 
1320 
1321          if hasattr(self, 'allowed_madanalysis'): 
1322              return self.allowed_madanalysis 
1323 
1324          self.allowed_madanalysis = [] 
1325 
1326 
1327          if 'MA5' not in self.available_module: 
1328              return self.allowed_madanalysis 
1329 
1330          if self.proc_characteristics['ninitial'] == 1: 
1331              self.available_module.remove('MA5') 
1332              self.allowed_madanalysis = ['OFF'] 
1333              return self.allowed_madanalysis 
1334          else: 
1335              self.allowed_madanalysis = ['OFF', 'ON'] 
1336              return self.allowed_madanalysis 
1337 
1338      def set_default_madanalysis(self): 
1339          """initialise the switch for madanalysis""" 
1340 
1341          if 'MA5' not in self.available_module: 
1342              self.switch['madanalysis'] = 'Not Avail.' 
1343          elif os.path.exists(pjoin(self.me_dir,'Cards', 'madanalysis5_hadron_card.dat')): 
1344              self.switch['madanalysis'] = 'ON' 
1345          else: 
1346              self.switch['madanalysis'] = 'OFF' 
1347 
1348      def check_value_madanalysis(self, value): 
1349          """check an entry is valid. return the valid entry in case of shortcut""" 
1350 
1351          if value.upper() in self.get_allowed('madanalysis'): 
1352              return True 
1353          value = value.lower() 
1354          if value == 'hadron': 
1355              return 'ON' if 'ON' in self.get_allowed_madanalysis() else False 
1356          else: 
1357              return False 
1358 
1359 
1360  #=============================================================================== 
1361  # aMCatNLOCmd 
1362  #=============================================================================== 
1363  class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd): 
1364      """The command line processor of MadGraph5_aMC@NLO""" 
1365 
1366      # Truth values 
1367      true = ['T','.true.',True,'true'] 
1368      # Options and formats available 
1369      _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m'] 
1370      _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes'] 
1371      _calculate_decay_options = ['-f', '--accuracy=0.'] 
1372      _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout'] 
1373      _plot_mode = ['all', 'parton','shower','pgs','delphes'] 
1374      _clean_mode = _plot_mode + ['channel', 'banner'] 
1375      _display_opts = ['run_name', 'options', 'variable'] 
1376      # survey options, dict from name to type, default value, and help text 
1377      # Variables to store object information 
1378      web = False 
1379      cluster_mode = 0 
1380      queue = 'madgraph' 
1381      nb_core = None 
1382      make_opts_var = {} 
1383 
1384      next_possibility = { 
1385          'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]', 
1386                    'help generate_events'], 
1387          'generate_events': ['generate_events [OPTIONS]', 'shower'], 
1388          'launch': ['launch [OPTIONS]', 'shower'], 
1389          'shower' : ['generate_events [OPTIONS]'] 
1390      } 
1391 
1392 
1393      ############################################################################ 
1394      def __init__(self, me_dir = None, options = {}, *completekey, **stdin): 
1395          """ add information to the cmd """ 
1396 
1397          self.start_time = 0 
1398          CmdExtended.__init__(self, me_dir, options, *completekey, **stdin) 
1399          #common_run.CommonRunCmd.__init__(self, me_dir, options) 
1400 
1401          self.mode = 'aMCatNLO' 
1402          self.nb_core = 0 
1403          self.prompt = "%s>" % os.path.basename(pjoin(self.me_dir)) 
1404 
1405 
1406          self.load_results_db() 
1407          self.results.def_web_mode(self.web) 
1408          # check that the compiler is gfortran 4.6 or later if virtuals have been exported 
1409          proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read() 
1410 
1411          if not '[real=QCD]' in proc_card: 
1412              check_compiler(self.options, block=True) 
1413 
1414 
1415      ############################################################################ 
1416      def do_shower(self, line): 
1417          """ run the shower on a given parton level file """ 
1418          argss = self.split_arg(line) 
1419          (options, argss) = _launch_parser.parse_args(argss) 
1420          # check argument validity and normalise arguments 
1421          options = options.__dict__ 
1422          options['reweightonly'] = False 
1423          self.check_shower(argss, options) 
1424          evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe') 
1425          self.ask_run_configuration('onlyshower', options) 
1426          self.run_mcatnlo(evt_file, options) 
1427 
1428          self.update_status('', level='all', update_results=True) 
1429 
1430  ################################################################################ 
1431      def do_plot(self, line): 
1432          """Create the plots for a given run""" 
1433 
1434          # Since in principle, all plots are already done automatically 
1435          args = self.split_arg(line) 
1436          # Check argument's validity 
1437          self.check_plot(args) 
1438          logger.info('plot for run %s' % self.run_name) 
1439 
1440          if not self.force: 
1441              self.ask_edit_cards([], args, plot=True) 
1442 
1443          if any([arg in ['parton'] for arg in args]): 
1444              filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe') 
1445              if os.path.exists(filename+'.gz'): 
1446                  misc.gunzip(filename) 
1447              if os.path.exists(filename): 
1448                  logger.info('Found events.lhe file for run %s' % self.run_name) 
1449                  shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe')) 
1450                  self.create_plot('parton') 
1451                  shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename) 
1452                  misc.gzip(filename) 
1453 
1454          if any([arg in ['all','parton'] for arg in args]): 
1455              filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top') 
1456              if os.path.exists(filename): 
1457                  logger.info('Found MADatNLO.top file for run %s' % \ 
1458                              self.run_name) 
1459                  output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html') 
1460                  plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton') 
1461 
1462                  if not os.path.isdir(plot_dir): 
1463                      os.makedirs(plot_dir) 
1464                  top_file = pjoin(plot_dir, 'plots.top') 
1465                  files.cp(filename, top_file) 
1466                  madir = self.options['madanalysis_path'] 
1467                  tag = self.run_card['run_tag'] 
1468                  td = self.options['td_path'] 
1469                  misc.call(['%s/plot' % self.dirbin, madir, td], 
1470                            stdout = open(pjoin(plot_dir, 'plot.log'),'a'), 
1471                            stderr = subprocess.STDOUT, 
1472                            cwd=plot_dir) 
1473 
1474                  misc.call(['%s/plot_page-pl' % self.dirbin, 
1475                             os.path.basename(plot_dir), 
1476                             'parton'], 
1477                            stdout = open(pjoin(plot_dir, 'plot.log'),'a'), 
1478                            stderr = subprocess.STDOUT, 
1479                            cwd=pjoin(self.me_dir, 'HTML', self.run_name)) 
1480                  shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'), 
1481                              output) 
1482 
1483                  os.remove(pjoin(self.me_dir, 'Events', 'plots.top')) 
1484 
1485          if any([arg in ['all','shower'] for arg in args]): 
1486              filenames = misc.glob('events_*.lhe.gz', pjoin(self.me_dir, 'Events', self.run_name)) 
1487              if len(filenames) != 1: 
1488                  filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name)) 
1489                  if len(filenames) != 1: 
1490                      logger.info('No shower level file found for run %s' % \ 
1491                                  self.run_name) 
1492                      return 
1493                  filename = filenames[0] 
1494                  misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep')) 
1495 
1496                  if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')): 
1497                      if aMCatNLO and not self.options['mg5_path']: 
1498                          raise aMCatNLOError('plotting NLO HEP files requires the MG5 utilities') 
1499 
1500                      files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'), 
1501                               pjoin(self.me_dir, 'Cards', 'pythia_card.dat')) 
1502                  self.run_hep2lhe() 
1503              else: 
1504                  filename = filenames[0] 
1505                  misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep')) 
1506 
1507              self.create_plot('shower') 
1508              lhe_file_name = filename.replace('.hep.gz', '.lhe') 
1509              shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'), 
1510                          lhe_file_name) 
1511              misc.gzip(lhe_file_name) 
1512 
1513          if any([arg in ['all','pgs'] for arg in args]): 
1514              filename = pjoin(self.me_dir, 'Events', self.run_name, 
1515                               '%s_pgs_events.lhco' % self.run_tag) 
1516              if os.path.exists(filename+'.gz'): 
1517                  misc.gunzip(filename) 
1518              if os.path.exists(filename): 
1519                  self.create_plot('PGS') 
1520                  misc.gzip(filename) 
1521              else: 
1522                  logger.info('No valid files for pgs plot') 
1523 
1524          if any([arg in ['all','delphes'] for arg in args]): 
1525              filename = pjoin(self.me_dir, 'Events', self.run_name, 
1526                               '%s_delphes_events.lhco' % self.run_tag) 
1527              if os.path.exists(filename+'.gz'): 
1528                  misc.gunzip(filename) 
1529              if os.path.exists(filename): 
1530                  #shutil.move(filename, pjoin(self.me_dir, 'Events','delphes_events.lhco')) 
1531                  self.create_plot('Delphes') 
1532                  #shutil.move(pjoin(self.me_dir, 'Events','delphes_events.lhco'), filename) 
1533                  misc.gzip(filename) 
1534              else: 
1535                  logger.info('No valid files for delphes plot') 
1536 
1537 
1538      ############################################################################ 
1539      def do_calculate_xsect(self, line): 
1540          """Main commands: calculate the LO/NLO cross-section, using madevent_mintFO; 
1541          this function wraps the do_launch one""" 
1542 
1543          self.start_time = time.time() 
1544          argss = self.split_arg(line) 
1545          # check argument validity and normalise arguments 
1546          (options, argss) = _calculate_xsect_parser.parse_args(argss) 
1547          options = options.__dict__ 
1548          options['reweightonly'] = False 
1549          options['parton'] = True 
1550          self.check_calculate_xsect(argss, options) 
1551          self.do_launch(line, options, argss) 
1552 
1553      ############################################################################ 
1554      def do_banner_run(self, line): 
1555          """Make a run from the banner file""" 
1556 
1557          args = self.split_arg(line) 
1558          #check the validity of the arguments 
1559          self.check_banner_run(args) 
1560 
1561          # Remove previous cards 
1562          for name in ['shower_card.dat', 'madspin_card.dat']: 
1563              try: 
1564                  os.remove(pjoin(self.me_dir, 'Cards', name)) 
1565              except Exception: 
1566                  pass 
1567 
1568          banner_mod.split_banner(args[0], self.me_dir, proc_card=False) 
1569 
1570          # Check if we want to modify the run 
1571          if not self.force: 
1572              ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n']) 
1573              if ans == 'n': 
1574                  self.force = True 
1575 
1576          # Compute the run mode: 
1577          if self.force: 
1578              mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True} 
1579              banner = banner_mod.Banner(args[0]) 
1580              for line in banner['run_settings']: 
1581                  if '=' in line: 
1582                      mode, value = [t.strip() for t in line.split('=')] 
1583                      mode_status[mode] = value 
1584          else: 
1585              mode_status = {} 
1586 
1587          # Call Generate events 
1588          self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''), 
1589                         switch=mode_status) 
1590 
1591      ############################################################################ 
1592      def do_generate_events(self, line): 
1593          """Main commands: generate events; 
1594          this function just wraps the do_launch one""" 
1595          self.do_launch(line) 
1596 
1597 
1598      ############################################################################ 
1599      def do_treatcards(self, line, amcatnlo=True, mode=''): 
1600          """Advanced commands: this is for creating the correct run_card.inc from the nlo format""" 
1601          #check that no 'Auto' is present in the file 
1602          self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat')) 
1603 
1604          # propagate the FO_card entry FO_LHE_weight_ratio to the run_card. 
1605          # this variable is system-only in the run_card; 
1606          # this cannot be done in EditCard since the parameter is not written in the 
1607          # run_card directly. 
1608          if mode in ['LO', 'NLO']: 
1609              name = 'fo_lhe_weight_ratio' 
1610              FO_card = analyse_card.FOAnalyseCard(pjoin(self.me_dir,'Cards', 'FO_analyse_card.dat')) 
1611              if name in FO_card: 
1612                  self.run_card.set(name, FO_card[name], user=False) 
1613              name = 'fo_lhe_postprocessing' 
1614              if name in FO_card: 
1615                  self.run_card.set(name, FO_card[name], user=False) 
1616 
1617          return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo) 
1618 
1619      ############################################################################ 
1620      def set_configuration(self, amcatnlo=True, **opt): 
1621          """assign all configuration variables from file; 
1622          loop over the different config files if config_file is not defined""" 
1623          return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt) 
1624 1625 ############################################################################
    def do_launch(self, line, options={}, argss=[], switch={}):
        """Main commands: launch the full chain.
        options and args are relevant if the function is called from other
        functions, such as generate_events or calculate_xsect.
        switch gives the list of switches needed for the computation
        (useful for banner_run)
        """

        if not argss and not options:
            self.start_time = time.time()
            argss = self.split_arg(line)
            # check argument validity and normalise arguments
            (options, argss) = _launch_parser.parse_args(argss)
            options = options.__dict__
            self.check_launch(argss, options)

        if 'run_name' in options.keys() and options['run_name']:
            self.run_name = options['run_name']
            # if a dir with the given run_name already exists,
            # remove it and warn the user
            if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)):
                logger.warning('Removing old run information in \n' +
                               pjoin(self.me_dir, 'Events', self.run_name))
                files.rm(pjoin(self.me_dir, 'Events', self.run_name))
                self.results.delete_run(self.run_name)
        else:
            self.run_name = ''  # will be set later

        if options['multicore']:
            self.cluster_mode = 2
        elif options['cluster']:
            self.cluster_mode = 1

        if not switch:
            mode = argss[0]
            if mode in ['LO', 'NLO']:
                options['parton'] = True
            mode = self.ask_run_configuration(mode, options)
        else:
            mode = self.ask_run_configuration('auto', options, switch)

        self.results.add_detail('run_mode', mode)

        self.update_status('Starting run', level=None, update_results=True)

        if self.options['automatic_html_opening']:
            misc.open_file(os.path.join(self.me_dir, 'crossx.html'))
            self.options['automatic_html_opening'] = False

        if '+' in mode:
            mode = mode.split('+')[0]
        self.compile(mode, options)
        evt_file = self.run(mode, options)

        if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']:
            logger.info('No event file generated: grids have been set up with a '
                        'relative precision of %s' % self.run_card['req_acc'])
            return

        if not mode in ['LO', 'NLO']:
            assert evt_file == pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'), \
                '%s != %s' % (evt_file, pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))

            if self.run_card['systematics_program'] == 'systematics':
                self.exec_cmd('systematics %s %s ' % (self.run_name,
                              ' '.join(self.run_card['systematics_arguments'])))

            self.exec_cmd('reweight -from_cards', postcmd=False)
            self.exec_cmd('decay_events -from_cards', postcmd=False)
            evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')

        if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \
                and not options['parton']:
            self.run_mcatnlo(evt_file, options)
            self.exec_cmd('madanalysis5_hadron --no_default', postcmd=False, printcmd=False)
        elif mode == 'noshower':
            logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
Please shower the Les Houches events before using them for physics analyses.""")

        self.update_status('', level='all', update_results=True)
        if self.run_card['ickkw'] == 3 and \
           (mode in ['noshower'] or
            ('PYTHIA8' not in self.run_card['parton_shower'].upper() and mode in ['aMC@NLO'])):
            logger.warning("""You are running with FxFx merging enabled.
To be able to merge samples of various multiplicities without double counting,
you have to remove some events after showering 'by hand'.
Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")

        self.store_result()
        # check if the param_card defines a scan
        if self.param_card_iterator:
            cpath = pjoin(self.me_dir, 'Cards', 'param_card.dat')
            param_card_iterator = self.param_card_iterator
            self.param_card_iterator = []  # avoid that the next generate goes through here
            param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
                                            error=self.results.current['error'],
                                            param_card_path=cpath)
            orig_name = self.run_name
            # go through the scan
            with misc.TMP_variable(self, 'allow_notification_center', False):
                for i, card in enumerate(param_card_iterator):
                    card.write(cpath)
                    self.check_param_card(cpath, dependent=True)
                    if not options['force']:
                        options['force'] = True
                    if options['run_name']:
                        options['run_name'] = '%s_%s' % (orig_name, i + 1)
                    if not argss:
                        argss = [mode, "-f"]
                    elif argss[0] == "auto":
                        argss[0] = mode
                    self.do_launch("", options=options, argss=argss, switch=switch)
                    #self.exec_cmd("launch -f ",precmd=True, postcmd=True,errorhandling=False)
                    param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
                                                    error=self.results.current['error'],
                                                    param_card_path=cpath)
            # restore the original param_card
            param_card_iterator.write(pjoin(self.me_dir, 'Cards', 'param_card.dat'))
            name = misc.get_scan_name(orig_name, self.run_name)
            path = pjoin(self.me_dir, 'Events', 'scan_%s.txt' % name)
            logger.info("write all cross-section results in %s" % path, '$MG:BOLD')
            param_card_iterator.write_summary(path)

        if self.allow_notification_center:
            misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir),
                              '%s: %s +- %s ' % (self.results.current['run_name'],
                                                 self.results.current['cross'],
                                                 self.results.current['error']))
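    # Illustrative sketch (added for exposition; not part of the original
    # module): how a launch line such as "aMC@NLO -f" is turned into the
    # (options, argss) pair used above. The real parser, _launch_parser, is
    # defined elsewhere in this module; the flags below are a hypothetical
    # subset chosen to match the option keys used in do_launch.
    @staticmethod
    def _sketch_parse_launch_line(line):
        """Parse a launch command line into (options_dict, positional_args)."""
        parser = optparse.OptionParser(usage='launch [MODE] [options]')
        parser.add_option('-f', '--force', action='store_true', default=False)        # assumed flag
        parser.add_option('-n', '--name', dest='run_name', default=False)             # assumed flag
        parser.add_option('-m', '--multicore', action='store_true', default=False)    # assumed flag
        parser.add_option('-c', '--cluster', action='store_true', default=False)      # assumed flag
        (options, argss) = parser.parse_args(line.split())
        return options.__dict__, argss
    # e.g. _sketch_parse_launch_line('aMC@NLO -f')
    #      -> ({'force': True, 'run_name': False, 'multicore': False,
    #           'cluster': False}, ['aMC@NLO'])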

    ############################################################################
    def do_compile(self, line):
        """Advanced commands: just compile the executables"""
        argss = self.split_arg(line)
        # check argument validity and normalise arguments
        (options, argss) = _compile_parser.parse_args(argss)
        options = options.__dict__
        options['reweightonly'] = False
        options['nocompile'] = False
        self.check_compile(argss, options)

        mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]]
        self.ask_run_configuration(mode, options)
        self.compile(mode, options)

        self.update_status('', level='all', update_results=True)

    def update_random_seed(self):
        """Update the random-number seed with the value from the run_card.
        If this is 0, generate a fresh seed instead."""
        iseed = self.run_card['iseed']
        if iseed == 0:
            randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'))
            iseed = int(randinit.read()[2:]) + 1
            randinit.close()
        randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w')
        randinit.write('r=%d' % iseed)
        randinit.close()

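    # Minimal sketch (added for exposition; the helper name is hypothetical) of
    # the "randinit" bookkeeping used above and in get_randinit_seed below: the
    # file holds a single line of the form "r=%d", so reading from character 2
    # onwards recovers the integer seed.
    @staticmethod
    def _sketch_randinit_roundtrip(iseed, path):
        """Write a seed in the randinit format and read it back."""
        with open(path, 'w') as randinit:
            randinit.write('r=%d' % iseed)
        with open(path) as randinit:
            return int(randinit.read()[2:])  # strip the leading "r="
    # e.g. _sketch_randinit_roundtrip(42, '/tmp/randinit') -> 42
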
    def run(self, mode, options):
        """Runs aMC@NLO. Returns the name of the event file created."""
        logger.info('Starting run')

        if not 'only_generation' in options.keys():
            options['only_generation'] = False

        # for the second step in applgrid mode, do only the event generation step
        if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']:
            options['only_generation'] = True
        self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))
        self.setup_cluster_or_multicore()
        self.update_random_seed()
        # find and keep track of all the jobs
        folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
                        'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
        folder_names['noshower'] = folder_names['aMC@NLO']
        folder_names['noshowerLO'] = folder_names['aMC@LO']
        p_dirs = [d for d in
                  open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
        # clean previous results
        self.clean_previous_results(options, p_dirs, folder_names[mode])

        mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events']

        if options['reweightonly']:
            event_norm = self.run_card['event_norm']
            nevents = self.run_card['nevents']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

        if mode in ['LO', 'NLO']:
            # this is for fixed-order runs
            mode_dict = {'NLO': 'all', 'LO': 'born'}
            logger.info('Doing fixed order %s' % mode)
            req_acc = self.run_card['req_acc_FO']

            # Re-distribute the grids for the 2nd step of the applgrid running
            if self.run_card['iappl'] == 2:
                self.applgrid_distribute(options, mode_dict[mode], p_dirs)

            # create a list of dictionaries "jobs_to_run" with all the
            # jobs that need to be run
            integration_step = -1
            jobs_to_run, jobs_to_collect, integration_step = self.create_jobs_to_run(
                options, p_dirs, req_acc, mode_dict[mode], integration_step, mode, fixed_order=True)
            self.prepare_directories(jobs_to_run, mode)

            # loop over the integration steps. After every step, check
            # if we have the required accuracy. If so, stop running;
            # otherwise do another step.
            while True:
                integration_step = integration_step + 1
                self.run_all_jobs(jobs_to_run, integration_step)
                self.collect_log_files(jobs_to_run, integration_step)
                jobs_to_run, jobs_to_collect = self.collect_the_results(
                    options, req_acc, jobs_to_run, jobs_to_collect,
                    integration_step, mode, mode_dict[mode])
                if not jobs_to_run:
                    # there are no more jobs to run (jobs_to_run is empty)
                    break
            # We are done.
            self.finalise_run_FO(folder_names[mode], jobs_to_collect)
            self.update_status('Run complete', level='parton', update_results=True)
            return

        elif mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            if self.ninitial == 1:
                raise aMCatNLOError('Decay processes can only be run at fixed order.')
            mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',
                         'noshower': 'all', 'noshowerLO': 'born'}
            shower = self.run_card['parton_shower'].upper()
            nevents = self.run_card['nevents']
            req_acc = self.run_card['req_acc']
            if nevents == 0 and req_acc < 0:
                raise aMCatNLOError('Cannot determine the required accuracy from the number '
                                    'of events, because 0 events are requested. Please set '
                                    'the "req_acc" parameter in the run_card to a value '
                                    'between 0 and 1')
            elif req_acc > 1 or req_acc == 0:
                raise aMCatNLOError('The required accuracy ("req_acc" in the run_card) should '
                                    'be larger than 0 and smaller than 1, '
                                    'or set to -1 for automatic determination. Current '
                                    'value is %f' % req_acc)
            # For more than 1M events, set req_acc to 0.001 (except when it was
            # explicitly set in the run_card)
            elif req_acc < 0 and nevents > 1000000:
                req_acc = 0.001

            shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']

            if not shower in shower_list:
                raise aMCatNLOError('%s is not a valid parton shower. '
                                    'Please use one of the following: %s'
                                    % (shower, ', '.join(shower_list)))

            # check that PYTHIA6PT is not used for processes with FSR
            if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']:
                raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')

            if mode in ['aMC@NLO', 'aMC@LO']:
                logger.info('Doing %s matched to parton shower' % mode[4:])
            elif mode in ['noshower', 'noshowerLO']:
                logger.info('Generating events without running the shower.')
            elif options['only_generation']:
                logger.info('Generating events starting from existing results')

            jobs_to_run, jobs_to_collect, integration_step = self.create_jobs_to_run(
                options, p_dirs, req_acc, mode_dict[mode], 1, mode, fixed_order=False)
            # Make sure to update all the jobs to be ready for the event generation step
            if options['only_generation']:
                jobs_to_run, jobs_to_collect = self.collect_the_results(
                    options, req_acc, jobs_to_run, jobs_to_collect, 1, mode,
                    mode_dict[mode], fixed_order=False)
            else:
                self.prepare_directories(jobs_to_run, mode, fixed_order=False)

            # Main loop over the three MINT generation steps:
            for mint_step, status in enumerate(mcatnlo_status):
                if options['only_generation'] and mint_step < 2:
                    continue
                self.update_status(status, level='parton')
                self.run_all_jobs(jobs_to_run, mint_step, fixed_order=False)
                self.collect_log_files(jobs_to_run, mint_step)
                jobs_to_run, jobs_to_collect = self.collect_the_results(
                    options, req_acc, jobs_to_run, jobs_to_collect, mint_step,
                    mode, mode_dict[mode], fixed_order=False)
                if mint_step + 1 == 2 and nevents == 0:
                    self.print_summary(options, 2, mode)
                    return

            # Sanity check on the event files. On error, the jobs are resubmitted.
            self.check_event_files(jobs_to_collect)

            if self.cluster_mode == 1:
                # if this is a cluster run, wait 10 sec so that the event files
                # are transferred back
                self.update_status(
                    'Waiting while files are transferred back from the cluster nodes',
                    level='parton')
                time.sleep(10)

            event_norm = self.run_card['event_norm']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

    def create_jobs_to_run(self, options, p_dirs, req_acc, run_mode,
                           integration_step, mode, fixed_order=True):
        """Creates a list of dictionaries with all the jobs to be run."""
        jobs_to_run = []
        if not options['only_generation']:
            # Fresh, new run. Check all the P*/channels.txt files
            # (created by the 'gensym' executable) to set up all the
            # jobs using the default inputs.
            npoints = self.run_card['npoints_FO_grid']
            niters = self.run_card['niters_FO_grid']
            for p_dir in p_dirs:
                try:
                    with open(pjoin(self.me_dir, 'SubProcesses', p_dir, 'channels.txt')) as chan_file:
                        channels = chan_file.readline().split()
                except IOError:
                    logger.warning('No integration channels found for contribution %s' % p_dir)
                    continue
                if fixed_order:
                    lch = len(channels)
                    maxchannels = 20  # combine up to 20 channels in a single job
                    if self.run_card['iappl'] != 0: maxchannels = 1
                    njobs = (int(lch/maxchannels) + 1 if lch % maxchannels != 0
                             else int(lch/maxchannels))
                    for nj in range(1, njobs + 1):
                        job = {}
                        job['p_dir'] = p_dir
                        job['channel'] = str(nj)
                        job['nchans'] = (int(lch/njobs) + 1 if nj <= lch % njobs else int(lch/njobs))
                        job['configs'] = ' '.join(channels[:job['nchans']])
                        del channels[:job['nchans']]
                        job['split'] = 0
                        if req_acc == -1:
                            job['accuracy'] = 0
                            job['niters'] = niters
                            job['npoints'] = npoints
                        elif req_acc > 0:
                            job['accuracy'] = 0.05
                            job['niters'] = 6
                            job['npoints'] = -1
                        else:
                            raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '
                                                'between 0 and 1, or set it equal to -1.')
                        job['mint_mode'] = 0
                        job['run_mode'] = run_mode
                        job['wgt_frac'] = 1.0
                        job['wgt_mult'] = 1.0
                        jobs_to_run.append(job)
                    if channels:
                        raise aMCatNLOError('"channels" is not empty: %s' % channels)
                else:
                    for channel in channels:
                        job = {}
                        job['p_dir'] = p_dir
                        job['channel'] = channel
                        job['split'] = 0
                        job['accuracy'] = 0.03
                        job['niters'] = 12
                        job['npoints'] = -1
                        job['mint_mode'] = 0
                        job['run_mode'] = run_mode
                        job['wgt_frac'] = 1.0
                        jobs_to_run.append(job)
            jobs_to_collect = copy.copy(jobs_to_run)  # these are all the jobs
        else:
            # if options['only_generation'] is true, just read the current jobs from file
            try:
                with open(pjoin(self.me_dir, "SubProcesses", "job_status.pkl"), 'rb') as f:
                    jobs_to_collect = pickle.load(f)
                for job in jobs_to_collect:
                    job['dirname'] = pjoin(self.me_dir, 'SubProcesses',
                                           job['dirname'].rsplit('/SubProcesses/', 1)[1])
                jobs_to_run = copy.copy(jobs_to_collect)
            except:
                raise aMCatNLOError('Cannot reconstruct saved job status in %s' %
                                    pjoin(self.me_dir, 'SubProcesses', 'job_status.pkl'))
            # Update the cross sections and determine which jobs to run next
            if fixed_order:
                jobs_to_run, jobs_to_collect = self.collect_the_results(
                    options, req_acc, jobs_to_run, jobs_to_collect,
                    integration_step, mode, run_mode)
                # Update the integration_step to make sure that nothing will be overwritten
                integration_step = 1
                for job in jobs_to_run:
                    while os.path.exists(pjoin(job['dirname'], 'res_%s.dat' % integration_step)):
                        integration_step = integration_step + 1
                integration_step = integration_step - 1
            else:
                self.append_the_results(jobs_to_collect, integration_step)
        return jobs_to_run, jobs_to_collect, integration_step

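    # Numeric sketch (added for exposition; the helper name is hypothetical) of
    # the channel-chunking arithmetic used above for fixed-order jobs: lch
    # channels are split over njobs jobs of at most maxchannels channels each,
    # with the first (lch % njobs) jobs taking one extra channel (maxchannels
    # drops to 1 when iappl != 0, as above).
    @staticmethod
    def _sketch_chunk_channels(channels, maxchannels=20):
        """Split a list of channel ids into job-sized chunks, as in create_jobs_to_run."""
        channels = list(channels)  # work on a copy
        lch = len(channels)
        njobs = lch // maxchannels + (1 if lch % maxchannels != 0 else 0)
        chunks = []
        for nj in range(1, njobs + 1):
            nchans = lch // njobs + (1 if nj <= lch % njobs else 0)
            chunks.append(channels[:nchans])
            del channels[:nchans]
        return chunks
    # e.g. 47 channels -> 3 jobs with 16, 16 and 15 channels:
    #      [len(c) for c in _sketch_chunk_channels(range(1, 48))] -> [16, 16, 15]
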
    def prepare_directories(self, jobs_to_run, mode, fixed_order=True):
        """Set up the G* directories for running."""
        name_suffix = {'born': 'B', 'all': 'F'}
        for job in jobs_to_run:
            if job['split'] == 0:
                if fixed_order:
                    dirname = pjoin(self.me_dir, 'SubProcesses', job['p_dir'],
                                    job['run_mode'] + '_G' + job['channel'])
                else:
                    dirname = pjoin(self.me_dir, 'SubProcesses', job['p_dir'],
                                    'G' + name_suffix[job['run_mode']] + job['channel'])
            else:
                if fixed_order:
                    dirname = pjoin(self.me_dir, 'SubProcesses', job['p_dir'],
                                    job['run_mode'] + '_G' + job['channel'] + '_' + str(job['split']))
                else:
                    dirname = pjoin(self.me_dir, 'SubProcesses', job['p_dir'],
                                    'G' + name_suffix[job['run_mode']] + job['channel'] + '_' + str(job['split']))
            job['dirname'] = dirname
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            self.write_input_file(job, fixed_order)
            # link or copy the grids from the base directory to the split directory:
            if not fixed_order:
                if job['split'] != 0:
                    for f in ['grid.MC_integer', 'mint_grids', 'res_1']:
                        if not os.path.isfile(pjoin(job['dirname'], f)):
                            files.ln(pjoin(job['dirname'].rsplit("_", 1)[0], f), job['dirname'])
            else:
                if job['split'] != 0:
                    for f in ['grid.MC_integer', 'mint_grids']:
                        files.cp(pjoin(job['dirname'].rsplit("_", 1)[0], f), job['dirname'])

    def write_input_file(self, job, fixed_order):
        """Write the input file for the madevent_mint* executable in the appropriate directory."""
        if fixed_order:
            content = \
"""NPOINTS = %(npoints)s
NITERATIONS = %(niters)s
ACCURACY = %(accuracy)s
ADAPT_GRID = 2
MULTICHANNEL = 1
SUM_HELICITY = 1
NCHANS = %(nchans)s
CHANNEL = %(configs)s
SPLIT = %(split)s
WGT_MULT= %(wgt_mult)s
RUN_MODE = %(run_mode)s
RESTART = %(mint_mode)s
""" \
                % job
        else:
            content = \
"""-1 12 ! points, iterations
%(accuracy)s ! desired fractional accuracy
1 -0.1 ! alpha, beta for Gsoft
-1 -0.1 ! alpha, beta for Gazi
1 ! Suppress amplitude (0 no, 1 yes)?
1 ! Exact helicity sum (0 yes, n = number/event)?
%(channel)s ! Enter Configuration Number:
%(mint_mode)s ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events
1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij
%(run_mode)s ! all, born, real, virt
""" \
                % job
        with open(pjoin(job['dirname'], 'input_app.txt'), 'w') as input_file:
            input_file.write(content)

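    # For reference (added for exposition; the job values below are made up):
    # with a typical fresh fixed-order job dictionary such as
    #   {'npoints': 1000, 'niters': 6, 'accuracy': 0.05, 'nchans': 2,
    #    'configs': '1 2', 'split': 0, 'wgt_mult': 1.0, 'run_mode': 'all',
    #    'mint_mode': 0}
    # the first template above renders an input_app.txt of the form:
    #   NPOINTS = 1000
    #   NITERATIONS = 6
    #   ACCURACY = 0.05
    #   ADAPT_GRID = 2
    #   MULTICHANNEL = 1
    #   SUM_HELICITY = 1
    #   NCHANS = 2
    #   CHANNEL = 1 2
    #   SPLIT = 0
    #   WGT_MULT= 1.0
    #   RUN_MODE = all
    #   RESTART = 0
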
    def run_all_jobs(self, jobs_to_run, integration_step, fixed_order=True):
        """Loops over the jobs_to_run and executes them using the function 'run_exe'."""
        if fixed_order:
            if integration_step == 0:
                self.update_status('Setting up grids', level=None)
            else:
                self.update_status('Refining results, step %i' % integration_step, level=None)
        self.ijob = 0
        name_suffix = {'born': 'B', 'all': 'F'}
        if fixed_order:
            run_type = "Fixed order integration step %s" % integration_step
        else:
            run_type = "MINT step %s" % integration_step
        self.njobs = len(jobs_to_run)
        for job in jobs_to_run:
            executable = 'ajob1'
            if fixed_order:
                arguments = [job['channel'], job['run_mode'],
                             str(job['split']), str(integration_step)]
            else:
                arguments = [job['channel'], name_suffix[job['run_mode']],
                             str(job['split']), str(integration_step)]
            self.run_exe(executable, arguments, run_type,
                         cwd=pjoin(self.me_dir, 'SubProcesses', job['p_dir']))

        if self.cluster_mode == 2:
            time.sleep(1)  # security margin to allow all jobs to be launched
        self.wait_for_complete(run_type)

    def collect_the_results(self, options, req_acc, jobs_to_run, jobs_to_collect,
                            integration_step, mode, run_mode, fixed_order=True):
        """Collect the results, make the HTML pages, print the summary and
        determine if there are more jobs to run. Returns the list of jobs
        that still need to be run, as well as the complete list of jobs
        that need to be collected to get the final answer.
        """
        # Get the results of the current integration/MINT step
        self.append_the_results(jobs_to_run, integration_step)
        self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect, integration_step)
        # Update the HTML pages
        if fixed_order:
            cross, error = self.make_make_all_html_results(folder_names=['%s*' % run_mode],
                                                           jobs=jobs_to_collect)
        else:
            name_suffix = {'born': 'B', 'all': 'F'}
            cross, error = self.make_make_all_html_results(
                folder_names=['G%s*' % name_suffix[run_mode]])
        self.results.add_detail('cross', cross)
        self.results.add_detail('error', error)
        # Combine the grids from split fixed-order jobs
        if fixed_order:
            jobs_to_run = self.combine_split_order_run(jobs_to_run)
        # Set up the jobs for the next iteration/MINT step
        jobs_to_run_new = self.update_jobs_to_run(req_acc, integration_step, jobs_to_run, fixed_order)
        # If there are no more jobs, we are done!
        if fixed_order:
            # Write the jobs_to_collect list to file so that we can
            # restart the jobs later (with the only_generation option)
            with open(pjoin(self.me_dir, "SubProcesses", "job_status.pkl"), 'wb') as f:
                pickle.dump(jobs_to_collect, f)
        # Print the summary
        if (not jobs_to_run_new) and fixed_order:
            # print the final summary of the results (for fixed order)
            scale_pdf_info = self.collect_scale_pdf_info(options, jobs_to_collect)
            self.print_summary(options, integration_step, mode, scale_pdf_info, done=True)
            return jobs_to_run_new, jobs_to_collect
        elif jobs_to_run_new:
            # print an intermediate summary of the results
            scale_pdf_info = []
            self.print_summary(options, integration_step, mode, scale_pdf_info, done=False)
        else:
            # When we are done for (N)LO+PS runs, do not print anything
            # yet. This will be done after the reweighting and the
            # collection of the events.
            scale_pdf_info = []
        # Prepare for the next integration/MINT step
        if (not fixed_order) and integration_step + 1 == 2:
            # Write the jobs_to_collect list to file so that we can
            # restart the jobs later (with the only_generation option)
            with open(pjoin(self.me_dir, "SubProcesses", "job_status.pkl"), 'wb') as f:
                pickle.dump(jobs_to_collect, f)
            # the next step is event generation (mint_step 2)
            jobs_to_run_new, jobs_to_collect_new = \
                self.check_the_need_to_split(jobs_to_run_new, jobs_to_collect)
            self.prepare_directories(jobs_to_run_new, mode, fixed_order)
            self.write_nevents_unweighted_file(jobs_to_collect_new, jobs_to_collect)
            self.write_nevts_files(jobs_to_run_new)
        else:
            if fixed_order and self.run_card['iappl'] == 0 \
                    and self.run_card['req_acc_FO'] > 0:
                jobs_to_run_new, jobs_to_collect = \
                    self.split_jobs_fixed_order(jobs_to_run_new, jobs_to_collect)
            self.prepare_directories(jobs_to_run_new, mode, fixed_order)
            jobs_to_collect_new = jobs_to_collect
        return jobs_to_run_new, jobs_to_collect_new

    def write_nevents_unweighted_file(self, jobs, jobs0events):
        """Writes the nevents_unweighted file in the SubProcesses directory.
        The jobs that will generate 0 events also need to be written, because
        that makes sure that the cross section from those channels is taken
        into account in the event weights (by collect_events.f).
        """
        content = []
        for job in jobs:
            path = pjoin(job['dirname'].split('/')[-2], job['dirname'].split('/')[-1])
            lhefile = pjoin(path, 'events.lhe')
            content.append(' %s %d %9e %9e' %
                           (lhefile.ljust(40), job['nevents'],
                            job['resultABS'] * job['wgt_frac'], job['wgt_frac']))
        for job in jobs0events:
            if job['nevents'] == 0:
                path = pjoin(job['dirname'].split('/')[-2], job['dirname'].split('/')[-1])
                lhefile = pjoin(path, 'events.lhe')
                content.append(' %s %d %9e %9e' %
                               (lhefile.ljust(40), job['nevents'], job['resultABS'], 1.))
        with open(pjoin(self.me_dir, 'SubProcesses', "nevents_unweighted"), 'w') as f:
            f.write('\n'.join(content) + '\n')

    def write_nevts_files(self, jobs):
        """Write the nevts files in the SubProcesses/P*/G*/ directories."""
        for job in jobs:
            with open(pjoin(job['dirname'], 'nevts'), 'w') as f:
                if self.run_card['event_norm'].lower() == 'bias':
                    f.write('%i %f\n' % (job['nevents'], self.cross_sect_dict['xseca']))
                else:
                    f.write('%i\n' % job['nevents'])

    def combine_split_order_run(self, jobs_to_run):
        """Combines jobs and grids from split jobs that have been run."""
        # Combine the jobs that need to be combined in job groups:
        # simply the ones that have the same p_dir and the same channel.
        jobgroups_to_combine = []
        jobs_to_run_new = []
        for job in jobs_to_run:
            if job['split'] == 0:
                job['combined'] = 1
                jobs_to_run_new.append(job)  # this job wasn't split
            elif job['split'] == 1:
                jobgroups_to_combine.append(filter(lambda j: j['p_dir'] == job['p_dir'] and
                                                   j['channel'] == job['channel'], jobs_to_run))
            else:
                continue
        for job_group in jobgroups_to_combine:
            # Combine the grids (mint-grids & MC-integer grids) first
            self.combine_split_order_grids(job_group)
            jobs_to_run_new.append(self.combine_split_order_jobs(job_group))
        return jobs_to_run_new

    def combine_split_order_jobs(self, job_group):
        """Combine the jobs in job_group and return a single summed job."""
        # first make a copy of one of the jobs in the group
        sum_job = copy.copy(job_group[0])
        # update the information to have a 'non-split' job:
        sum_job['dirname'] = pjoin(sum_job['dirname'].rsplit('_', 1)[0])
        sum_job['split'] = 0
        sum_job['wgt_mult'] = 1.0
        sum_job['combined'] = len(job_group)
        # information to be summed:
        keys = ['niters_done', 'npoints_done', 'niters', 'npoints',
                'result', 'resultABS', 'time_spend']
        # information to be summed in quadrature:
        keys2 = ['error', 'errorABS']
        for key in keys2:
            sum_job[key] = math.pow(sum_job[key], 2)
        # loop over the jobs and sum the information
        for i, job in enumerate(job_group):
            if i == 0: continue  # skip the first
            for key in keys:
                sum_job[key] += job[key]
            for key in keys2:
                sum_job[key] += math.pow(job[key], 2)
        for key in keys2:
            sum_job[key] = math.sqrt(sum_job[key])
        sum_job['err_percABS'] = sum_job['errorABS'] / sum_job['resultABS'] * 100.
        sum_job['err_perc'] = sum_job['error'] / sum_job['result'] * 100.
        sum_job['niters'] = int(sum_job['niters_done'] / len(job_group))
        sum_job['niters_done'] = int(sum_job['niters_done'] / len(job_group))
        return sum_job

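    # Minimal numeric sketch (added for exposition; the helper name is
    # hypothetical) of the combination rule used above: results add linearly,
    # while statistical errors add in quadrature.
    @staticmethod
    def _sketch_combine_errors(results, errors):
        """Return (sum of results, quadrature sum of errors)."""
        total = sum(results)
        error = math.sqrt(sum(err ** 2 for err in errors))
        return total, error
    # e.g. _sketch_combine_errors([1.0, 2.0], [0.3, 0.4]) -> (3.0, 0.5)
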
    def combine_split_order_grids(self, job_group):
        """Combines the mint_grids and MC-integer grids from the split-order
        jobs (fixed order only).
        """
        files_mint_grids = []
        files_MC_integer = []
        location = None
        for job in job_group:
            files_mint_grids.append(pjoin(job['dirname'], 'mint_grids'))
            files_MC_integer.append(pjoin(job['dirname'], 'grid.MC_integer'))
            if not location:
                location = pjoin(job['dirname'].rsplit('_', 1)[0])
            else:
                if location != pjoin(job['dirname'].rsplit('_', 1)[0]):
                    raise aMCatNLOError('Not all jobs have the same location. '
                                        'Cannot combine them.')
        # We need to average the grids (both the xgrids, the ave_virt and the
        # MC_integer grids), but sum the cross-section info. The latter is the
        # only line that contains integers.
        for j, fs in enumerate([files_mint_grids, files_MC_integer]):
            linesoffiles = []
            for f in fs:
                with open(f, 'r+') as fi:
                    linesoffiles.append(fi.readlines())
            to_write = []
            for rowgrp in zip(*linesoffiles):
                try:
                    # check that the last element on the line is an integer
                    # (this raises ValueError if it is not). If it is, this is
                    # the line that contains information that needs to be
                    # summed; all other lines can be averaged.
                    is_integer = [[int(row.strip().split()[-1])] for row in rowgrp]
                    floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
                    floatgrps = zip(*floatsbyfile)
                    special = []
                    for i, floatgrp in enumerate(floatgrps):
                        if i == 0:  # sum the X-sec
                            special.append(sum(floatgrp))
                        elif i == 1:  # sum the unc. in quadrature
                            special.append(math.sqrt(sum([err ** 2 for err in floatgrp])))
                        elif i == 2:  # average the number of PS points per iteration
                            special.append(int(sum(floatgrp) / len(floatgrp)))
                        elif i == 3:  # sum the number of iterations
                            special.append(int(sum(floatgrp)))
                        elif i == 4:  # average the nhits_in_grids
                            special.append(int(sum(floatgrp) / len(floatgrp)))
                        else:
                            raise aMCatNLOError('"mint_grids" files not in the correct format. '
                                                'Cannot combine them.')
                    to_write.append(" ".join(str(s) for s in special) + "\n")
                except ValueError:
                    # not the integer line: just average all the columns
                    floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
                    floatgrps = zip(*floatsbyfile)
                    averages = [sum(floatgrp) / len(floatgrp) for floatgrp in floatgrps]
                    to_write.append(" ".join(str(a) for a in averages) + "\n")
            # write the combined data over the master location
            if j == 0:
                with open(pjoin(location, 'mint_grids'), 'w') as f:
                    f.writelines(to_write)
            elif j == 1:
                with open(pjoin(location, 'grid.MC_integer'), 'w') as f:
                    f.writelines(to_write)

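    # Sketch (added for exposition; the helper name is hypothetical) of the
    # per-line rule applied above when merging the split grid files: if the
    # last token of corresponding lines is an integer, the line carries the
    # cross-section/iteration info and gets the special treatment; otherwise
    # every column is simply averaged across the files.
    @staticmethod
    def _sketch_combine_grid_lines(rowgrp):
        """Combine one group of corresponding lines from split 'mint_grids' files."""
        try:
            [int(row.strip().split()[-1]) for row in rowgrp]  # is this the integer line?
        except ValueError:
            floats = [[float(a) for a in row.split()] for row in rowgrp]
            return [sum(col) / len(col) for col in zip(*floats)]  # average everything
        floats = [[float(a) for a in row.split()] for row in rowgrp]
        cols = list(zip(*floats))
        return [sum(cols[0]),                                # sum the X-sec
                math.sqrt(sum(e ** 2 for e in cols[1])),     # sum the unc. in quadrature
                int(sum(cols[2]) / len(cols[2])),            # average the PS points per iteration
                int(sum(cols[3])),                           # sum the iterations
                int(sum(cols[4]) / len(cols[4]))]            # average the nhits_in_grids
    # e.g. _sketch_combine_grid_lines(['1. 0.3 10 2 100', '2. 0.4 10 2 200'])
    #      -> [3.0, 0.5, 10, 4, 150]
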
    def split_jobs_fixed_order(self, jobs_to_run, jobs_to_collect):
        """Looks in jobs_to_run to see if there is a need to split the jobs,
        depending on the expected time they take. Updates jobs_to_run and
        jobs_to_collect to replace the split job by its splits.
        """
        # determine the number of jobs we should have (this is per p_dir)
        if self.options['run_mode'] == 2:
            nb_submit = int(self.options['nb_core'])
        elif self.options['run_mode'] == 1:
            nb_submit = int(self.options['cluster_size'])
        else:
            nb_submit = 1
        # total expected aggregated running time
        time_expected = 0
        for job in jobs_to_run:
            time_expected += job['time_spend'] * (job['niters'] * job['npoints']) / \
                             (job['niters_done'] * job['npoints_done'])
        # this means that, in ideal conditions, we should expect the
        # following time per job
        time_per_job = time_expected / (nb_submit * (1 + len(jobs_to_run) / 2))
        jobs_to_run_new = []
        jobs_to_collect_new = copy.copy(jobs_to_collect)
        for job in jobs_to_run:
            # remove the current job from jobs_to_collect. Make sure to
            # remove all the split ones in case the original job had been
            # a split one (before it was re-combined)
            for j in filter(lambda j: j['p_dir'] == job['p_dir'] and
                            j['channel'] == job['channel'], jobs_to_collect_new):
                jobs_to_collect_new.remove(j)
            time_expected = job['time_spend'] * (job['niters'] * job['npoints']) / \
                            (job['niters_done'] * job['npoints_done'])
            # if the expected time for this job is (much) larger than the
            # time spent in the previous iteration, and larger than the
            # expected time per job, split it
            if time_expected > max(2 * job['time_spend'] / job['combined'], time_per_job):
                # determine the number of splits needed
                nsplit = min(max(int(time_expected / max(2 * job['time_spend'] / job['combined'],
                                                         time_per_job)), 2), nb_submit)
                for i in range(1, nsplit + 1):
                    job_new = copy.copy(job)
                    job_new['split'] = i
                    job_new['wgt_mult'] = 1. / float(nsplit)
                    job_new['dirname'] = job['dirname'] + '_%i' % job_new['split']
                    job_new['accuracy'] = min(job['accuracy'] * math.sqrt(float(nsplit)), 0.1)
                    if nsplit >= job['niters']:
                        job_new['npoints'] = int(job['npoints'] * job['niters'] / nsplit)
                        job_new['niters'] = 1
                    else:
                        job_new['npoints'] = int(job['npoints'] / nsplit)
                    jobs_to_collect_new.append(job_new)
                    jobs_to_run_new.append(job_new)
            else:
                jobs_to_collect_new.append(job)
                jobs_to_run_new.append(job)
        return jobs_to_run_new, jobs_to_collect_new

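    # Numeric sketch (added for exposition; the helper name is hypothetical) of
    # the splitting criterion above: a job is split when its projected runtime
    # exceeds both twice its previous runtime (per combined sub-job) and the
    # ideal time per job; the number of splits is capped by the number of
    # available slots.
    @staticmethod
    def _sketch_nsplit(time_expected, time_spend, combined, time_per_job, nb_submit):
        """Return the number of splits for one fixed-order job."""
        threshold = max(2 * time_spend / combined, time_per_job)
        if time_expected <= threshold:
            return 1  # no need to split
        return min(max(int(time_expected / threshold), 2), nb_submit)
    # e.g. _sketch_nsplit(3600., 300., 1, 200., 8) -> 6   (3600/600 = 6 splits)
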
    def check_the_need_to_split(self, jobs_to_run, jobs_to_collect):
        """Looks in jobs_to_run to see if there is a need to split the event
        generation step. Updates jobs_to_run and jobs_to_collect to replace
        the split job by its splits. Also removes jobs that do not need to
        generate any events.
        """
        nevt_job = self.run_card['nevt_job']
        if nevt_job > 0:
            jobs_to_collect_new = copy.copy(jobs_to_collect)
            for job in jobs_to_run:
                nevents = job['nevents']
                if nevents == 0:
                    jobs_to_collect_new.remove(job)
                elif nevents > nevt_job:
                    jobs_to_collect_new.remove(job)
                    if nevents % nevt_job != 0:
                        nsplit = int(nevents / nevt_job) + 1
                    else:
                        nsplit = int(nevents / nevt_job)
                    for i in range(1, nsplit + 1):
                        job_new = copy.copy(job)
                        left_over = nevents % nsplit
                        if i <= left_over:
                            job_new['nevents'] = int(nevents / nsplit) + 1
                        else:
                            job_new['nevents'] = int(nevents / nsplit)
                        job_new['wgt_frac'] = float(job_new['nevents']) / float(nevents)
                        job_new['split'] = i
                        job_new['dirname'] = job['dirname'] + '_%i' % job_new['split']
                        jobs_to_collect_new.append(job_new)
            jobs_to_run_new = copy.copy(jobs_to_collect_new)
        else:
            jobs_to_run_new = copy.copy(jobs_to_collect)
            for job in jobs_to_collect:
                if job['nevents'] == 0:
                    jobs_to_run_new.remove(job)
            jobs_to_collect_new = copy.copy(jobs_to_run_new)

        return jobs_to_run_new, jobs_to_collect_new

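    # Sketch (added for exposition; the helper name is hypothetical) of the
    # event partitioning used above: nevents is divided over nsplit sub-jobs as
    # evenly as possible, with the first (nevents % nsplit) sub-jobs taking one
    # extra event; each sub-job carries its share of the weight via wgt_frac.
    @staticmethod
    def _sketch_partition_events(nevents, nevt_job):
        """Return (events per sub-job, wgt_frac per sub-job)."""
        nsplit = nevents // nevt_job + (1 if nevents % nevt_job != 0 else 0)
        left_over = nevents % nsplit
        splits = [nevents // nsplit + (1 if i <= left_over else 0)
                  for i in range(1, nsplit + 1)]
        wgt_fracs = [float(n) / float(nevents) for n in splits]
        return splits, wgt_fracs
    # e.g. _sketch_partition_events(10001, 3000)
    #      -> ([2501, 2500, 2500, 2500], [0.25007..., 0.24997..., ...])
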
    def update_jobs_to_run(self, req_acc, step, jobs, fixed_order=True):
        """
        For (N)LO+PS: determines the number of events and/or the required
        accuracy per job.
        For fixed order: determines which jobs need higher precision and
        returns those with the newly requested precision.
        """
        err = self.cross_sect_dict['errt']
        tot = self.cross_sect_dict['xsect']
        errABS = self.cross_sect_dict['erra']
        totABS = self.cross_sect_dict['xseca']
        jobs_new = []
        if fixed_order:
            if req_acc == -1:
                if step + 1 == 1:
                    npoints = self.run_card['npoints_FO']
                    niters = self.run_card['niters_FO']
                    for job in jobs:
                        job['mint_mode'] = -1
                        job['niters'] = niters
                        job['npoints'] = npoints
                        jobs_new.append(job)
                elif step + 1 == 2:
                    pass
                elif step + 1 > 2:
                    raise aMCatNLOError('Cannot determine the number of iterations and PS points '
                                        'for integration step %i' % step)
            elif (req_acc > 0 and err / abs(tot) > req_acc * 1.2) or step <= 0:
                req_accABS = req_acc * abs(tot) / totABS  # overall relative required accuracy on the ABS X-sec
                for job in jobs:
                    job['mint_mode'] = -1
                    # determine the relative required accuracy on the ABS X-sec for this job
                    job['accuracy'] = req_accABS * math.sqrt(totABS / job['resultABS'])
                    # If already accurate enough, skip the job (except when doing the first
                    # step of an iappl=2 run: we need to fill all the applgrid grids!)
                    if (job['accuracy'] > job['errorABS'] / job['resultABS'] and step != 0) \
                            and not (step == -1 and self.run_card['iappl'] == 2):
                        continue
                    # Update the number of PS points based on errorABS, ncall and accuracy
                    itmax_fl = job['niters_done'] * math.pow(job['errorABS'] /
                                                             (job['accuracy'] * job['resultABS']), 2)
                    if itmax_fl <= 4.0:
                        job['niters'] = max(int(round(itmax_fl)), 2)
                        job['npoints'] = job['npoints_done'] * 2
                    elif itmax_fl > 4.0 and itmax_fl <= 16.0:
                        job['niters'] = 4
                        job['npoints'] = int(round(job['npoints_done'] * itmax_fl / 4.0)) * 2
                    else:
                        if itmax_fl > 100.0: itmax_fl = 50.0
                        job['niters'] = int(round(math.sqrt(itmax_fl)))
                        job['npoints'] = int(round(job['npoints_done'] * itmax_fl /
                                                   round(math.sqrt(itmax_fl)))) * 2
                    # Add the job to the list of jobs that need to be run
                    jobs_new.append(job)
            return jobs_new
        elif step + 1 <= 2:
            nevents = self.run_card['nevents']
            # Total required accuracy for the upper bounding envelope
            if req_acc < 0:
                req_acc2_inv = nevents
            else:
                req_acc2_inv = 1 / (req_acc * req_acc)
            if step + 1 == 1 or step + 1 == 2:
                # determine the required accuracy for each of the jobs for MINT step 1
                for job in jobs:
                    accuracy = min(math.sqrt(totABS / (req_acc2_inv * job['resultABS'])), 0.2)
                    job['accuracy'] = accuracy
            if step + 1 == 2:
                # Randomly (based on the relative ABS X-sec of the job) determine the
                # number of events each job needs to generate for MINT step 2.
                r = self.get_randinit_seed()
                random.seed(r)
                totevts = nevents
                for job in jobs:
                    job['nevents'] = 0
                while totevts:
                    target = random.random() * totABS
                    crosssum = 0.
                    i = 0
                    while i < len(jobs) and crosssum < target:
                        job = jobs[i]
                        crosssum += job['resultABS']
                        i += 1
                    totevts -= 1
                    i -= 1
                    jobs[i]['nevents'] += 1
            for job in jobs:
                job['mint_mode'] = step + 1  # next step
            return jobs
        else:
            return []

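    # Sketch (added for exposition; the helper name is hypothetical) of the
    # stochastic event assignment performed above for MINT step 2: each of the
    # requested events is assigned to a channel with probability proportional
    # to its absolute cross section, by sampling a point on the cumulative
    # |xsec| distribution.
    @staticmethod
    def _sketch_assign_events(resultsABS, nevents, seed=0):
        """Distribute nevents among channels proportionally to resultsABS."""
        random.seed(seed)
        totABS = sum(resultsABS)
        nevts = [0] * len(resultsABS)
        for _ in range(nevents):
            target = random.random() * totABS
            crosssum = 0.
            i = 0
            while i < len(resultsABS) and crosssum < target:
                crosssum += resultsABS[i]
                i += 1
            nevts[max(i - 1, 0)] += 1  # max() guards the target==0 corner case
        return nevts
    # e.g. _sketch_assign_events([3., 1.], 1000) -> roughly [750, 250]
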
    def get_randinit_seed(self):
        """Get the random-number seed from the randinit file."""
        with open(pjoin(self.me_dir, "SubProcesses", "randinit")) as randinit:
            # the format of the file is "r=%d"
            iseed = int(randinit.read()[2:])
        return iseed

    def append_the_results(self, jobs, integration_step):
        """Appends the results for each of the jobs in the job list."""
        error_found = False
        for job in jobs:
            try:
                if integration_step >= 0:
                    with open(pjoin(job['dirname'], 'res_%s.dat' % integration_step)) as res_file:
                        results = res_file.readline().split()
                else:
                    # We should only get here when doing fixed order with the
                    # 'only_generation' option set to True. Take the results
                    # from the final run done.
                    with open(pjoin(job['dirname'], 'res.dat')) as res_file:
                        results = res_file.readline().split()
            except IOError:
                if not error_found:
                    error_found = True
                    error_log = []
                error_log.append(pjoin(job['dirname'], 'log.txt'))
                continue
            job['resultABS'] = float(results[0])
            job['errorABS'] = float(results[1])
            job['result'] = float(results[2])
            job['error'] = float(results[3])
            job['niters_done'] = int(results[4])
            job['npoints_done'] = int(results[5])
            job['time_spend'] = float(results[6])
            job['err_percABS'] = job['errorABS'] / job['resultABS'] * 100.
            job['err_perc'] = job['error'] / job['result'] * 100.
        if error_found:
            raise aMCatNLOError('An error occurred during the collection of results.\n' +
                                'Please check the .log files inside the directories which failed:\n' +
                                '\n'.join(error_log) + '\n')

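    # Sketch (added for exposition; the helper name is hypothetical) of the
    # layout of the res_*.dat files read above: a single line of seven numbers,
    # in the order used by the assignments in append_the_results.
    @staticmethod
    def _sketch_parse_res_line(line):
        """Parse one res.dat line into a result dictionary."""
        results = line.split()
        return {'resultABS': float(results[0]), 'errorABS': float(results[1]),
                'result': float(results[2]), 'error': float(results[3]),
                'niters_done': int(results[4]), 'npoints_done': int(results[5]),
                'time_spend': float(results[6])}
    # e.g. _sketch_parse_res_line('1.5e0 1.0e-2 1.4e0 1.1e-2 4 1000 12.3')
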
    def write_res_txt_file(self, jobs, integration_step):
        """Writes the res.txt files in the SubProcesses dir."""
        jobs.sort(key=lambda job: -job['errorABS'])
        content = []
        content.append('\n\nCross section per integration channel:')
        for job in jobs:
            content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job)
        content.append('\n\nABS cross section per integration channel:')
        for job in jobs:
            content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job)
        totABS = 0
        errABS = 0
        tot = 0
        err = 0
        for job in jobs:
            totABS += job['resultABS'] * job['wgt_frac']
            errABS += math.pow(job['errorABS'], 2) * job['wgt_frac']
            tot += job['result'] * job['wgt_frac']
            err += math.pow(job['error'], 2) * job['wgt_frac']
        if jobs:
            content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %
                           (totABS, math.sqrt(errABS), math.sqrt(errABS) / totABS * 100.,
                            tot, math.sqrt(err), math.sqrt(err) / tot * 100.))
        with open(pjoin(self.me_dir, 'SubProcesses', 'res_%s.txt' % integration_step), 'w') as res_file:
            res_file.write('\n'.join(content))
        randinit = self.get_randinit_seed()
        return {'xsect': tot, 'xseca': totABS, 'errt': math.sqrt(err),
                'erra': math.sqrt(errABS), 'randinit': randinit}

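    # Numeric sketch (added for exposition; the helper name is hypothetical) of
    # the totals computed above: results are summed with their wgt_frac, while
    # errors are summed in quadrature (with wgt_frac entering the quadrature
    # sum linearly, exactly as in the loop above).
    @staticmethod
    def _sketch_total_and_error(jobs):
        """Return (weighted total, quadrature-summed error) for a job list."""
        tot = sum(j['result'] * j['wgt_frac'] for j in jobs)
        err = math.sqrt(sum(j['error'] ** 2 * j['wgt_frac'] for j in jobs))
        return tot, err
    # e.g. _sketch_total_and_error([{'result': 1., 'error': .1, 'wgt_frac': 1.},
    #                               {'result': 2., 'error': .2, 'wgt_frac': 1.}])
    #      -> (3.0, 0.2236...)
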
    def collect_scale_pdf_info(self, options, jobs):
        """Read the scale_pdf_dependence.dat files and collect their results."""
        scale_pdf_info = []
        if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
                len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1:
            evt_files = []
            evt_wghts = []
            for job in jobs:
                evt_files.append(pjoin(job['dirname'], 'scale_pdf_dependence.dat'))
                evt_wghts.append(job['wgt_frac'])
            scale_pdf_info = self.pdf_scale_from_reweighting(evt_files, evt_wghts)
        return scale_pdf_info

    def combine_plots_FO(self, folder_name, jobs):
        """Combines the plots and puts them in the Events/run* directory."""
        devnull = open(os.devnull, 'w')

        if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer':
            topfiles = []
            for job in jobs:
                if job['dirname'].endswith('.top'):
                    topfiles.append(job['dirname'])
                else:
                    topfiles.append(pjoin(job['dirname'], 'MADatNLO.top'))
            misc.call(['./combine_plots_FO.sh'] + topfiles,
                      stdout=devnull,
                      cwd=pjoin(self.me_dir, 'SubProcesses'))
            files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
                     pjoin(self.me_dir, 'Events', self.run_name))
            logger.info('The results of this run and the TopDrawer file with the plots'
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'hwu':
            out = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO')
            self.combine_plots_HwU(jobs, out)
            try:
                misc.call(['gnuplot', 'MADatNLO.gnuplot'],
                          stdout=devnull, stderr=devnull,
                          cwd=pjoin(self.me_dir, 'Events', self.run_name))
            except Exception:
                pass
            logger.info('The results of this run and the HwU and GnuPlot files with the plots'
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'root':
            rootfiles = []
            for job in jobs:
                if job['dirname'].endswith('.root'):
                    rootfiles.append(job['dirname'])
                else:
                    rootfiles.append(pjoin(job['dirname'], 'MADatNLO.root'))
            misc.call(['./combine_root.sh'] + folder_name + rootfiles,
                      stdout=devnull,
                      cwd=pjoin(self.me_dir, 'SubProcesses'))
            files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
                     pjoin(self.me_dir, 'Events', self.run_name))
            logger.info('The results of this run and the ROOT file with the plots'
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'lhe':
            self.combine_FO_lhe(jobs)
            logger.info('The results of this run and the LHE file (to be used for plotting only)'
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        else:
            logger.info('The results of this run'
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))

    def combine_FO_lhe(self, jobs):
        """Combine the various lhe files generated in each directory.
        There are three steps:
          1) write the banner
          2) reweight each sample by the factor written at the end of each file
          3) concatenate the new files (and gzip them).
        """

        logger.info('Combining lhe events for plotting analysis')
        start = time.time()
        self.run_card['fo_lhe_postprocessing'] = [i.lower() for i in self.run_card['fo_lhe_postprocessing']]
        output = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
        if os.path.exists(output):
            os.remove(output)

        # 1. write the banner
        text = open(pjoin(jobs[0]['dirname'], 'header.txt'), 'r').read()
        i1, i2 = text.find('<initrwgt>'), text.find('</initrwgt>')
        self.banner['initrwgt'] = text[10 + i1:i2]
        # The <init> block looks like:
        #   <init>
        #   2212 2212 6.500000e+03 6.500000e+03 0 0 247000 247000 -4 1
        #   8.430000e+02 2.132160e+00 8.430000e+02 1
        #   <generator name='MadGraph5_aMC@NLO' version='2.5.2'>please cite 1405.0301 </generator>
        #   </init>
        cross = sum(j['result'] for j in jobs)
        error = math.sqrt(sum(j['error'] ** 2 for j in jobs))  # sum the errors in quadrature
        self.banner['init'] = "0 0 0e0 0e0 0 0 0 0 -4 1\n %s %s %s 1" % (cross, error, cross)
        self.banner.write(output[:-3], close_tag=False)
        misc.gzip(output[:-3])

        fsock = lhe_parser.EventFile(output, 'a')
        if 'nogrouping' in self.run_card['fo_lhe_postprocessing']:
            fsock.eventgroup = False
        else:
            fsock.eventgroup = True

        if 'norandom' in self.run_card['fo_lhe_postprocessing']:
            for job in jobs:
                dirname = job['dirname']
                # read the last line
                lastline = misc.BackRead(pjoin(dirname, 'events.lhe')).readline()
                nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
                # get the normalisation ratio
                ratio = cross / sumwgt
                lhe = lhe_parser.EventFile(pjoin(dirname, 'events.lhe'))
                lhe.eventgroup = True  # read the events by event group
                for eventsgroup in lhe:
                    neweventsgroup = []
                    for i, event in enumerate(eventsgroup):
                        event.rescale_weights(ratio)
                        if i > 0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
                                and event == neweventsgroup[-1]:
                            neweventsgroup[-1].wgt += event.wgt
                            for key in event.reweight_data:
                                neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
                        else:
                            neweventsgroup.append(event)
                    fsock.write_events(neweventsgroup)
                lhe.close()
                os.remove(pjoin(dirname, 'events.lhe'))
        else:
            misc.sprint('need to combine %s event files' % len(jobs))
            globallhe = lhe_parser.MultiEventFile()
            globallhe.eventgroup = True
            for job in jobs:
                dirname = job['dirname']
                lastline = misc.BackRead(pjoin(dirname, 'events.lhe')).readline()
                nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
                globallhe.add(pjoin(dirname, 'events.lhe'), cross, 0, cross,
                              nb_event=int(nb_event), scale=cross / sumwgt)
            for eventsgroup in globallhe:
                neweventsgroup = []
                for i, event in enumerate(eventsgroup):
                    event.rescale_weights(event.sample_scale)
                    if i > 0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
                            and event == neweventsgroup[-1]:
                        neweventsgroup[-1].wgt += event.wgt
                        for key in event.reweight_data:
                            neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
                    else:
                        neweventsgroup.append(event)
                fsock.write_events(neweventsgroup)
            globallhe.close()
            fsock.write('</LesHouchesEvents>\n')
            fsock.close()
            for job in jobs:
                dirname = job['dirname']
                os.remove(pjoin(dirname, 'events.lhe'))
        misc.sprint('combining lhe files done in ', time.time() - start)

    def combine_plots_HwU(self, jobs, out, normalisation=None):
        """Sums all the plots in the HwU format."""
        logger.debug('Combining HwU plots.')

        command = []
        command.append(pjoin(self.me_dir, 'bin', 'internal', 'histograms.py'))
        for job in jobs:
            if job['dirname'].endswith('.HwU'):
                command.append(job['dirname'])
            else:
                command.append(pjoin(job['dirname'], 'MADatNLO.HwU'))
        command.append("--out=" + out)
        command.append("--gnuplot")
        command.append("--band=[]")
        command.append("--lhapdf-config=" + self.options['lhapdf'])
        if normalisation:
            command.append("--multiply=" + (','.join([str(n) for n in normalisation])))
        command.append("--sum")
        command.append("--keep_all_weights")
        command.append("--no_open")

        p = misc.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                       cwd=self.me_dir)

        while p.poll() is None:
            line = p.stdout.readline()
            if any(t in line for t in ['INFO:', 'WARNING:', 'CRITICAL:', 'ERROR:', 'KEEP:']):
                print line[:-1]
            elif __debug__ and line:
                logger.debug(line[:-1])

    def applgrid_combine(self, cross, error, jobs):
        """Combines the APPLgrids in all the SubProcesses/P*/all_G*/ directories."""
        logger.debug('Combining APPLgrids \n')
        applcomb = pjoin(self.options['applgrid'].rstrip('applgrid-config'),
                         'applgrid-combine')
        all_jobs = []
        for job in jobs:
            all_jobs.append(job['dirname'])
        ngrids = len(all_jobs)
        nobs = len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")])
        for obs in range(0, nobs):
            gdir = [pjoin(job, "grid_obs_" + str(obs) + "_out.root") for job in all_jobs]
            # combine the APPLgrids from the different channels for observable 'obs'
            if self.run_card["iappl"] == 1:
                misc.call([applcomb, '-o', pjoin(self.me_dir, "Events", self.run_name,
                           "aMCfast_obs_" + str(obs) + "_starting_grid.root"), '--optimise'] + gdir)
            elif self.run_card["iappl"] == 2:
                unc2_inv = pow(cross / error, 2)
                unc2_inv_ngrids = pow(cross / error, 2) * ngrids
                misc.call([applcomb, '-o', pjoin(self.me_dir, "Events",
                           self.run_name, "aMCfast_obs_" + str(obs) + ".root"), '-s',
                           str(unc2_inv), '--weight', str(unc2_inv)] + gdir)
                for job in all_jobs:
                    os.remove(pjoin(job, "grid_obs_" + str(obs) + "_in.root"))
            else:
                raise aMCatNLOError('The iappl parameter can only be 0, 1 or 2')
            # after combining, delete the original grids
            for ggdir in gdir:
                os.remove(ggdir)

    def applgrid_distribute(self, options, mode, p_dirs):
        """Distributes the APPLgrids ready to be filled by a second run of the code."""
        # if no appl_start_grid argument is given, guess it from the time
        # stamps of the starting grid files
        if not ('appl_start_grid' in options.keys() and options['appl_start_grid']):
            gfiles = misc.glob(pjoin('*', 'aMCfast_obs_0_starting_grid.root'),
                               pjoin(self.me_dir, 'Events'))

            time_stamps = {}
            for root_file in gfiles:
                time_stamps[root_file] = os.path.getmtime(root_file)
            options['appl_start_grid'] = \
                max(time_stamps.iterkeys(), key=(lambda key:
                    time_stamps[key])).split('/')[-2]
            logger.info('No --appl_start_grid option given. '
                        'Guessing that the starting grid from run "%s" should be used.'
                        % options['appl_start_grid'])

        if 'appl_start_grid' in options.keys() and options['appl_start_grid']:
            self.appl_start_grid = options['appl_start_grid']
            start_grid_dir = pjoin(self.me_dir, 'Events', self.appl_start_grid)
            # check that this dir exists and that at least one grid file is there
            if not os.path.exists(pjoin(start_grid_dir,
                                        'aMCfast_obs_0_starting_grid.root')):
                raise self.InvalidCmd('APPLgrid file not found: %s' %
                                      pjoin(start_grid_dir, 'aMCfast_obs_0_starting_grid.root'))
            else:
                all_grids = [pjoin(start_grid_dir, name) for name in os.listdir(
                             start_grid_dir) if name.endswith("_starting_grid.root")]
                nobs = len(all_grids)
                gstring = " ".join(all_grids)
        if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
            raise self.InvalidCmd('No APPLgrid name currently defined. '
                                  'Please provide this information.')
        # copy the grids to all the relevant directories
        for pdir in p_dirs:
            g_dirs = [file for file in os.listdir(pjoin(self.me_dir,
                      "SubProcesses", pdir)) if file.startswith(mode + '_G') and
                      os.path.isdir(pjoin(self.me_dir, "SubProcesses", pdir, file))]
            for g_dir in g_dirs:
                for grid in all_grids:
                    obs = grid.split('_')[-3]
                    files.cp(grid, pjoin(self.me_dir, "SubProcesses", pdir, g_dir,
                                         'grid_obs_' + obs + '_in.root'))

    def collect_log_files(self, jobs, integration_step):
        """Collect the log files and put them in a single, html-friendly file
        inside the Events/run_.../ directory."""
        log_file = pjoin(self.me_dir, 'Events', self.run_name,
                         'alllogs_%d.html' % integration_step)
        outfile = open(log_file, 'w')

        content = ''
        content += '<HTML><BODY>\n<font face="courier" size=2>'
        for job in jobs:
            # put an anchor
            log = pjoin(job['dirname'], 'log_MINT%s.txt' % integration_step)
            content += '<a name=%s></a>\n' % (os.path.dirname(log).replace(
                pjoin(self.me_dir, 'SubProcesses'), ''))
            # and put a nice header
            content += '<font color="red">\n'
            content += '<br>LOG file for integration channel %s, %s <br>' % \
                       (os.path.dirname(log).replace(pjoin(self.me_dir,
                                                           'SubProcesses'), ''),
                        integration_step)
            content += '</font>\n'
            # then just flush the content of the small log into the big log;
            # the PRE tag prints everything verbatim
            with open(log) as l:
                content += '<PRE>\n' + l.read() + '\n</PRE>'
            content += '<br>\n'
            outfile.write(content)
            content = ''

        outfile.write('</font>\n</BODY></HTML>\n')
        outfile.close()

    def finalise_run_FO(self, folder_name, jobs):
        """Combine the plots and put the res*.txt files in the Events/run*/ folder."""
        # Copy the res_*.txt files to the Events/run* folder
        res_files = misc.glob('res_*.txt', pjoin(self.me_dir, 'SubProcesses'))
        for res_file in res_files:
            files.mv(res_file, pjoin(self.me_dir, 'Events', self.run_name))
        # Collect the plots and put them in the Events/run* folder
        self.combine_plots_FO(folder_name, jobs)
        # If the applgrid machinery is used, also combine those grids
        # and put them in the Events/run* folder
        if self.run_card['iappl'] != 0:
            cross = self.cross_sect_dict['xsect']
            error = self.cross_sect_dict['errt']
            self.applgrid_combine(cross, error, jobs)

    def setup_cluster_or_multicore(self):
        """Set up the number of cores for multicore, and the cluster type for cluster runs."""
        if self.cluster_mode == 1:
            cluster_name = self.options['cluster_type']
            try:
                self.cluster = cluster.from_name[cluster_name](**self.options)
            except KeyError:
                # Check if a plugin defines this type of cluster
                # (check for the PLUGIN format)
                cluster_class = misc.from_plugin_import(self.plugin_path,
                                                        'new_cluster', cluster_name,
                                                        info='cluster handling will be done with PLUGIN: %{plug}s')
                if cluster_class:
                    self.cluster = cluster_class(**self.options)

        if self.cluster_mode == 2:
            try:
                import multiprocessing
                if not self.nb_core:
                    try:
                        self.nb_core = int(self.options['nb_core'])
                    except TypeError:
                        self.nb_core = multiprocessing.cpu_count()
                logger.info('Using %d cores' % self.nb_core)
            except ImportError:
                self.nb_core = 1
                logger.warning('Impossible to detect the number of cores => Using one.\n'
                               'Use "set nb_core X" in order to set this number and be able to '
                               'run in multicore.')

            self.cluster = cluster.MultiCore(**self.options)

    def clean_previous_results(self, options, p_dirs, folder_name):
        """Clean previous results:
        o. If doing only the reweighting step, do not delete anything and return directly.
        o. Always remove all the G*_* files (from split event generation).
        o. Remove the G* (or born_G* or all_G*) directories only when NOT doing
           only_generation or reweightonly."""
        if options['reweightonly']:
            return
        if not options['only_generation']:
            self.update_status('Cleaning previous results', level=None)
        for dir in p_dirs:
            # find the old folders to be removed
            for obj in folder_name:
                # list all the G* (or all_G* or born_G*) directories
                to_rm = [file for file in
                         os.listdir(pjoin(self.me_dir, 'SubProcesses', dir))
                         if file.startswith(obj[:-1]) and
                         (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or
                          os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
                # list all the G*_* directories (from split event generation)
                to_always_rm = [file for file in
                                os.listdir(pjoin(self.me_dir, 'SubProcesses', dir))
                                if file.startswith(obj[:-1]) and
                                '_' in file and not '_G' in file and
                                (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or
                                 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]

                if not options['only_generation']:
                    to_always_rm.extend(to_rm)
                    if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, 'MadLoop5_resources.tar.gz')):
                        to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir, 'MadLoop5_resources.tar.gz'))
                files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])
        return

    def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
        """Print a summary of the results contained in self.cross_sect_dict.
        step corresponds to the mintMC step; if it is 2 (i.e. after event
        generation) some additional information is printed."""
        # find the process name
        proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n')
        process = ''
        for line in proc_card_lines:
            if line.startswith('generate') or line.startswith('add process'):
                process = process + (line.replace('generate ', '')).replace('add process ', '') + ' ; '
        lpp = {0: 'l', 1: 'p', -1: 'pbar'}
        if self.ninitial == 1:
            proc_info = '\n Process %s' % process[:-3]
        else:
            proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
                        (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
                         self.run_card['ebeam1'], self.run_card['ebeam2'])

        if self.ninitial == 1:
            self.cross_sect_dict['unit'] = 'GeV'
            self.cross_sect_dict['xsec_string'] = '(Partial) decay width'
            self.cross_sect_dict['axsec_string'] = '(Partial) abs(decay width)'
        else:
            self.cross_sect_dict['unit'] = 'pb'
            self.cross_sect_dict['xsec_string'] = 'Total cross section'
            self.cross_sect_dict['axsec_string'] = 'Total abs(cross section)'
        if self.run_card['event_norm'].lower() == 'bias':
            self.cross_sect_dict['xsec_string'] += ', incl. bias (DO NOT USE)'

        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            status = ['Determining the number of unweighted events per channel',
                      'Updating the number of unweighted events per channel',
                      'Summary:']
            computed = '(computed from LHE events)'
        elif mode in ['NLO', 'LO']:
            status = ['Results after grid setup:', 'Current results:',
                      'Final results and run summary:']
            computed = '(computed from histogram information)'

        if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            message = status[step] + '\n\n Intermediate results:' + \
                      ('\n Random seed: %(randinit)d'
                       '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s'
                       '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \
                      % self.cross_sect_dict
        elif mode in ['NLO', 'LO'] and not done:
            if step == 0:
                message = '\n ' + status[0] + \
                          '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                          self.cross_sect_dict
            else:
                message = '\n ' + status[1] + \
                          '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                          self.cross_sect_dict
        else:
            message = '\n --------------------------------------------------------------'
            message = message + '\n ' + status[2] + proc_info
            if mode not in ['LO', 'NLO']:
                message = message + \
                          '\n Number of events generated: %s' % self.run_card['nevents']
            message = message + \
                      '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                      self.cross_sect_dict
            message = message + \
                      '\n --------------------------------------------------------------'
            if scale_pdf_info and (self.run_card['nevents'] >= 10000 or mode in ['NLO', 'LO']):
                if scale_pdf_info[0]:
                    # scale uncertainties
                    message = message + '\n Scale variation %s:' % computed
                    for s in scale_pdf_info[0]:
                        if s['unc']:
                            if self.run_card['ickkw'] != -1:
                                message = message + \
                                    ('\n Dynamical_scale_choice %(label)i (envelope of %(size)s values): '
                                     '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % s
                            else:
                                message = message + \
                                    ('\n Soft and hard scale dependence (added in quadrature): '
                                     '\n %(cen)8.3e pb +%(max_q)0.1f%% -%(min_q)0.1f%%') % s
                        else:
                            message = message + \
                                ('\n Dynamical_scale_choice %(label)i: '
                                 '\n %(cen)8.3e pb') % s

                if scale_pdf_info[1]:
                    # pdf uncertainties
                    message = message + '\n PDF variation %s:' % computed
                    for p in scale_pdf_info[1]:
                        if p['unc'] == 'none':
                            message = message + \
                                ('\n %(name)s (central value only): '
                                 '\n %(cen)8.3e pb') % p
                        elif p['unc'] == 'unknown':
                            message = message + \
                                ('\n %(name)s (%(size)s members; combination method unknown): '
                                 '\n %(cen)8.3e pb') % p
                        else:
                            message = message + \
                                ('\n %(name)s (%(size)s members; using %(unc)s method): '
                                 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % p
                message = message + \
                          '\n --------------------------------------------------------------'

        if (mode in ['NLO', 'LO'] and not done) or \
           (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step != 2):
            logger.info(message + '\n')
            return

        # Some advanced general statistics are shown in the debug message at
        # the end of the run. Gather them from the log files, making sure that
        # this never stops a run.
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            log_GV_files = misc.glob(pjoin('P*', 'G*', 'log_MINT*.txt'),
                                     pjoin(self.me_dir, 'SubProcesses'))
            all_log_files = log_GV_files
        elif mode == 'NLO':
            log_GV_files = misc.glob(pjoin('P*', 'all_G*', 'log_MINT*.txt'),
                                     pjoin(self.me_dir, 'SubProcesses'))
            all_log_files = log_GV_files
        elif mode == 'LO':
            log_GV_files = ''
            all_log_files = misc.glob(pjoin('P*', 'born_G*', 'log_MINT*.txt'),
                                      pjoin(self.me_dir, 'SubProcesses'))
        else:
            raise aMCatNLOError('Running mode %s not supported.' % mode)

        try:
            message, debug_msg = \
                self.compile_advanced_stats(log_GV_files, all_log_files, message)
        except Exception as e:
            debug_msg = 'Advanced statistics collection failed with error "%s"\n' % str(e)
            err_string = StringIO.StringIO()
            traceback.print_exc(limit=4, file=err_string)
            debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s' \
                         % err_string.getvalue()

        logger.debug(debug_msg + '\n')
        logger.info(message + '\n')

        # Now copy the relevant information to the Events/Run_<xxx> directory
        evt_path = pjoin(self.me_dir, 'Events', self.run_name)
        open(pjoin(evt_path, 'summary.txt'), 'w').write(message + '\n')
        open(pjoin(evt_path, '.full_summary.txt'),
             'w').write(message + '\n\n' + debug_msg + '\n')

        self.archive_files(evt_path, mode)

3155 - def archive_files(self, evt_path, mode):
3156 """ Copies in the Events/Run_<xxx> directory relevant files characterizing 3157 the run.""" 3158 3159 files_to_arxiv = [pjoin('Cards','param_card.dat'), 3160 pjoin('Cards','MadLoopParams.dat'), 3161 pjoin('Cards','FKS_params.dat'), 3162 pjoin('Cards','run_card.dat'), 3163 pjoin('Subprocesses','setscales.f'), 3164 pjoin('Subprocesses','cuts.f')] 3165 3166 if mode in ['NLO', 'LO']: 3167 files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat')) 3168 3169 if not os.path.exists(pjoin(evt_path,'RunMaterial')): 3170 os.mkdir(pjoin(evt_path,'RunMaterial')) 3171 3172 for path in files_to_arxiv: 3173 if os.path.isfile(pjoin(self.me_dir,path)): 3174 files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial')) 3175 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path) 3176 shutil.rmtree(pjoin(evt_path,'RunMaterial'))
3177
3178 - def compile_advanced_stats(self,log_GV_files,all_log_files,message):
3179 """ This functions goes through the log files given in arguments and 3180 compiles statistics about MadLoop stability, virtual integration 3181 optimization and detection of potential error messages into a nice 3182 debug message to printed at the end of the run """ 3183 3184 def safe_float(str_float): 3185 try: 3186 return float(str_float) 3187 except ValueError: 3188 logger.debug('Could not convert the following float during'+ 3189 ' advanced statistics printout: %s'%str(str_float)) 3190 return -1.0
3191 3192 3193 # > UPS is a dictionary of tuples with this format {channel:[nPS,nUPS]} 3194 # > Errors is a list of tuples with this format (log_file,nErrors) 3195 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}} 3196 mint_search = re.compile(r"MINT(?P<ID>\d*).txt") 3197 3198 # ================================== 3199 # == MadLoop stability statistics == 3200 # ================================== 3201 3202 # Recuperate the fraction of unstable PS points found in the runs for 3203 # the virtuals 3204 UPS_stat_finder = re.compile( 3205 r"Satistics from MadLoop:.*"+\ 3206 r"Total points tried\:\s+(?P<ntot>\d+).*"+\ 3207 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\ 3208 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\ 3209 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\ 3210 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\ 3211 r"Double precision used\:\s+(?P<nddp>\d+).*"+\ 3212 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\ 3213 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\ 3214 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\ 3215 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL) 3216 3217 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)', 3218 1 : 'CutTools (double precision)', 3219 2 : 'PJFry++', 3220 3 : 'IREGI', 3221 4 : 'Golem95', 3222 5 : 'Samurai', 3223 6 : 'Ninja (double precision)', 3224 7 : 'COLLIER', 3225 8 : 'Ninja (quadruple precision)', 3226 9 : 'CutTools (quadruple precision)'} 3227 RetUnit_finder =re.compile( 3228 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)") 3229 #Unit 3230 3231 for gv_log in log_GV_files: 3232 channel_name = '/'.join(gv_log.split('/')[-5:-1]) 3233 log=open(gv_log,'r').read() 3234 UPS_stats = re.search(UPS_stat_finder,log) 3235 for retunit_stats in re.finditer(RetUnit_finder, log): 3236 if channel_name not in stats['UPS'].keys(): 3237 stats['UPS'][channel_name] = [0]*10+[[0]*10] 3238 stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \ 3239 += int(retunit_stats.group('n_occurences')) 3240 if not UPS_stats is None: 3241 try: 3242 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot')) 3243 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun')) 3244 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps')) 3245 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups')) 3246 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps')) 3247 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp')) 3248 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp')) 3249 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini')) 3250 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100')) 3251 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10')) 3252 except KeyError: 3253 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')), 3254 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')), 3255 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')), 3256 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')), 3257 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')), 3258 int(UPS_stats.group('n10')),[0]*10] 3259 debug_msg = "" 3260 if len(stats['UPS'].keys())>0: 3261 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0) 3262 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0) 3263 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0) 3264 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0) 3265 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0) 3266 nTotddp = 
sum([chan[5] for chan in stats['UPS'].values()],0) 3267 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0) 3268 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0) 3269 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0) 3270 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0) 3271 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \ 3272 for i in range(10)] 3273 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \ 3274 safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()] 3275 maxUPS = max(UPSfracs, key = lambda w: w[1]) 3276 3277 tmpStr = "" 3278 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS 3279 tmpStr += '\n Stability unknown: %d'%nTotsun 3280 tmpStr += '\n Stable PS point: %d'%nTotsps 3281 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups 3282 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps 3283 tmpStr += '\n Only double precision used: %d'%nTotddp 3284 tmpStr += '\n Quadruple precision used: %d'%nTotqdp 3285 tmpStr += '\n Initialization phase-space points: %d'%nTotini 3286 tmpStr += '\n Reduction methods used:' 3287 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \ 3288 unit_code_meaning.keys() if nTot1[i]>0] 3289 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True): 3290 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n) 3291 if nTot100 != 0: 3292 debug_msg += '\n Unknown return code (100): %d'%nTot100 3293 if nTot10 != 0: 3294 debug_msg += '\n Unknown return code (10): %d'%nTot10 3295 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \ 3296 not in unit_code_meaning.keys()) 3297 if nUnknownUnit != 0: 3298 debug_msg += '\n Unknown return code (1): %d'\ 3299 %nUnknownUnit 3300 3301 if maxUPS[1]>0.001: 3302 message += tmpStr 3303 message += '\n Total number of unstable PS point detected:'+\ 3304 ' %d (%4.2f%%)'%(nToteps,safe_float(100*nToteps)/nTotPS) 3305 message += '\n Maximum fraction of UPS points in '+\ 3306 'channel %s (%4.2f%%)'%maxUPS 3307 message += '\n Please report this to the authors while '+\ 3308 'providing the file' 3309 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir), 3310 maxUPS[0],'UPS.log')) 3311 else: 3312 debug_msg += tmpStr 3313 3314 3315 # ==================================================== 3316 # == aMC@NLO virtual integration optimization stats == 3317 # ==================================================== 3318 3319 virt_tricks_finder = re.compile( 3320 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\ 3321 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\ 3322 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\ 3323 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)") 3324 3325 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\ 3326 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)") 3327 3328 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)") 3329 3330 channel_contr_list = {} 3331 for gv_log in log_GV_files: 3332 logfile=open(gv_log,'r') 3333 log = logfile.read() 3334 logfile.close() 3335 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 3336 vf_stats = None 3337 for vf_stats in re.finditer(virt_frac_finder, log): 3338 pass 3339 if not vf_stats is None: 3340 v_frac = safe_float(vf_stats.group('v_frac')) 3341 v_average = safe_float(vf_stats.group('v_average')) 3342 try: 3343 if v_frac < stats['virt_stats']['v_frac_min'][0]: 
3344 stats['virt_stats']['v_frac_min']=(v_frac,channel_name) 3345 if v_frac > stats['virt_stats']['v_frac_max'][0]: 3346 stats['virt_stats']['v_frac_max']=(v_frac,channel_name) 3347 stats['virt_stats']['v_frac_avg'][0] += v_frac 3348 stats['virt_stats']['v_frac_avg'][1] += 1 3349 except KeyError: 3350 stats['virt_stats']['v_frac_min']=[v_frac,channel_name] 3351 stats['virt_stats']['v_frac_max']=[v_frac,channel_name] 3352 stats['virt_stats']['v_frac_avg']=[v_frac,1] 3353 3354 3355 ccontr_stats = None 3356 for ccontr_stats in re.finditer(channel_contr_finder, log): 3357 pass 3358 if not ccontr_stats is None: 3359 contrib = safe_float(ccontr_stats.group('v_contr')) 3360 try: 3361 if contrib>channel_contr_list[channel_name]: 3362 channel_contr_list[channel_name]=contrib 3363 except KeyError: 3364 channel_contr_list[channel_name]=contrib 3365 3366 3367 # Now build the list of relevant virt log files to look for the maxima 3368 # of virt fractions and such. 3369 average_contrib = 0.0 3370 for value in channel_contr_list.values(): 3371 average_contrib += value 3372 if len(channel_contr_list.values()) !=0: 3373 average_contrib = average_contrib / len(channel_contr_list.values()) 3374 3375 relevant_log_GV_files = [] 3376 excluded_channels = set([]) 3377 all_channels = set([]) 3378 for log_file in log_GV_files: 3379 channel_name = '/'.join(log_file.split('/')[-3:-1]) 3380 all_channels.add(channel_name) 3381 try: 3382 if channel_contr_list[channel_name] > (0.1*average_contrib): 3383 relevant_log_GV_files.append(log_file) 3384 else: 3385 excluded_channels.add(channel_name) 3386 except KeyError: 3387 relevant_log_GV_files.append(log_file) 3388 3389 # Now we want to use the latest occurence of accumulated result in the log file 3390 for gv_log in relevant_log_GV_files: 3391 logfile=open(gv_log,'r') 3392 log = logfile.read() 3393 logfile.close() 3394 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 3395 3396 vt_stats = None 3397 for vt_stats in re.finditer(virt_tricks_finder, log): 3398 pass 3399 if not vt_stats is None: 3400 vt_stats_group = vt_stats.groupdict() 3401 v_ratio = safe_float(vt_stats.group('v_ratio')) 3402 v_ratio_err = safe_float(vt_stats.group('v_ratio_err')) 3403 v_contr = safe_float(vt_stats.group('v_abs_contr')) 3404 v_contr_err = safe_float(vt_stats.group('v_abs_contr_err')) 3405 try: 3406 if v_ratio < stats['virt_stats']['v_ratio_min'][0]: 3407 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name) 3408 if v_ratio > stats['virt_stats']['v_ratio_max'][0]: 3409 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name) 3410 if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]: 3411 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name) 3412 if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]: 3413 stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name) 3414 if v_contr < stats['virt_stats']['v_contr_min'][0]: 3415 stats['virt_stats']['v_contr_min']=(v_contr,channel_name) 3416 if v_contr > stats['virt_stats']['v_contr_max'][0]: 3417 stats['virt_stats']['v_contr_max']=(v_contr,channel_name) 3418 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]: 3419 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name) 3420 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]: 3421 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name) 3422 except KeyError: 3423 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name] 3424 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name] 3425 
stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name] 3426 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name] 3427 stats['virt_stats']['v_contr_min']=[v_contr,channel_name] 3428 stats['virt_stats']['v_contr_max']=[v_contr,channel_name] 3429 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name] 3430 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name] 3431 3432 vf_stats = None 3433 for vf_stats in re.finditer(virt_frac_finder, log): 3434 pass 3435 if not vf_stats is None: 3436 v_frac = safe_float(vf_stats.group('v_frac')) 3437 v_average = safe_float(vf_stats.group('v_average')) 3438 try: 3439 if v_average < stats['virt_stats']['v_average_min'][0]: 3440 stats['virt_stats']['v_average_min']=(v_average,channel_name) 3441 if v_average > stats['virt_stats']['v_average_max'][0]: 3442 stats['virt_stats']['v_average_max']=(v_average,channel_name) 3443 stats['virt_stats']['v_average_avg'][0] += v_average 3444 stats['virt_stats']['v_average_avg'][1] += 1 3445 except KeyError: 3446 stats['virt_stats']['v_average_min']=[v_average,channel_name] 3447 stats['virt_stats']['v_average_max']=[v_average,channel_name] 3448 stats['virt_stats']['v_average_avg']=[v_average,1] 3449 3450 try: 3451 debug_msg += '\n\n Statistics on virtual integration optimization : ' 3452 3453 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\ 3454 %tuple(stats['virt_stats']['v_frac_max']) 3455 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\ 3456 %tuple(stats['virt_stats']['v_frac_min']) 3457 debug_msg += '\n Average virt fraction computed %.3f'\ 3458 %safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1])) 3459 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\ 3460 (len(excluded_channels),len(all_channels)) 3461 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\ 3462 %tuple(stats['virt_stats']['v_average_max']) 3463 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\ 3464 %tuple(stats['virt_stats']['v_ratio_max']) 3465 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\ 3466 %tuple(stats['virt_stats']['v_ratio_err_max']) 3467 debug_msg += tmpStr 3468 # After all it was decided that it is better not to alarm the user unecessarily 3469 # with such printout of the statistics. 3470 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0 or \ 3471 # stats['virt_stats']['v_ratio_err_max'][0]>100.0: 3472 # message += "\n Suspiciously large MC error in :" 3473 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0: 3474 # message += tmpStr 3475 3476 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\ 3477 %tuple(stats['virt_stats']['v_contr_err_max']) 3478 debug_msg += tmpStr 3479 # if stats['virt_stats']['v_contr_err_max'][0]>100.0: 3480 # message += tmpStr 3481 3482 3483 except KeyError: 3484 debug_msg += '\n Could not find statistics on the integration optimization. 
' 3485 3486 # ======================================= 3487 # == aMC@NLO timing profile statistics == 3488 # ======================================= 3489 3490 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\ 3491 "(?P<time>[\d\+-Eed\.]*)\s*") 3492 3493 for logf in log_GV_files: 3494 logfile=open(logf,'r') 3495 log = logfile.read() 3496 logfile.close() 3497 channel_name = '/'.join(logf.split('/')[-3:-1]) 3498 mint = re.search(mint_search,logf) 3499 if not mint is None: 3500 channel_name = channel_name+' [step %s]'%mint.group('ID') 3501 3502 for time_stats in re.finditer(timing_stat_finder, log): 3503 try: 3504 stats['timings'][time_stats.group('name')][channel_name]+=\ 3505 safe_float(time_stats.group('time')) 3506 except KeyError: 3507 if time_stats.group('name') not in stats['timings'].keys(): 3508 stats['timings'][time_stats.group('name')] = {} 3509 stats['timings'][time_stats.group('name')][channel_name]=\ 3510 safe_float(time_stats.group('time')) 3511 3512 # useful inline function 3513 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs))) 3514 try: 3515 totTimeList = [(time, chan) for chan, time in \ 3516 stats['timings']['Total'].items()] 3517 except KeyError: 3518 totTimeList = [] 3519 3520 totTimeList.sort() 3521 if len(totTimeList)>0: 3522 debug_msg += '\n\n Inclusive timing profile :' 3523 debug_msg += '\n Overall slowest channel %s (%s)'%\ 3524 (Tstr(totTimeList[-1][0]),totTimeList[-1][1]) 3525 debug_msg += '\n Average channel running time %s'%\ 3526 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList)) 3527 debug_msg += '\n Aggregated total running time %s'%\ 3528 Tstr(sum([el[0] for el in totTimeList])) 3529 else: 3530 debug_msg += '\n\n Inclusive timing profile non available.' 3531 3532 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \ 3533 sum(stats['timings'][stat].values()), reverse=True) 3534 for name in sorted_keys: 3535 if name=='Total': 3536 continue 3537 if sum(stats['timings'][name].values())<=0.0: 3538 debug_msg += '\n Zero time record for %s.'%name 3539 continue 3540 try: 3541 TimeList = [((100.0*time/stats['timings']['Total'][chan]), 3542 chan) for chan, time in stats['timings'][name].items()] 3543 except KeyError, ZeroDivisionError: 3544 debug_msg += '\n\n Timing profile for %s unavailable.'%name 3545 continue 3546 TimeList.sort() 3547 debug_msg += '\n Timing profile for <%s> :'%name 3548 try: 3549 debug_msg += '\n Overall fraction of time %.3f %%'%\ 3550 safe_float((100.0*(sum(stats['timings'][name].values())/ 3551 sum(stats['timings']['Total'].values())))) 3552 except KeyError, ZeroDivisionError: 3553 debug_msg += '\n Overall fraction of time unavailable.' 3554 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\ 3555 (TimeList[-1][0],TimeList[-1][1]) 3556 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\ 3557 (TimeList[0][0],TimeList[0][1]) 3558 3559 # ============================= 3560 # == log file eror detection == 3561 # ============================= 3562 3563 # Find the number of potential errors found in all log files 3564 # This re is a simple match on a case-insensitve 'error' but there is 3565 # also some veto added for excluding the sentence 3566 # "See Section 6 of paper for error calculation." 3567 # which appear in the header of lhapdf in the logs. 
3568 err_finder = re.compile(\ 3569 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE) 3570 for log in all_log_files: 3571 logfile=open(log,'r') 3572 nErrors = len(re.findall(err_finder, logfile.read())) 3573 logfile.close() 3574 if nErrors != 0: 3575 stats['Errors'].append((str(log),nErrors)) 3576 3577 nErrors = sum([err[1] for err in stats['Errors']],0) 3578 if nErrors != 0: 3579 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\ 3580 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\ 3581 'found in the following log file%s:'%('s' if \ 3582 len(stats['Errors'])>1 else '') 3583 for error in stats['Errors'][:3]: 3584 log_name = '/'.join(error[0].split('/')[-5:]) 3585 debug_msg += '\n > %d error%s in %s'%\ 3586 (error[1],'s' if error[1]>1 else '',log_name) 3587 if len(stats['Errors'])>3: 3588 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0) 3589 nRemainingLogs = len(stats['Errors'])-3 3590 debug_msg += '\n And another %d error%s in %d other log file%s'%\ 3591 (nRemainingErrors, 's' if nRemainingErrors>1 else '', 3592 nRemainingLogs, 's ' if nRemainingLogs>1 else '') 3593 3594 return message, debug_msg 3595 3596
3597 - def reweight_and_collect_events(self, options, mode, nevents, event_norm):
3598 """this function calls the reweighting routines and creates the event file in the 3599 Event dir. Return the name of the event file created 3600 """ 3601 scale_pdf_info=[] 3602 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \ 3603 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1: 3604 scale_pdf_info = self.run_reweight(options['reweightonly']) 3605 self.update_status('Collecting events', level='parton', update_results=True) 3606 misc.compile(['collect_events'], 3607 cwd=pjoin(self.me_dir, 'SubProcesses'), nocompile=options['nocompile']) 3608 p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'), 3609 stdin=subprocess.PIPE, 3610 stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w')) 3611 if event_norm.lower() == 'sum': 3612 p.communicate(input = '1\n') 3613 elif event_norm.lower() == 'unity': 3614 p.communicate(input = '3\n') 3615 elif event_norm.lower() == 'bias': 3616 p.communicate(input = '0\n') 3617 else: 3618 p.communicate(input = '2\n') 3619 3620 #get filename from collect events 3621 filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1] 3622 3623 if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)): 3624 raise aMCatNLOError('An error occurred during event generation. ' + \ 3625 'The event file has not been created. Check collect_events.log') 3626 evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz') 3627 misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file) 3628 if not options['reweightonly']: 3629 self.print_summary(options, 2, mode, scale_pdf_info) 3630 res_files = misc.glob('res*.txt', pjoin(self.me_dir, 'SubProcesses')) 3631 for res_file in res_files: 3632 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 3633 3634 logger.info('The %s file has been generated.\n' % (evt_file)) 3635 self.results.add_detail('nb_event', nevents) 3636 self.update_status('Events generated', level='parton', update_results=True) 3637 return evt_file[:-3]
3638 3639
3640 - def run_mcatnlo(self, evt_file, options):
3641 """runs mcatnlo on the generated event file, to produce showered-events 3642 """ 3643 logger.info('Preparing MCatNLO run') 3644 try: 3645 misc.gunzip(evt_file) 3646 except Exception: 3647 pass 3648 3649 self.banner = banner_mod.Banner(evt_file) 3650 shower = self.banner.get_detail('run_card', 'parton_shower').upper() 3651 3652 #check that the number of split event files divides the number of 3653 # events, otherwise set it to 1 3654 if int(self.banner.get_detail('run_card', 'nevents') / \ 3655 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \ 3656 != self.banner.get_detail('run_card', 'nevents'): 3657 logger.warning(\ 3658 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \ 3659 'Setting it to 1.') 3660 self.shower_card['nsplit_jobs'] = 1 3661 3662 # don't split jobs if the user asks to shower only a part of the events 3663 if self.shower_card['nevents'] > 0 and \ 3664 self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \ 3665 self.shower_card['nsplit_jobs'] != 1: 3666 logger.warning(\ 3667 'Only a part of the events will be showered.\n' + \ 3668 'Setting nsplit_jobs in the shower_card to 1.') 3669 self.shower_card['nsplit_jobs'] = 1 3670 3671 self.banner_to_mcatnlo(evt_file) 3672 3673 # if fastjet has to be linked (in extralibs) then 3674 # add lib /include dirs for fastjet if fastjet-config is present on the 3675 # system, otherwise add fjcore to the files to combine 3676 if 'fastjet' in self.shower_card['extralibs']: 3677 #first, check that stdc++ is also linked 3678 if not 'stdc++' in self.shower_card['extralibs']: 3679 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS') 3680 self.shower_card['extralibs'] += ' stdc++' 3681 # then check if options[fastjet] corresponds to a valid fj installation 3682 try: 3683 #this is for a complete fj installation 3684 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \ 3685 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 3686 output, error = p.communicate() 3687 #remove the line break from output (last character) 3688 output = output[:-1] 3689 # add lib/include paths 3690 if not pjoin(output, 'lib') in self.shower_card['extrapaths']: 3691 logger.warning('Linking FastJet: updating EXTRAPATHS') 3692 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib') 3693 if not pjoin(output, 'include') in self.shower_card['includepaths']: 3694 logger.warning('Linking FastJet: updating INCLUDEPATHS') 3695 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include') 3696 # to be changed in the fortran wrapper 3697 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ' 3698 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ' 3699 except Exception: 3700 logger.warning('Linking FastJet: using fjcore') 3701 # this is for FJcore, so no FJ library has to be linked 3702 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '') 3703 if not 'fjcore.o' in self.shower_card['analyse']: 3704 self.shower_card['analyse'] += ' fjcore.o' 3705 # to be changed in the fortran wrapper 3706 include_line = '#include "fjcore.hh"//INCLUDE_FJ' 3707 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ' 3708 # change the fortran wrapper with the correct namespaces/include 3709 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n') 3710 for line in fjwrapper_lines: 3711 if '//INCLUDE_FJ' in line: 3712 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line 3713 if '//NAMESPACE_FJ' in 
line: 3714 fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line 3715 with open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w') as fsock: 3716 fsock.write('\n'.join(fjwrapper_lines) + '\n') 3717 3718 extrapaths = self.shower_card['extrapaths'].split() 3719 3720 # check that the path needed by HW++ and PY8 are set if one uses these shower 3721 if shower in ['HERWIGPP', 'PYTHIA8']: 3722 path_dict = {'HERWIGPP': ['hepmc_path', 3723 'thepeg_path', 3724 'hwpp_path'], 3725 'PYTHIA8': ['pythia8_path']} 3726 3727 if not all([self.options[ppath] and os.path.exists(self.options[ppath]) for ppath in path_dict[shower]]): 3728 raise aMCatNLOError('Some paths are missing or invalid in the configuration file.\n' + \ 3729 ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower]))) 3730 3731 if shower == 'HERWIGPP': 3732 extrapaths.append(pjoin(self.options['hepmc_path'], 'lib')) 3733 self.shower_card['extrapaths'] += ' %s' % pjoin(self.options['hepmc_path'], 'lib') 3734 3735 # add the HEPMC path of the pythia8 installation 3736 if shower == 'PYTHIA8': 3737 hepmc = subprocess.Popen([pjoin(self.options['pythia8_path'], 'bin', 'pythia8-config'), '--hepmc2'], 3738 stdout = subprocess.PIPE).stdout.read().strip() 3739 #this gives all the flags, i.e. 3740 #-I/Path/to/HepMC/include -L/Path/to/HepMC/lib -lHepMC 3741 # we just need the path to the HepMC libraries 3742 extrapaths.append(hepmc.split()[1].replace('-L', '')) 3743 3744 if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 3745 extrapaths.append(pjoin(self.options['pythia8_path'], 'lib')) 3746 3747 # set the PATH for the dynamic libraries 3748 if sys.platform == 'darwin': 3749 ld_library_path = 'DYLD_LIBRARY_PATH' 3750 else: 3751 ld_library_path = 'LD_LIBRARY_PATH' 3752 if ld_library_path in os.environ.keys(): 3753 paths = os.environ[ld_library_path] 3754 else: 3755 paths = '' 3756 paths += ':' + ':'.join(extrapaths) 3757 os.putenv(ld_library_path, paths) 3758 3759 shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat') 3760 self.shower_card.write_card(shower, shower_card_path) 3761 3762 # overwrite if shower_card_set.dat exists in MCatNLO 3763 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')): 3764 files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'), 3765 pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')) 3766 3767 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log') 3768 self.update_status('Compiling MCatNLO for %s...' 
% shower, level='shower') 3769 3770 3771 # libdl may be needded for pythia 82xx 3772 #if shower == 'PYTHIA8' and not \ 3773 # os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')) and \ 3774 # 'dl' not in self.shower_card['extralibs'].split(): 3775 # # 'dl' has to be linked with the extralibs 3776 # self.shower_card['extralibs'] += ' dl' 3777 # logger.warning("'dl' was added to extralibs from the shower_card.dat.\n" + \ 3778 # "It is needed for the correct running of PY8.2xx.\n" + \ 3779 # "If this library cannot be found on your system, a crash will occur.") 3780 3781 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'), 3782 stderr=open(mcatnlo_log, 'w'), 3783 cwd=pjoin(self.me_dir, 'MCatNLO'), 3784 close_fds=True) 3785 3786 exe = 'MCATNLO_%s_EXE' % shower 3787 if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \ 3788 not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')): 3789 print open(mcatnlo_log).read() 3790 raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log) 3791 logger.info(' ... done') 3792 3793 # create an empty dir where to run 3794 count = 1 3795 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 3796 (shower, count))): 3797 count += 1 3798 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 3799 (shower, count)) 3800 os.mkdir(rundir) 3801 files.cp(shower_card_path, rundir) 3802 3803 #look for the event files (don't resplit if one asks for the 3804 # same number of event files as in the previous run) 3805 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name)) 3806 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']: 3807 logger.info('Cleaning old files and splitting the event file...') 3808 #clean the old files 3809 files.rm([f for f in event_files if 'events.lhe' not in f]) 3810 if self.shower_card['nsplit_jobs'] > 1: 3811 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities'), nocompile=options['nocompile']) 3812 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')], 3813 stdin=subprocess.PIPE, 3814 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'), 3815 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3816 p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs']) 3817 logger.info('Splitting done.') 3818 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name)) 3819 3820 event_files.sort() 3821 3822 self.update_status('Showering events...', level='shower') 3823 logger.info('(Running in %s)' % rundir) 3824 if shower != 'PYTHIA8': 3825 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir) 3826 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir) 3827 else: 3828 # special treatment for pythia8 3829 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir) 3830 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir) 3831 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): # this is PY8.1xxx 3832 files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir) 3833 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir) 3834 else: # this is PY8.2xxx 3835 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir) 3836 #link the hwpp exe in the rundir 3837 if shower == 'HERWIGPP': 3838 try: 3839 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')): 3840 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir) 3841 if 
os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')): 3842 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig'), rundir) 3843 except Exception: 3844 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.') 3845 3846 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')): 3847 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir) 3848 3849 files.ln(evt_file, rundir, 'events.lhe') 3850 for i, f in enumerate(event_files): 3851 files.ln(f, rundir,'events_%d.lhe' % (i + 1)) 3852 3853 if not self.shower_card['analyse']: 3854 # an hep/hepmc file as output 3855 out_id = 'HEP' 3856 else: 3857 # one or more .top file(s) as output 3858 if "HwU" in self.shower_card['analyse']: 3859 out_id = 'HWU' 3860 else: 3861 out_id = 'TOP' 3862 3863 # write the executable 3864 with open(pjoin(rundir, 'shower.sh'), 'w') as fsock: 3865 # set the PATH for the dynamic libraries 3866 if sys.platform == 'darwin': 3867 ld_library_path = 'DYLD_LIBRARY_PATH' 3868 else: 3869 ld_library_path = 'LD_LIBRARY_PATH' 3870 fsock.write(open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \ 3871 % {'ld_library_path': ld_library_path, 3872 'extralibs': ':'.join(extrapaths)}) 3873 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')]) 3874 3875 if event_files: 3876 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \ 3877 for i in range(len(event_files))] 3878 else: 3879 arg_list = [[shower, out_id, self.run_name]] 3880 3881 self.run_all({rundir: 'shower.sh'}, arg_list, 'shower') 3882 self.njobs = 1 3883 self.wait_for_complete('shower') 3884 3885 # now collect the results 3886 message = '' 3887 warning = '' 3888 to_gzip = [evt_file] 3889 if out_id == 'HEP': 3890 #copy the showered stdhep/hepmc file back in events 3891 if shower in ['PYTHIA8', 'HERWIGPP']: 3892 hep_format = 'HEPMC' 3893 ext = 'hepmc' 3894 else: 3895 hep_format = 'StdHEP' 3896 ext = 'hep' 3897 3898 hep_file = '%s_%s_0.%s.gz' % \ 3899 (pjoin(os.path.dirname(evt_file), 'events'), shower, ext) 3900 count = 0 3901 3902 # find the first available name for the output: 3903 # check existing results with or without event splitting 3904 while os.path.exists(hep_file) or \ 3905 os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) : 3906 count +=1 3907 hep_file = '%s_%s_%d.%s.gz' % \ 3908 (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext) 3909 3910 try: 3911 if self.shower_card['nsplit_jobs'] == 1: 3912 files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file) 3913 message = ('The file %s has been generated. 
\nIt contains showered' + \ 3914 ' and hadronized events in the %s format obtained' + \ 3915 ' showering the parton-level event file %s.gz with %s') % \ 3916 (hep_file, hep_format, evt_file, shower) 3917 else: 3918 hep_list = [] 3919 for i in range(self.shower_card['nsplit_jobs']): 3920 hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext))) 3921 files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1]) 3922 message = ('The following files have been generated:\n %s\nThey contain showered' + \ 3923 ' and hadronized events in the %s format obtained' + \ 3924 ' showering the (split) parton-level event file %s.gz with %s') % \ 3925 ('\n '.join(hep_list), hep_format, evt_file, shower) 3926 3927 except OSError, IOError: 3928 raise aMCatNLOError('No file has been generated, an error occurred.'+\ 3929 ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log')) 3930 3931 # run the plot creation in a secure way 3932 if hep_format == 'StdHEP': 3933 try: 3934 self.do_plot('%s -f' % self.run_name) 3935 except Exception, error: 3936 logger.info("Fail to make the plot. Continue...") 3937 pass 3938 3939 elif out_id == 'TOP' or out_id == 'HWU': 3940 #copy the topdrawer or HwU file(s) back in events 3941 if out_id=='TOP': 3942 ext='top' 3943 elif out_id=='HWU': 3944 ext='HwU' 3945 topfiles = [] 3946 top_tars = [tarfile.TarFile(f) for f in misc.glob('histfile*.tar', rundir)] 3947 for top_tar in top_tars: 3948 topfiles.extend(top_tar.getnames()) 3949 3950 # safety check 3951 if len(top_tars) != self.shower_card['nsplit_jobs']: 3952 raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \ 3953 (self.shower_card['nsplit_jobs'], len(top_tars))) 3954 3955 # find the first available name for the output: 3956 # check existing results with or without event splitting 3957 filename = 'plot_%s_%d_' % (shower, 1) 3958 count = 1 3959 while os.path.exists(pjoin(self.me_dir, 'Events', 3960 self.run_name, '%s0.%s' % (filename,ext))) or \ 3961 os.path.exists(pjoin(self.me_dir, 'Events', 3962 self.run_name, '%s0__1.%s' % (filename,ext))): 3963 count += 1 3964 filename = 'plot_%s_%d_' % (shower, count) 3965 3966 if out_id=='TOP': 3967 hist_format='TopDrawer format' 3968 elif out_id=='HWU': 3969 hist_format='HwU and GnuPlot formats' 3970 3971 if not topfiles: 3972 # if no topfiles are found just warn the user 3973 warning = 'No .top file has been generated. 
For the results of your ' +\ 3974 'run, please check inside %s' % rundir 3975 elif self.shower_card['nsplit_jobs'] == 1: 3976 # only one job for the shower 3977 top_tars[0].extractall(path = rundir) 3978 plotfiles = [] 3979 for i, file in enumerate(topfiles): 3980 if out_id=='TOP': 3981 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 3982 '%s%d.top' % (filename, i)) 3983 files.mv(pjoin(rundir, file), plotfile) 3984 elif out_id=='HWU': 3985 out=pjoin(self.me_dir,'Events', 3986 self.run_name,'%s%d'% (filename,i)) 3987 histos=[{'dirname':pjoin(rundir,file)}] 3988 self.combine_plots_HwU(histos,out) 3989 try: 3990 misc.call(['gnuplot','%s%d.gnuplot' % (filename,i)],\ 3991 stdout=os.open(os.devnull, os.O_RDWR),\ 3992 stderr=os.open(os.devnull, os.O_RDWR),\ 3993 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3994 except Exception: 3995 pass 3996 plotfile=pjoin(self.me_dir,'Events',self.run_name, 3997 '%s%d.HwU'% (filename,i)) 3998 plotfiles.append(plotfile) 3999 4000 ffiles = 'files' 4001 have = 'have' 4002 if len(plotfiles) == 1: 4003 ffiles = 'file' 4004 have = 'has' 4005 4006 message = ('The %s %s %s been generated, with histograms in the' + \ 4007 ' %s, obtained by showering the parton-level' + \ 4008 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \ 4009 hist_format, evt_file, shower) 4010 else: 4011 # many jobs for the shower have been run 4012 topfiles_set = set(topfiles) 4013 plotfiles = [] 4014 for j, top_tar in enumerate(top_tars): 4015 top_tar.extractall(path = rundir) 4016 for i, file in enumerate(topfiles_set): 4017 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 4018 '%s%d__%d.%s' % (filename, i, j + 1,ext)) 4019 files.mv(pjoin(rundir, file), plotfile) 4020 plotfiles.append(plotfile) 4021 4022 # check if the user asked to combine the .top into a single file 4023 if self.shower_card['combine_td']: 4024 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities')) 4025 4026 if self.banner.get('run_card', 'event_norm').lower() == 'sum': 4027 norm = 1. 
4028 else: 4029 norm = 1./float(self.shower_card['nsplit_jobs']) 4030 4031 plotfiles2 = [] 4032 for i, file in enumerate(topfiles_set): 4033 filelist = ['%s%d__%d.%s' % (filename, i, j + 1,ext) \ 4034 for j in range(self.shower_card['nsplit_jobs'])] 4035 if out_id=='TOP': 4036 infile="%d\n%s\n%s\n" % \ 4037 (self.shower_card['nsplit_jobs'], 4038 '\n'.join(filelist), 4039 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs'])) 4040 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')], 4041 stdin=subprocess.PIPE, 4042 stdout=os.open(os.devnull, os.O_RDWR), 4043 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 4044 p.communicate(input = infile) 4045 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'), 4046 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i))) 4047 elif out_id=='HWU': 4048 out=pjoin(self.me_dir,'Events', 4049 self.run_name,'%s%d'% (filename,i)) 4050 histos=[] 4051 norms=[] 4052 for plotfile in plotfiles: 4053 histos.append({'dirname':plotfile}) 4054 norms.append(norm) 4055 self.combine_plots_HwU(histos,out,normalisation=norms) 4056 try: 4057 misc.call(['gnuplot','%s%d.gnuplot' % (filename, i)],\ 4058 stdout=os.open(os.devnull, os.O_RDWR),\ 4059 stderr=os.open(os.devnull, os.O_RDWR),\ 4060 cwd=pjoin(self.me_dir, 'Events',self.run_name)) 4061 except Exception: 4062 pass 4063 4064 plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i,ext))) 4065 tar = tarfile.open( 4066 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz') 4067 for f in filelist: 4068 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f) 4069 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist]) 4070 4071 tar.close() 4072 4073 ffiles = 'files' 4074 have = 'have' 4075 if len(plotfiles2) == 1: 4076 ffiles = 'file' 4077 have = 'has' 4078 4079 message = ('The %s %s %s been generated, with histograms in the' + \ 4080 ' %s, obtained by showering the parton-level' + \ 4081 ' file %s.gz with %s.\n' + \ 4082 'The files from the different shower ' + \ 4083 'jobs (before combining them) can be found inside %s.') % \ 4084 (ffiles, ', '.join(plotfiles2), have, hist_format,\ 4085 evt_file, shower, 4086 ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2])) 4087 4088 else: 4089 message = ('The following files have been generated:\n %s\n' + \ 4090 'They contain histograms in the' + \ 4091 ' %s, obtained by showering the parton-level' + \ 4092 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \ 4093 hist_format, evt_file, shower) 4094 4095 # Now arxiv the shower card used if RunMaterial is present 4096 run_dir_path = pjoin(rundir, self.run_name) 4097 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')): 4098 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path) 4099 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'), 4100 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\ 4101 %(shower, count))) 4102 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'], 4103 cwd=run_dir_path) 4104 shutil.rmtree(pjoin(run_dir_path,'RunMaterial')) 4105 # end of the run, gzip files and print out the message/warning 4106 for f in to_gzip: 4107 misc.gzip(f) 4108 if message: 4109 logger.info(message) 4110 if warning: 4111 logger.warning(warning) 4112 4113 self.update_status('Run complete', level='shower', update_results=True)
4114 4115 ############################################################################
4116 - def set_run_name(self, name, tag=None, level='parton', reload_card=False):
4117 """define the run name, the run_tag, the banner and the results.""" 4118 4119 # when are we force to change the tag new_run:previous run requiring changes 4120 upgrade_tag = {'parton': ['parton','delphes','shower','madanalysis5_hadron'], 4121 'shower': ['shower','delphes','madanalysis5_hadron'], 4122 'delphes':['delphes'], 4123 'madanalysis5_hadron':['madanalysis5_hadron'], 4124 'plot':[]} 4125 4126 if name == self.run_name: 4127 if reload_card: 4128 run_card = pjoin(self.me_dir, 'Cards','run_card.dat') 4129 self.run_card = banner_mod.RunCardNLO(run_card) 4130 4131 #check if we need to change the tag 4132 if tag: 4133 self.run_card['run_tag'] = tag 4134 self.run_tag = tag 4135 self.results.add_run(self.run_name, self.run_card) 4136 else: 4137 for tag in upgrade_tag[level]: 4138 if getattr(self.results[self.run_name][-1], tag): 4139 tag = self.get_available_tag() 4140 self.run_card['run_tag'] = tag 4141 self.run_tag = tag 4142 self.results.add_run(self.run_name, self.run_card) 4143 break 4144 return # Nothing to do anymore 4145 4146 # save/clean previous run 4147 if self.run_name: 4148 self.store_result() 4149 # store new name 4150 self.run_name = name 4151 4152 # Read run_card 4153 run_card = pjoin(self.me_dir, 'Cards','run_card.dat') 4154 self.run_card = banner_mod.RunCardNLO(run_card) 4155 4156 new_tag = False 4157 # First call for this run -> set the banner 4158 self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag) 4159 if 'mgruncard' in self.banner: 4160 self.run_card = self.banner.charge_card('run_card') 4161 if tag: 4162 self.run_card['run_tag'] = tag 4163 new_tag = True 4164 elif not self.run_name in self.results and level =='parton': 4165 pass # No results yet, so current tag is fine 4166 elif not self.run_name in self.results: 4167 #This is only for case when you want to trick the interface 4168 logger.warning('Trying to run data on unknown run.') 4169 self.results.add_run(name, self.run_card) 4170 self.results.update('add run %s' % name, 'all', makehtml=True) 4171 else: 4172 for tag in upgrade_tag[level]: 4173 4174 if getattr(self.results[self.run_name][-1], tag): 4175 # LEVEL is already define in the last tag -> need to switch tag 4176 tag = self.get_available_tag() 4177 self.run_card['run_tag'] = tag 4178 new_tag = True 4179 break 4180 if not new_tag: 4181 # We can add the results to the current run 4182 tag = self.results[self.run_name][-1]['tag'] 4183 self.run_card['run_tag'] = tag # ensure that run_tag is correct 4184 4185 4186 if name in self.results and not new_tag: 4187 self.results.def_current(self.run_name) 4188 else: 4189 self.results.add_run(self.run_name, self.run_card) 4190 4191 self.run_tag = self.run_card['run_tag'] 4192 4193 # Return the tag of the previous run having the required data for this 4194 # tag/run to working wel. 4195 if level == 'parton': 4196 return 4197 elif level == 'pythia': 4198 return self.results[self.run_name][0]['tag'] 4199 else: 4200 for i in range(-1,-len(self.results[self.run_name])-1,-1): 4201 tagRun = self.results[self.run_name][i] 4202 if tagRun.pythia: 4203 return tagRun['tag']
4204 4205
4206 - def store_result(self):
4207 """ tar the pythia results. This is done when we are quite sure that 4208 the pythia output will not be use anymore """ 4209 4210 if not self.run_name: 4211 return 4212 4213 self.results.save() 4214 4215 if not self.to_store: 4216 return 4217 4218 if 'event' in self.to_store: 4219 if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')): 4220 if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')): 4221 self.update_status('gzipping output file: events.lhe', level='parton', error=True) 4222 misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 4223 else: 4224 os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 4225 if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')): 4226 os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe')) 4227 4228 4229 tag = self.run_card['run_tag'] 4230 4231 self.to_store = []
4232 4233 4234 ############################################################################
4235 - def get_Gdir(self, Pdir=None):
4236 """get the list of Gdirectory if not yet saved.""" 4237 4238 if hasattr(self, "Gdirs"): 4239 if self.me_dir in self.Gdirs: 4240 if Pdir is None: 4241 return sum(self.Gdirs.values()) 4242 else: 4243 return self.Gdirs[Pdir] 4244 4245 Pdirs = self.get_Pdir() 4246 Gdirs = {self.me_dir:[]} 4247 for P in Pdirs: 4248 Gdirs[P] = [pjoin(P,G) for G in os.listdir(P) if G.startswith('G') and 4249 os.path.isdir(pjoin(P,G))] 4250 4251 self.Gdirs = Gdirs 4252 return self.getGdir(Pdir)
4253 4254
4255 - def get_init_dict(self, evt_file):
4256 """reads the info in the init block and returns them in a dictionary""" 4257 ev_file = open(evt_file) 4258 init = "" 4259 found = False 4260 while True: 4261 line = ev_file.readline() 4262 if "<init>" in line: 4263 found = True 4264 elif found and not line.startswith('#'): 4265 init += line 4266 if "</init>" in line or "<event>" in line: 4267 break 4268 ev_file.close() 4269 4270 # IDBMUP(1),IDBMUP(2),EBMUP(1),EBMUP(2), PDFGUP(1),PDFGUP(2), 4271 # PDFSUP(1),PDFSUP(2),IDWTUP,NPRUP 4272 # these are not included (so far) in the init_dict 4273 # XSECUP(1),XERRUP(1),XMAXUP(1),LPRUP(1) 4274 4275 init_dict = {} 4276 init_dict['idbmup1'] = int(init.split()[0]) 4277 init_dict['idbmup2'] = int(init.split()[1]) 4278 init_dict['ebmup1'] = float(init.split()[2]) 4279 init_dict['ebmup2'] = float(init.split()[3]) 4280 init_dict['pdfgup1'] = int(init.split()[4]) 4281 init_dict['pdfgup2'] = int(init.split()[5]) 4282 init_dict['pdfsup1'] = int(init.split()[6]) 4283 init_dict['pdfsup2'] = int(init.split()[7]) 4284 init_dict['idwtup'] = int(init.split()[8]) 4285 init_dict['nprup'] = int(init.split()[9]) 4286 4287 return init_dict
4288 4289
4290 - def banner_to_mcatnlo(self, evt_file):
4291 """creates the mcatnlo input script using the values set in the header of the event_file. 4292 It also checks if the lhapdf library is used""" 4293 4294 shower = self.banner.get('run_card', 'parton_shower').upper() 4295 pdlabel = self.banner.get('run_card', 'pdlabel') 4296 itry = 0 4297 nevents = self.shower_card['nevents'] 4298 init_dict = self.get_init_dict(evt_file) 4299 4300 if nevents < 0 or \ 4301 nevents > self.banner.get_detail('run_card', 'nevents'): 4302 nevents = self.banner.get_detail('run_card', 'nevents') 4303 4304 nevents = nevents / self.shower_card['nsplit_jobs'] 4305 4306 mcmass_dict = {} 4307 for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]: 4308 pdg = int(line.split()[0]) 4309 mass = float(line.split()[1]) 4310 mcmass_dict[pdg] = mass 4311 4312 content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1]) 4313 content += 'NEVENTS=%d\n' % nevents 4314 content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\ 4315 self.shower_card['nsplit_jobs']) 4316 content += 'MCMODE=%s\n' % shower 4317 content += 'PDLABEL=%s\n' % pdlabel 4318 content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value 4319 #content += 'PDFSET=%s\n' % self.banner.get_detail('run_card', 'lhaid') 4320 #content += 'PDFSET=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 4321 content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value 4322 content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value 4323 content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value 4324 content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value 4325 content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value 4326 content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value 4327 try: 4328 content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value 4329 content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value 4330 except KeyError: 4331 content += 'HGGMASS=120.\n' 4332 content += 'HGGWIDTH=0.00575308848\n' 4333 content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1') 4334 content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2') 4335 content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1') 4336 content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2') 4337 content += 'DMASS=%s\n' % mcmass_dict[1] 4338 content += 'UMASS=%s\n' % mcmass_dict[2] 4339 content += 'SMASS=%s\n' % mcmass_dict[3] 4340 content += 'CMASS=%s\n' % mcmass_dict[4] 4341 content += 'BMASS=%s\n' % mcmass_dict[5] 4342 try: 4343 content += 'EMASS=%s\n' % mcmass_dict[11] 4344 content += 'MUMASS=%s\n' % mcmass_dict[13] 4345 content += 'TAUMASS=%s\n' % mcmass_dict[15] 4346 except KeyError: 4347 # this is for backward compatibility 4348 mcmass_lines = [l for l in \ 4349 open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper()) 4350 ).read().split('\n') if l] 4351 new_mcmass_dict = {} 4352 for l in mcmass_lines: 4353 key, val = l.split('=') 4354 new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip() 4355 content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)'] 4356 content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)'] 4357 content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)'] 4358 4359 content += 'GMASS=%s\n' % mcmass_dict[21] 4360 content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower() 4361 
# check if need to link lhapdf 4362 if int(self.shower_card['pdfcode']) > 1 or \ 4363 (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1) or \ 4364 shower=='HERWIGPP' : 4365 # Use LHAPDF (should be correctly installed, because 4366 # either events were already generated with them, or the 4367 # user explicitly gives an LHAPDF number in the 4368 # shower_card). 4369 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 4370 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 4371 stdout = subprocess.PIPE).stdout.read().strip() 4372 content += 'LHAPDFPATH=%s\n' % lhapdfpath 4373 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 4374 if self.shower_card['pdfcode']==0: 4375 lhaid_list = '' 4376 content += '' 4377 elif self.shower_card['pdfcode']==1: 4378 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 4379 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 4380 else: 4381 lhaid_list = [abs(int(self.shower_card['pdfcode']))] 4382 content += 'PDFCODE=%s\n' % self.shower_card['pdfcode'] 4383 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 4384 elif int(self.shower_card['pdfcode'])==1 or \ 4385 int(self.shower_card['pdfcode'])==-1 and True: 4386 # Try to use LHAPDF because user wants to use the same PDF 4387 # as was used for the event generation. However, for the 4388 # event generation, LHAPDF was not used, so non-trivial to 4389 # see if if LHAPDF is available with the corresponding PDF 4390 # set. If not found, give a warning and use build-in PDF 4391 # set instead. 4392 try: 4393 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 4394 stdout = subprocess.PIPE).stdout.read().strip() 4395 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 4396 content += 'LHAPDFPATH=%s\n' % lhapdfpath 4397 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 4398 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 4399 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 4400 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 4401 except Exception: 4402 logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\ 4403 ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\ 4404 ' needed PDF set. Will use default internal PDF for the shower instead. To use the'+\ 4405 ' same set as was used in the event generation install LHAPDF and set the path using'+\ 4406 ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell') 4407 content += 'LHAPDFPATH=\n' 4408 content += 'PDFCODE=0\n' 4409 else: 4410 content += 'LHAPDFPATH=\n' 4411 content += 'PDFCODE=0\n' 4412 4413 content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw') 4414 content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj') 4415 # add the pythia8/hwpp path(s) 4416 if self.options['pythia8_path']: 4417 content+='PY8PATH=%s\n' % self.options['pythia8_path'] 4418 if self.options['hwpp_path']: 4419 content+='HWPPPATH=%s\n' % self.options['hwpp_path'] 4420 if self.options['thepeg_path'] and self.options['thepeg_path'] != self.options['hwpp_path']: 4421 content+='THEPEGPATH=%s\n' % self.options['thepeg_path'] 4422 if self.options['hepmc_path'] and self.options['hepmc_path'] != self.options['hwpp_path']: 4423 content+='HEPMCPATH=%s\n' % self.options['hepmc_path'] 4424 4425 output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w') 4426 output.write(content) 4427 output.close() 4428 return shower
4429 4430
4431 - def run_reweight(self, only):
4432 """runs the reweight_xsec_events executables on each sub-event file generated 4433 to compute on the fly scale and/or PDF uncertainities""" 4434 logger.info(' Doing reweight') 4435 4436 nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted') 4437 # if only doing reweight, copy back the nevents_unweighted file 4438 if only: 4439 if os.path.exists(nev_unw + '.orig'): 4440 files.cp(nev_unw + '.orig', nev_unw) 4441 else: 4442 raise aMCatNLOError('Cannot find event file information') 4443 4444 #read the nevents_unweighted file to get the list of event files 4445 file = open(nev_unw) 4446 lines = file.read().split('\n') 4447 file.close() 4448 # make copy of the original nevent_unweighted file 4449 files.cp(nev_unw, nev_unw + '.orig') 4450 # loop over lines (all but the last one whith is empty) and check that the 4451 # number of events is not 0 4452 evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0'] 4453 evt_wghts = [float(line.split()[3]) for line in lines[:-1] if line.split()[1] != '0'] 4454 if self.run_card['event_norm'].lower()=='bias' and self.run_card['nevents'] != 0: 4455 evt_wghts[:]=[1./float(self.run_card['nevents']) for wgt in evt_wghts] 4456 #prepare the job_dict 4457 job_dict = {} 4458 exe = 'reweight_xsec_events.local' 4459 for i, evt_file in enumerate(evt_files): 4460 path, evt = os.path.split(evt_file) 4461 files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \ 4462 pjoin(self.me_dir, 'SubProcesses', path)) 4463 job_dict[path] = [exe] 4464 4465 self.run_all(job_dict, [[evt, '1']], 'Running reweight') 4466 4467 #check that the new event files are complete 4468 for evt_file in evt_files: 4469 last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \ 4470 pjoin(self.me_dir, 'SubProcesses', evt_file)], \ 4471 stdout = subprocess.PIPE).stdout.read().strip() 4472 if last_line != "</LesHouchesEvents>": 4473 raise aMCatNLOError('An error occurred during reweight. Check the' + \ 4474 '\'reweight_xsec_events.output\' files inside the ' + \ 4475 '\'SubProcesses/P*/G*/ directories for details') 4476 4477 #update file name in nevents_unweighted 4478 newfile = open(nev_unw, 'w') 4479 for line in lines: 4480 if line: 4481 newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n') 4482 newfile.close() 4483 4484 return self.pdf_scale_from_reweighting(evt_files,evt_wghts)
4485
4486 - def pdf_scale_from_reweighting(self, evt_files,evt_wghts):
4487 """This function takes the files with the scale and pdf values 4488 written by the reweight_xsec_events.f code 4489 (P*/G*/pdf_scale_dependence.dat) and computes the overall 4490 scale and PDF uncertainty (the latter is computed using the 4491 Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000)) 4492 and returns it in percents. The expected format of the file 4493 is: n_scales xsec_scale_central xsec_scale1 ... n_pdf 4494 xsec_pdf0 xsec_pdf1 ....""" 4495 4496 scales=[] 4497 pdfs=[] 4498 for i,evt_file in enumerate(evt_files): 4499 path, evt=os.path.split(evt_file) 4500 with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'),'r') as f: 4501 data_line=f.readline() 4502 if "scale variations:" in data_line: 4503 for j,scale in enumerate(self.run_card['dynamical_scale_choice']): 4504 data_line = f.readline().split() 4505 scales_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()] 4506 try: 4507 scales[j] = [a + b for a, b in zip(scales[j], scales_this)] 4508 except IndexError: 4509 scales+=[scales_this] 4510 data_line=f.readline() 4511 if "pdf variations:" in data_line: 4512 for j,pdf in enumerate(self.run_card['lhaid']): 4513 data_line = f.readline().split() 4514 pdfs_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()] 4515 try: 4516 pdfs[j] = [a + b for a, b in zip(pdfs[j], pdfs_this)] 4517 except IndexError: 4518 pdfs+=[pdfs_this] 4519 4520 # get the scale uncertainty in percent 4521 scale_info=[] 4522 for j,scale in enumerate(scales): 4523 s_cen=scale[0] 4524 if s_cen != 0.0 and self.run_card['reweight_scale'][j]: 4525 # max and min of the full envelope 4526 s_max=(max(scale)/s_cen-1)*100 4527 s_min=(1-min(scale)/s_cen)*100 4528 # ren and fac scale dependence added in quadrature 4529 ren_var=[] 4530 fac_var=[] 4531 for i in range(len(self.run_card['rw_rscale'])): 4532 ren_var.append(scale[i]-s_cen) # central fac scale 4533 for i in range(len(self.run_card['rw_fscale'])): 4534 fac_var.append(scale[i*len(self.run_card['rw_rscale'])]-s_cen) # central ren scale 4535 s_max_q=((s_cen+math.sqrt(math.pow(max(ren_var),2)+math.pow(max(fac_var),2)))/s_cen-1)*100 4536 s_min_q=(1-(s_cen-math.sqrt(math.pow(min(ren_var),2)+math.pow(min(fac_var),2)))/s_cen)*100 4537 s_size=len(scale) 4538 else: 4539 s_max=0.0 4540 s_min=0.0 4541 s_max_q=0.0 4542 s_min_q=0.0 4543 s_size=len(scale) 4544 scale_info.append({'cen':s_cen, 'min':s_min, 'max':s_max, \ 4545 'min_q':s_min_q, 'max_q':s_max_q, 'size':s_size, \ 4546 'label':self.run_card['dynamical_scale_choice'][j], \ 4547 'unc':self.run_card['reweight_scale'][j]}) 4548 4549 # check if we can use LHAPDF to compute the PDF uncertainty 4550 if any(self.run_card['reweight_pdf']): 4551 use_lhapdf=False 4552 lhapdf_libdir=subprocess.Popen([self.options['lhapdf'],'--libdir'],\ 4553 stdout=subprocess.PIPE).stdout.read().strip() 4554 4555 try: 4556 candidates=[dirname for dirname in os.listdir(lhapdf_libdir) \ 4557 if os.path.isdir(pjoin(lhapdf_libdir,dirname))] 4558 except OSError: 4559 candidates=[] 4560 for candidate in candidates: 4561 if os.path.isfile(pjoin(lhapdf_libdir,candidate,'site-packages','lhapdf.so')): 4562 sys.path.insert(0,pjoin(lhapdf_libdir,candidate,'site-packages')) 4563 try: 4564 import lhapdf 4565 use_lhapdf=True 4566 break 4567 except ImportError: 4568 sys.path.pop(0) 4569 continue 4570 4571 if not use_lhapdf: 4572 try: 4573 candidates=[dirname for dirname in os.listdir(lhapdf_libdir+'64') \ 4574 if os.path.isdir(pjoin(lhapdf_libdir+'64',dirname))] 4575 
4575                  except OSError: 
4576                      candidates=[] 
4577                  for candidate in candidates: 
4578                      if os.path.isfile(pjoin(lhapdf_libdir+'64',candidate,'site-packages','lhapdf.so')): 
4579                          sys.path.insert(0,pjoin(lhapdf_libdir+'64',candidate,'site-packages')) 
4580                          try: 
4581                              import lhapdf 
4582                              use_lhapdf=True 
4583                              break 
4584                          except ImportError: 
4585                              sys.path.pop(0) 
4586                              continue 
4587  
4588              if not use_lhapdf: 
4589                  try: 
4590                      import lhapdf 
4591                      use_lhapdf=True 
4592                  except ImportError: 
4593                      logger.warning("Failed to access python version of LHAPDF: "\ 
4594                                     "cannot compute PDF uncertainty from the "\ 
4595                                     "weights in the events. The weights in the LHE " \ 
4596                                     "event files will still cover all PDF set members, "\ 
4597                                     "but there will be no PDF uncertainty printed in the run summary. \n "\ 
4598                                     "If the python interface to LHAPDF is available on your system, try "\ 
4599                                     "adding its location to the PYTHONPATH environment variable and the "\ 
4600                                     "LHAPDF library location to LD_LIBRARY_PATH (Linux) or DYLD_LIBRARY_PATH (Mac OS X).") 
4601                      use_lhapdf=False 
4602  
4603          # turn off lhapdf printing any messages 
4604          if any(self.run_card['reweight_pdf']) and use_lhapdf: lhapdf.setVerbosity(0) 
4605  
4606          pdf_info=[] 
4607          for j,pdfset in enumerate(pdfs): 
4608              p_cen=pdfset[0] 
4609              if p_cen != 0.0 and self.run_card['reweight_pdf'][j]: 
4610                  if use_lhapdf: 
4611                      pdfsetname=self.run_card['lhapdfsetname'][j] 
4612                      try: 
4613                          p=lhapdf.getPDFSet(pdfsetname) 
4614                          ep=p.uncertainty(pdfset,-1) 
4615                          p_cen=ep.central 
4616                          p_min=abs(ep.errminus/p_cen)*100 
4617                          p_max=abs(ep.errplus/p_cen)*100 
4618                          p_type=p.errorType 
4619                          p_size=p.size 
4620                          p_conf=p.errorConfLevel 
4621                      except: 
4622                          logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname) 
4623                          p_min=0.0 
4624                          p_max=0.0 
4625                          p_type='unknown' 
4626                          p_conf='unknown' 
4627                          p_size=len(pdfset) 
4628                  else: 
4629                      p_min=0.0 
4630                      p_max=0.0 
4631                      p_type='unknown' 
4632                      p_conf='unknown' 
4633                      p_size=len(pdfset) 
4634                      pdfsetname=self.run_card['lhaid'][j] 
4635              else: 
4636                  p_min=0.0 
4637                  p_max=0.0 
4638                  p_type='none' 
4639                  p_conf='unknown' 
4640                  p_size=len(pdfset) 
4641                  pdfsetname=self.run_card['lhaid'][j] 
4642              pdf_info.append({'cen':p_cen, 'min':p_min, 'max':p_max, \ 
4643                               'unc':p_type, 'name':pdfsetname, 'size':p_size, \ 
4644                               'label':self.run_card['lhaid'][j], 'conf':p_conf}) 
4645  
4646          scale_pdf_info=[scale_info,pdf_info] 
4647          return scale_pdf_info
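     # Worked example of the scale envelope computed above, with hypothetical 
     # accumulated sums scale = [10.0, 9.1, 11.2] (central value first): 
     #   s_cen = 10.0 
     #   s_max = (max(scale)/s_cen - 1)*100 = (11.2/10.0 - 1)*100 = 12.0  (%) 
     #   s_min = (1 - min(scale)/s_cen)*100 = (1 -  9.1/10.0)*100 =  9.0  (%) 
     # so the run summary would quote the cross section as 10.0 +12.0% -9.0%.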
4648 4649
4650 - def wait_for_complete(self, run_type):
4651 """this function waits for jobs on cluster to complete their run.""" 4652 starttime = time.time() 4653 #logger.info(' Waiting for submitted jobs to complete') 4654 update_status = lambda i, r, f: self.update_status((i, r, f, run_type), 4655 starttime=starttime, level='parton', update_results=True) 4656 try: 4657 self.cluster.wait(self.me_dir, update_status) 4658 except: 4659 self.cluster.remove() 4660 raise
4661
4662 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
4663 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args""" 4664 self.ijob = 0 4665 if run_type != 'shower': 4666 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list) 4667 for args in arg_list: 4668 for Pdir, jobs in job_dict.items(): 4669 for job in jobs: 4670 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) ) 4671 if self.cluster_mode == 2: 4672 time.sleep(1) # security to allow all jobs to be launched 4673 else: 4674 self.njobs = len(arg_list) 4675 for args in arg_list: 4676 [(cwd, exe)] = job_dict.items() 4677 self.run_exe(exe, args, run_type, cwd) 4678 4679 self.wait_for_complete(run_type)
4680 4681 4682
4683 - def check_event_files(self,jobs):
4684 """check the integrity of the event files after splitting, and resubmit 4685 those which are not nicely terminated""" 4686 jobs_to_resubmit = [] 4687 for job in jobs: 4688 last_line = '' 4689 try: 4690 last_line = subprocess.Popen( 4691 ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \ 4692 stdout = subprocess.PIPE).stdout.read().strip() 4693 except IOError: 4694 pass 4695 if last_line != "</LesHouchesEvents>": 4696 jobs_to_resubmit.append(job) 4697 self.njobs = 0 4698 if jobs_to_resubmit: 4699 run_type = 'Resubmitting broken jobs' 4700 logger.info('Some event files are broken, corresponding jobs will be resubmitted.') 4701 for job in jobs_to_resubmit: 4702 logger.debug('Resubmitting ' + job['dirname'] + '\n') 4703 self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
4704 4705
4706 - def find_jobs_to_split(self, pdir, job, arg):
4707 """looks into the nevents_unweighed_splitted file to check how many 4708 split jobs are needed for this (pdir, job). arg is F, B or V""" 4709 # find the number of the integration channel 4710 splittings = [] 4711 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read() 4712 pattern = re.compile('for i in (\d+) ; do') 4713 match = re.search(pattern, ajob) 4714 channel = match.groups()[0] 4715 # then open the nevents_unweighted_splitted file and look for the 4716 # number of splittings to be done 4717 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read() 4718 # This skips the channels with zero events, because they are 4719 # not of the form GFXX_YY, but simply GFXX 4720 pattern = re.compile(r"%s_(\d+)/events.lhe" % \ 4721 pjoin(pdir, 'G%s%s' % (arg,channel))) 4722 matches = re.findall(pattern, nevents_file) 4723 for m in matches: 4724 splittings.append(m) 4725 return splittings
4726 4727
4728 - def run_exe(self, exe, args, run_type, cwd=None):
4729 """this basic function launch locally/on cluster exe with args as argument. 4730 """ 4731 # first test that exe exists: 4732 execpath = None 4733 if cwd and os.path.exists(pjoin(cwd, exe)): 4734 execpath = pjoin(cwd, exe) 4735 elif not cwd and os.path.exists(exe): 4736 execpath = exe 4737 else: 4738 raise aMCatNLOError('Cannot find executable %s in %s' \ 4739 % (exe, os.getcwd())) 4740 # check that the executable has exec permissions 4741 if self.cluster_mode == 1 and not os.access(execpath, os.X_OK): 4742 subprocess.call(['chmod', '+x', exe], cwd=cwd) 4743 # finally run it 4744 if self.cluster_mode == 0: 4745 #this is for the serial run 4746 misc.call(['./'+exe] + args, cwd=cwd) 4747 self.ijob += 1 4748 self.update_status((max([self.njobs - self.ijob - 1, 0]), 4749 min([1, self.njobs - self.ijob]), 4750 self.ijob, run_type), level='parton') 4751 4752 #this is for the cluster/multicore run 4753 elif 'reweight' in exe: 4754 # a reweight run 4755 # Find the correct PDF input file 4756 input_files, output_files = [], [] 4757 pdfinput = self.get_pdf_input_filename() 4758 if os.path.exists(pdfinput): 4759 input_files.append(pdfinput) 4760 input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events')) 4761 input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat')) 4762 input_files.append(args[0]) 4763 output_files.append('%s.rwgt' % os.path.basename(args[0])) 4764 output_files.append('reweight_xsec_events.output') 4765 output_files.append('scale_pdf_dependence.dat') 4766 4767 return self.cluster.submit2(exe, args, cwd=cwd, 4768 input_files=input_files, output_files=output_files, 4769 required_output=output_files) 4770 4771 elif 'ajob' in exe: 4772 # the 'standard' amcatnlo job 4773 # check if args is a list of string 4774 if type(args[0]) == str: 4775 input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args) 4776 #submitting 4777 self.cluster.submit2(exe, args, cwd=cwd, 4778 input_files=input_files, output_files=output_files, 4779 required_output=required_output) 4780 4781 # # keep track of folders and arguments for splitted evt gen 4782 # subfolder=output_files[-1].split('/')[0] 4783 # if len(args) == 4 and '_' in subfolder: 4784 # self.split_folders[pjoin(cwd,subfolder)] = [exe] + args 4785 4786 elif 'shower' in exe: 4787 # a shower job 4788 # args are [shower, output(HEP or TOP), run_name] 4789 # cwd is the shower rundir, where the executable are found 4790 input_files, output_files = [], [] 4791 shower = args[0] 4792 # the input files 4793 if shower == 'PYTHIA8': 4794 input_files.append(pjoin(cwd, 'Pythia8.exe')) 4795 input_files.append(pjoin(cwd, 'Pythia8.cmd')) 4796 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 4797 input_files.append(pjoin(cwd, 'config.sh')) 4798 input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc')) 4799 else: 4800 input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc')) 4801 else: 4802 input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower)) 4803 input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower)) 4804 if shower == 'HERWIGPP': 4805 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')): 4806 input_files.append(pjoin(cwd, 'Herwig++')) 4807 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')): 4808 input_files.append(pjoin(cwd, 'Herwig')) 4809 input_files.append(pjoin(cwd, 'HepMCFortran.so')) 4810 if len(args) == 3: 4811 if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')): 4812 
                     input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')) 
4813                  elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')): 
4814                      input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')) 
4815                  else: 
4816                      raise aMCatNLOError, 'Event file not present in %s' % \ 
4817                              pjoin(self.me_dir, 'Events', self.run_name) 
4818              else: 
4819                  input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3])) 
4820              # the output files 
4821              if len(args) == 3: 
4822                  output_files.append('mcatnlo_run.log') 
4823              else: 
4824                  output_files.append('mcatnlo_run_%s.log' % args[3]) 
4825              if args[1] == 'HEP': 
4826                  if len(args) == 3: 
4827                      fname = 'events' 
4828                  else: 
4829                      fname = 'events_%s' % args[3] 
4830                  if shower in ['PYTHIA8', 'HERWIGPP']: 
4831                      output_files.append(fname + '.hepmc.gz') 
4832                  else: 
4833                      output_files.append(fname + '.hep.gz') 
4834              elif args[1] == 'TOP' or args[1] == 'HWU': 
4835                  if len(args) == 3: 
4836                      fname = 'histfile' 
4837                  else: 
4838                      fname = 'histfile_%s' % args[3] 
4839                  output_files.append(fname + '.tar') 
4840              else: 
4841                  raise aMCatNLOError, 'Not a valid output argument for shower job : %s' % args[1] 
4842              #submitting 
4843              self.cluster.submit2(exe, args, cwd=cwd, 
4844                           input_files=input_files, output_files=output_files) 
4845  
4846          else: 
4847              return self.cluster.submit(exe, args, cwd=cwd)
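     # Usage sketch (paths are illustrative), matching the call made from 
     # run_reweight above: 
     # 
     #   self.run_exe('reweight_xsec_events.local', ['events.lhe', '1'], 
     #                'Running reweight', 
     #                cwd=pjoin(self.me_dir, 'SubProcesses', 'P0_gg_ttx/GF1')) 
     # 
     # In serial mode (cluster_mode == 0) the executable is run directly; 
     # otherwise it is dispatched through self.cluster.submit2 with the 
     # input/output file lists built above.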
4848
4849 - def getIO_ajob(self,exe,cwd, args):
4850          # use local disk if possible => need to specify which files are 
4851          # the input/output files 
4852  
4853          output_files = [] 
4854          required_output = [] 
4855          input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'), 
4856                         pjoin(cwd, 'symfact.dat'), 
4857                         pjoin(cwd, 'iproc.dat'), 
4858                         pjoin(cwd, 'initial_states_map.dat'), 
4859                         pjoin(cwd, 'configs_and_props_info.dat'), 
4860                         pjoin(cwd, 'leshouche_info.dat'), 
4861                         pjoin(cwd, 'FKS_params.dat')] 
4862  
4863          # For GoSam interface, we must copy the SLHA card as well 
4864          if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')): 
4865              input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat')) 
4866  
4867          if os.path.exists(pjoin(cwd,'nevents.tar')): 
4868              input_files.append(pjoin(cwd,'nevents.tar')) 
4869  
4870          if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')): 
4871              input_files.append(pjoin(cwd, 'OLE_order.olc')) 
4872  
4873          # File for the loop (might not be present if MadLoop is not used) 
4874          if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \ 
4875                            cluster.need_transfer(self.options): 
4876              input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz')) 
4877          elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \ 
4878                            cluster.need_transfer(self.options): 
4879              tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz', 
4880                              dereference=True) 
4881              tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources') 
4882              tf.close() 
4883              input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz')) 
4884  
4885          if args[1] == 'born' or args[1] == 'all': 
4886              # MADEVENT MINT FO MODE 
4887              input_files.append(pjoin(cwd, 'madevent_mintFO')) 
4888              if args[2] == '0': 
4889                  current = '%s_G%s' % (args[1],args[0]) 
4890              else: 
4891                  current = '%s_G%s_%s' % (args[1],args[0],args[2]) 
4892              if os.path.exists(pjoin(cwd,current)): 
4893                  input_files.append(pjoin(cwd, current)) 
4894              output_files.append(current) 
4895  
4896              required_output.append('%s/results.dat' % current) 
4897              required_output.append('%s/res_%s.dat' % (current,args[3])) 
4898              required_output.append('%s/log_MINT%s.txt' % (current,args[3])) 
4899              required_output.append('%s/mint_grids' % current) 
4900              required_output.append('%s/grid.MC_integer' % current) 
4901              if args[3] != '0': 
4902                  required_output.append('%s/scale_pdf_dependence.dat' % current) 
4903  
4904          elif args[1] == 'F' or args[1] == 'B': 
4905              # MINTMC MODE 
4906              input_files.append(pjoin(cwd, 'madevent_mintMC')) 
4907  
4908              if args[2] == '0': 
4909                  current = 'G%s%s' % (args[1],args[0]) 
4910              else: 
4911                  current = 'G%s%s_%s' % (args[1],args[0],args[2]) 
4912              if os.path.exists(pjoin(cwd,current)): 
4913                  input_files.append(pjoin(cwd, current)) 
4914              output_files.append(current) 
4915              if args[2] > '0': 
4916                  # this is for the split event generation 
4917                  output_files.append('G%s%s_%s' % (args[1], args[0], args[2])) 
4918                  required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3])) 
4919  
4920              else: 
4921                  required_output.append('%s/log_MINT%s.txt' % (current,args[3])) 
4922                  if args[3] in ['0','1']: 
4923                      required_output.append('%s/results.dat' % current) 
4924                  if args[3] == '1': 
4925                      output_files.append('%s/results.dat' % current) 
4926  
4927          else: 
4928              raise aMCatNLOError, 'invalid arguments: %s' % (', '.join(args)) 
4929  
4930          #Find the correct PDF input file 
4931          pdfinput = self.get_pdf_input_filename() 
4932          if os.path.exists(pdfinput): 
4933              input_files.append(pdfinput) 
4934          return input_files, output_files, required_output, args
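     # Sketch of the naming convention handled above (channel number '3' is 
     # hypothetical): a fixed-order job with args = ['3', 'born', '0', '1'] 
     # maps to the channel directory 'born_G3', with required output such as 
     #   born_G3/results.dat, born_G3/res_1.dat, born_G3/log_MINT1.txt, 
     #   born_G3/mint_grids, born_G3/grid.MC_integer, 
     #   born_G3/scale_pdf_dependence.dat 
     # while a MINTMC job with args = ['3', 'F', '0', '1'] uses 'GF3' instead.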
4935 4936
4937 - def compile(self, mode, options):
4938 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as 4939 specified in mode""" 4940 4941 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name)) 4942 4943 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name, 4944 '%s_%s_banner.txt' % (self.run_name, self.run_tag))) 4945 4946 self.get_characteristics(pjoin(self.me_dir, 4947 'SubProcesses', 'proc_characteristics')) 4948 4949 #define a bunch of log files 4950 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log') 4951 madloop_log = pjoin(self.me_dir, 'compile_madloop.log') 4952 reweight_log = pjoin(self.me_dir, 'compile_reweight.log') 4953 test_log = pjoin(self.me_dir, 'test.log') 4954 4955 # environmental variables to be included in make_opts 4956 self.make_opts_var = {} 4957 if self.proc_characteristics['has_loops'] and \ 4958 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 4959 self.make_opts_var['madloop'] = 'true' 4960 4961 self.update_status('Compiling the code', level=None, update_results=True) 4962 4963 libdir = pjoin(self.me_dir, 'lib') 4964 sourcedir = pjoin(self.me_dir, 'Source') 4965 4966 #clean files 4967 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log]) 4968 #define which executable/tests to compile 4969 if '+' in mode: 4970 mode = mode.split('+')[0] 4971 if mode in ['NLO', 'LO']: 4972 exe = 'madevent_mintFO' 4973 tests = ['test_ME'] 4974 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts')) 4975 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']: 4976 exe = 'madevent_mintMC' 4977 tests = ['test_ME', 'test_MC'] 4978 # write an analyse_opts with a dummy analysis so that compilation goes through 4979 with open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w') as fsock: 4980 fsock.write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n') 4981 4982 #directory where to compile exe 4983 p_dirs = [d for d in \ 4984 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d] 4985 # create param_card.inc and run_card.inc 4986 self.do_treatcards('', amcatnlo=True, mode=mode) 4987 # if --nocompile option is specified, check here that all exes exists. 4988 # If they exists, return 4989 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \ 4990 for p_dir in p_dirs]) and options['nocompile']: 4991 return 4992 4993 # rm links to lhapdflib/ PDFsets if exist 4994 if os.path.exists(pjoin(libdir, 'PDFsets')): 4995 files.rm(pjoin(libdir, 'PDFsets')) 4996 4997 # read the run_card to find if lhapdf is used or not 4998 if self.run_card['pdlabel'] == 'lhapdf' and \ 4999 (self.banner.get_detail('run_card', 'lpp1') != 0 or \ 5000 self.banner.get_detail('run_card', 'lpp2') != 0): 5001 5002 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs]) 5003 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 5004 lhaid_list = self.run_card['lhaid'] 5005 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 5006 5007 else: 5008 if self.run_card['lpp1'] == 1 == self.run_card['lpp2']: 5009 logger.info('Using built-in libraries for PDFs') 5010 5011 self.make_opts_var['lhapdf'] = "" 5012 5013 # read the run_card to find if applgrid is used or not 5014 if self.run_card['iappl'] != 0: 5015 self.make_opts_var['applgrid'] = 'True' 5016 # check versions of applgrid and amcfast 5017 for code in ['applgrid','amcfast']: 5018 try: 5019 p = subprocess.Popen([self.options[code], '--version'], \ 5020 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 5021 except OSError: 5022 raise aMCatNLOError(('No valid %s installation found. 
5023                         'Please set the path to %s-config by using \n' + \ 
5024                         'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code)) 
5025                  else: 
5026                      output, _ = p.communicate() 
5027                      if code == 'applgrid' and output < '1.4.63': 
5028                          raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.63 or later.'\ 
5029                                               +' You are using %s' % output) 
5030                      if code == 'amcfast' and output < '1.1.1': 
5031                          raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\ 
5032                                               +' You are using %s' % output) 
5033  
5034              # set-up the Source/make_opts with the correct applgrid-config file 
5035              appllibs="  APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \ 
5036                       % (self.options['amcfast'],self.options['applgrid']) 
5037              text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines() 
5038              text_out=[] 
5039              for line in text: 
5040                  if line.strip().startswith('APPLLIBS=$'): 
5041                      line=appllibs 
5042                  text_out.append(line) 
5043              with open(pjoin(self.me_dir,'Source','make_opts'),'w') as fsock: 
5044                  fsock.writelines(text_out) 
5045          else: 
5046              self.make_opts_var['applgrid'] = "" 
5047  
5048          if 'fastjet' in self.options.keys() and self.options['fastjet']: 
5049              self.make_opts_var['fastjet_config'] = self.options['fastjet'] 
5050  
5051          # add the make_opts_var to make_opts 
5052          self.update_make_opts() 
5053  
5054          # make Source 
5055          self.update_status('Compiling source...', level=None) 
5056          misc.compile(['clean4pdf'], cwd = sourcedir) 
5057          misc.compile(cwd = sourcedir) 
5058          if os.path.exists(pjoin(libdir, 'libdhelas.a')) \ 
5059            and os.path.exists(pjoin(libdir, 'libgeneric.a')) \ 
5060            and os.path.exists(pjoin(libdir, 'libmodel.a')) \ 
5061            and os.path.exists(pjoin(libdir, 'libpdf.a')): 
5062              logger.info('          ...done, continuing with P* directories') 
5063          else: 
5064              raise aMCatNLOError('Compilation failed') 
5065  
5066          # make StdHep (only necessary with MG option output_dependencies='internal') 
5067          MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib') 
5068          if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \ 
5069             not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))): 
5070              if os.path.exists(pjoin(sourcedir,'StdHEP')): 
5071                  logger.info('Compiling StdHEP (can take a couple of minutes) ...') 
5072                  misc.compile(['StdHEP'], cwd = sourcedir) 
5073                  logger.info('          ...done.') 
5074              else: 
5075                  raise aMCatNLOError('Could not compile StdHEP because its'+\ 
5076                                      ' source directory could not be found in the SOURCE folder.\n'+\ 
5077                                      " Check the MG5_aMC option 'output_dependencies.'") 
5078  
5079          # make CutTools (only necessary with MG option output_dependencies='internal') 
5080          if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 
5081             not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 
5082              if os.path.exists(pjoin(sourcedir,'CutTools')): 
5083                  logger.info('Compiling CutTools (can take a couple of minutes) ...') 
5084                  misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1) 
5085                  logger.info('          ...done.') 
5086              else: 
5087                  raise aMCatNLOError('Could not compile CutTools because its'+\ 
5088                                      ' source directory could not be found in the SOURCE folder.\n'+\ 
5089                                      " Check the MG5_aMC option 'output_dependencies.'") 
5090          if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 
5091             not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 
5092              raise aMCatNLOError('CutTools compilation failed.') 
5093  
5094          # Verify compatibility between current compiler and the one which was 
5095          # used when last compiling CutTools (if specified). 
5096 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 5097 libdir, 'libcts.a')))),'compiler_version.log') 5098 if os.path.exists(compiler_log_path): 5099 compiler_version_used = open(compiler_log_path,'r').read() 5100 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 5101 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 5102 if os.path.exists(pjoin(sourcedir,'CutTools')): 5103 logger.info('CutTools was compiled with a different fortran'+\ 5104 ' compiler. Re-compiling it now...') 5105 misc.compile(['cleanCT'], cwd = sourcedir) 5106 misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1) 5107 logger.info(' ...done.') 5108 else: 5109 raise aMCatNLOError("CutTools installation in %s"\ 5110 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\ 5111 " seems to have been compiled with a different compiler than"+\ 5112 " the one specified in MG5_aMC. Please recompile CutTools.") 5113 5114 # make IREGI (only necessary with MG option output_dependencies='internal') 5115 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \ 5116 and os.path.exists(pjoin(sourcedir,'IREGI')): 5117 logger.info('Compiling IREGI (can take a couple of minutes) ...') 5118 misc.compile(['IREGI'], cwd = sourcedir) 5119 logger.info(' ...done.') 5120 5121 if os.path.exists(pjoin(libdir, 'libiregi.a')): 5122 # Verify compatibility between current compiler and the one which was 5123 # used when last compiling IREGI (if specified). 5124 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 5125 libdir, 'libiregi.a')))),'compiler_version.log') 5126 if os.path.exists(compiler_log_path): 5127 compiler_version_used = open(compiler_log_path,'r').read() 5128 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 5129 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 5130 if os.path.exists(pjoin(sourcedir,'IREGI')): 5131 logger.info('IREGI was compiled with a different fortran'+\ 5132 ' compiler. Re-compiling it now...') 5133 misc.compile(['cleanIR'], cwd = sourcedir) 5134 misc.compile(['IREGI'], cwd = sourcedir) 5135 logger.info(' ...done.') 5136 else: 5137 raise aMCatNLOError("IREGI installation in %s"\ 5138 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\ 5139 " seems to have been compiled with a different compiler than"+\ 5140 " the one specified in MG5_aMC. 
Please recompile IREGI.") 5141 5142 # check if MadLoop virtuals have been generated 5143 if self.proc_characteristics['has_loops'] and \ 5144 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 5145 if mode in ['NLO', 'aMC@NLO', 'noshower']: 5146 tests.append('check_poles') 5147 5148 # make and run tests (if asked for), gensym and make madevent in each dir 5149 self.update_status('Compiling directories...', level=None) 5150 5151 for test in tests: 5152 self.write_test_input(test) 5153 5154 try: 5155 import multiprocessing 5156 if not self.nb_core: 5157 try: 5158 self.nb_core = int(self.options['nb_core']) 5159 except TypeError: 5160 self.nb_core = multiprocessing.cpu_count() 5161 except ImportError: 5162 self.nb_core = 1 5163 5164 compile_options = copy.copy(self.options) 5165 compile_options['nb_core'] = self.nb_core 5166 compile_cluster = cluster.MultiCore(**compile_options) 5167 logger.info('Compiling on %d cores' % self.nb_core) 5168 5169 update_status = lambda i, r, f: self.donothing(i,r,f) 5170 for p_dir in p_dirs: 5171 compile_cluster.submit(prog = compile_dir, 5172 argument = [self.me_dir, p_dir, mode, options, 5173 tests, exe, self.options['run_mode']]) 5174 try: 5175 compile_cluster.wait(self.me_dir, update_status) 5176 except Exception, error: 5177 logger.warning("Fail to compile the Subprocesses") 5178 if __debug__: 5179 raise 5180 compile_cluster.remove() 5181 self.do_quit('') 5182 5183 logger.info('Checking test output:') 5184 for p_dir in p_dirs: 5185 logger.info(p_dir) 5186 for test in tests: 5187 logger.info(' Result for %s:' % test) 5188 5189 this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir) 5190 #check that none of the tests failed 5191 self.check_tests(test, this_dir)
5192 5193
5194 - def donothing(*args):
5195 pass
5196 5197
5198 - def check_tests(self, test, dir):
5199 """just call the correct parser for the test log. 5200 Skip check_poles for LOonly folders""" 5201 if test in ['test_ME', 'test_MC']: 5202 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test)) 5203 elif test == 'check_poles' and not os.path.exists(pjoin(dir,'parton_lum_0.f')): 5204 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
5205 5206
5207 - def parse_test_mx_log(self, log):
5208 """read and parse the test_ME/MC.log file""" 5209 content = open(log).read() 5210 if 'FAILED' in content: 5211 logger.info('Output of the failing test:\n'+content[:-1],'$MG:BOLD') 5212 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \ 5213 'Please check that widths of final state particles (e.g. top) have been' + \ 5214 ' set to 0 in the param_card.dat.') 5215 else: 5216 lines = [l for l in content.split('\n') if 'PASSED' in l] 5217 logger.info(' Passed.') 5218 logger.debug('\n'+'\n'.join(lines))
5219 5220
5221 - def parse_check_poles_log(self, log):
5222 """reads and parse the check_poles.log file""" 5223 content = open(log).read() 5224 npass = 0 5225 nfail = 0 5226 for line in content.split('\n'): 5227 if 'PASSED' in line: 5228 npass +=1 5229 tolerance = float(line.split()[1]) 5230 if 'FAILED' in line: 5231 nfail +=1 5232 tolerance = float(line.split()[1]) 5233 5234 if nfail + npass == 0: 5235 logger.warning('0 points have been tried') 5236 return 5237 5238 if float(nfail)/float(nfail+npass) > 0.1: 5239 raise aMCatNLOError('Poles do not cancel, run cannot continue') 5240 else: 5241 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \ 5242 %(npass, nfail+npass, tolerance))
5243 5244
5245 - def write_test_input(self, test):
5246 """write the input files to run test_ME/MC or check_poles""" 5247 if test in ['test_ME', 'test_MC']: 5248 content = "-2 -2\n" #generate randomly energy/angle 5249 content+= "100 100\n" #run 100 points for soft and collinear tests 5250 content+= "0\n" #all FKS configs 5251 content+= '\n'.join(["-1"] * 50) #random diagram (=first diagram) 5252 elif test == 'check_poles': 5253 content = '20 \n -1\n' 5254 5255 file = open(pjoin(self.me_dir, '%s_input.txt' % test), 'w') 5256 if test == 'test_MC': 5257 shower = self.run_card['parton_shower'] 5258 header = "1 \n %s\n 1 -0.1\n-1 -0.1\n" % shower 5259 file.write(header + content) 5260 elif test == 'test_ME': 5261 header = "2 \n" 5262 file.write(header + content) 5263 else: 5264 file.write(content) 5265 file.close()
5266 5267 5268 action_switcher = AskRunNLO 5269 ############################################################################
5270 - def ask_run_configuration(self, mode, options, switch={}):
5271 """Ask the question when launching generate_events/multi_run""" 5272 5273 if 'parton' not in options: 5274 options['parton'] = False 5275 if 'reweightonly' not in options: 5276 options['reweightonly'] = False 5277 5278 if mode == 'auto': 5279 mode = None 5280 if not mode and (options['parton'] or options['reweightonly']): 5281 mode = 'noshower' 5282 5283 passing_cmd = [] 5284 for key,value in switch.keys(): 5285 passing_cmd.append('%s=%s' % (key,value)) 5286 5287 if 'do_reweight' in options and options['do_reweight']: 5288 passing_cmd.append('reweight=ON') 5289 if 'do_madspin' in options and options['do_madspin']: 5290 passing_cmd.append('madspin=ON') 5291 5292 force = self.force 5293 if mode == 'onlyshower': 5294 passing_cmd.append('onlyshower') 5295 force = True 5296 elif mode: 5297 passing_cmd.append(mode) 5298 5299 switch, cmd_switch = self.ask('', '0', [], ask_class = self.action_switcher, 5300 mode=mode, force=force, 5301 first_cmd=passing_cmd, 5302 return_instance=True) 5303 5304 if 'mode' in switch: 5305 mode = switch['mode'] 5306 5307 #assign the mode depending of the switch 5308 if not mode or mode == 'auto': 5309 if switch['order'] == 'LO': 5310 if switch['runshower']: 5311 mode = 'aMC@LO' 5312 elif switch['fixed_order'] == 'ON': 5313 mode = 'LO' 5314 else: 5315 mode = 'noshowerLO' 5316 elif switch['order'] == 'NLO': 5317 if switch['runshower']: 5318 mode = 'aMC@NLO' 5319 elif switch['fixed_order'] == 'ON': 5320 mode = 'NLO' 5321 else: 5322 mode = 'noshower' 5323 logger.info('will run in mode: %s' % mode) 5324 5325 if mode == 'noshower': 5326 if switch['shower'] == 'OFF': 5327 logger.warning("""You have chosen not to run a parton shower. 5328 NLO events without showering are NOT physical. 5329 Please, shower the LesHouches events before using them for physics analyses. 5330 You have to choose NOW which parton-shower you WILL use and specify it in the run_card.""") 5331 else: 5332 logger.info("""Your Parton-shower choice is not available for running. 5333 The events will be generated for the associated Parton-Shower. 5334 Remember that NLO events without showering are NOT physical.""", '$MG:BOLD') 5335 5336 5337 # specify the cards which are needed for this run. 
5338          cards = ['param_card.dat', 'run_card.dat'] 
5339          ignore = [] 
5340          if mode in ['LO', 'NLO']: 
5341              options['parton'] = True 
5342              ignore = ['shower_card.dat', 'madspin_card.dat'] 
5343              cards.append('FO_analyse_card.dat') 
5344          else: 
5345              if switch['madspin'] != 'OFF': 
5346                  cards.append('madspin_card.dat') 
5347              if switch['reweight'] != 'OFF': 
5348                  cards.append('reweight_card.dat') 
5349              if switch['madanalysis'] in ['HADRON', 'ON']: 
5350                  cards.append('madanalysis5_hadron_card.dat') 
5351          if 'aMC@' in mode: 
5352              cards.append('shower_card.dat') 
5353          if mode == 'onlyshower': 
5354              cards = ['shower_card.dat'] 
5355          if options['reweightonly']: 
5356              cards = ['run_card.dat'] 
5357  
5358          self.keep_cards(cards, ignore) 
5359  
5360          if mode == 'onlyshower': 
5361              cards = ['shower_card.dat'] 
5362  
5363  
5364          # automatically switch to keep_wgt option 
5365          first_cmd = cmd_switch.get_cardcmd() 
5366  
5367          if not options['force'] and not self.force: 
5368              self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd) 
5369  
5370          self.banner = banner_mod.Banner() 
5371  
5372          # store the cards in the banner 
5373          for card in cards: 
5374              self.banner.add(pjoin(self.me_dir, 'Cards', card)) 
5375          # and the run settings 
5376          run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()]) 
5377          self.banner.add_text('run_settings', run_settings) 
5378  
5379          if not mode == 'onlyshower': 
5380              self.run_card = self.banner.charge_card('run_card') 
5381              self.run_tag = self.run_card['run_tag'] 
5382              #this is if the user did not provide a name for the current run 
5383              if not hasattr(self, 'run_name') or not self.run_name: 
5384                  self.run_name = self.find_available_run_name(self.me_dir) 
5385                  #add a tag to the run_name to distinguish the run_type 
5386                  if self.run_name.startswith('run_'): 
5387                      if mode in ['LO','aMC@LO','noshowerLO']: 
5388                          self.run_name += '_LO' 
5389              self.set_run_name(self.run_name, self.run_tag, 'parton') 
5390              if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']: 
5391                  raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""") 
5392              elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']: 
5393                  logger.warning("""You are running with FxFx merging enabled. To be able to merge 
5394                  samples of various multiplicities without double counting, you 
5395                  have to remove some events after showering 'by hand'. Please 
5396                  read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""") 
5397                  if self.run_card['parton_shower'].upper() == 'PYTHIA6Q': 
5398                      raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""") 
5399                  elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8': 
5400                      question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \ 
5401                               "Type \'n\' to stop or \'y\' to continue" 
5402                      answers = ['n','y'] 
5403                      answer = self.ask(question, 'n', answers) 
5404                      if answer == 'n': 
5405                          error = '''Stop operation''' 
5406                          self.ask_run_configuration(mode, options) 
5407  #                        raise aMCatNLOError(error) 
5408              elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']: 
5409                  # NNLL+NLO jet-veto only possible for LO event generation or fNLO runs. 
5410 raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""") 5411 if 'aMC@' in mode or mode == 'onlyshower': 5412 self.shower_card = self.banner.charge_card('shower_card') 5413 5414 elif mode in ['LO', 'NLO']: 5415 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat') 5416 self.analyse_card = self.banner.charge_card('FO_analyse_card') 5417 5418 return mode
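     # Example of the switch -> mode resolution above: a user asking for NLO 
     # accuracy with neither a shower nor a fixed-order run, i.e. 
     #   switch = {'order': 'NLO', 'runshower': False, 'fixed_order': 'OFF', ...} 
     # ends up in mode 'noshower': NLO events are generated but left 
     # unshowered (hence the warning that such events are not physical).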
5419
5420 5421 #=============================================================================== 5422 # aMCatNLOCmd 5423 #=============================================================================== 5424 -class aMCatNLOCmdShell(aMCatNLOCmd, cmd.CmdShell):
5425 """The command line processor of MadGraph"""
5426 5427 _compile_usage = "compile [MODE] [options]\n" + \ 5428 "-- compiles aMC@NLO \n" + \ 5429 " MODE can be either FO, for fixed-order computations, \n" + \ 5430 " or MC for matching with parton-shower monte-carlos. \n" + \ 5431 " (if omitted, it is set to MC)\n" 5432 _compile_parser = misc.OptionParser(usage=_compile_usage) 5433 _compile_parser.add_option("-f", "--force", default=False, action='store_true', 5434 help="Use the card present in the directory for the launch, without editing them") 5435 5436 _launch_usage = "launch [MODE] [options]\n" + \ 5437 "-- execute aMC@NLO \n" + \ 5438 " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \ 5439 " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \ 5440 " computation of the total cross section and the filling of parton-level histograms \n" + \ 5441 " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \ 5442 " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \ 5443 " event file is generated which will be showered with the MonteCarlo specified \n" + \ 5444 " in the run_card.dat\n" 5445 5446 _launch_parser = misc.OptionParser(usage=_launch_usage) 5447 _launch_parser.add_option("-f", "--force", default=False, action='store_true', 5448 help="Use the card present in the directory for the launch, without editing them") 5449 _launch_parser.add_option("-c", "--cluster", default=False, action='store_true', 5450 help="Submit the jobs on the cluster") 5451 _launch_parser.add_option("-m", "--multicore", default=False, action='store_true', 5452 help="Submit the jobs on multicore mode") 5453 _launch_parser.add_option("-x", "--nocompile", default=False, action='store_true', 5454 help="Skip compilation. 
Ignored if no executable is found") 5455 _launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true', 5456 help="Skip integration and event generation, just run reweight on the" + \ 5457 " latest generated event files (see list in SubProcesses/nevents_unweighted)") 5458 _launch_parser.add_option("-p", "--parton", default=False, action='store_true', 5459 help="Stop the run after the parton level file generation (you need " + \ 5460 "to shower the file in order to get physical results)") 5461 _launch_parser.add_option("-o", "--only_generation", default=False, action='store_true', 5462 help="Skip grid set up, just generate events starting from " + \ 5463 "the last available results") 5464 _launch_parser.add_option("-n", "--name", default=False, dest='run_name', 5465 help="Provide a name to the run") 5466 _launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid', 5467 help="For use with APPLgrid only: start from existing grids") 5468 _launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true', 5469 help="Run the reweight module (reweighting by different model parameters)") 5470 _launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true', 5471 help="Run the madspin package") 5472 5473 5474 5475 _generate_events_usage = "generate_events [MODE] [options]\n" + \ 5476 "-- execute aMC@NLO \n" + \ 5477 " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \ 5478 " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \ 5479 " computation of the total cross section and the filling of parton-level histograms \n" + \ 5480 " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \ 5481 " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \ 5482 " event file is generated which will be showered with the MonteCarlo specified \n" + \ 5483 " in the run_card.dat\n" 5484 5485 _generate_events_parser = misc.OptionParser(usage=_generate_events_usage) 5486 _generate_events_parser.add_option("-f", "--force", default=False, action='store_true', 5487 help="Use the card present in the directory for the generate_events, without editing them") 5488 _generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true', 5489 help="Submit the jobs on the cluster") 5490 _generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true', 5491 help="Submit the jobs on multicore mode") 5492 _generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true', 5493 help="Skip compilation. 
Ignored if no executable is found") 
5494      _generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true', 
5495                              help="Skip integration and event generation, just run reweight on the" + \ 
5496                                   " latest generated event files (see list in SubProcesses/nevents_unweighted)") 
5497      _generate_events_parser.add_option("-p", "--parton", default=False, action='store_true', 
5498                              help="Stop the run after the parton level file generation (you need " + \ 
5499                                   "to shower the file in order to get physical results)") 
5500      _generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true', 
5501                              help="Skip grid set up, just generate events starting from " + \ 
5502                                   "the last available results") 
5503      _generate_events_parser.add_option("-n", "--name", default=False, dest='run_name', 
5504                              help="Provide a name to the run") 
5505  
5506  
5507  
5508      _calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \ 
5509                  "-- calculate cross section up to ORDER.\n" + \ 
5510                  "   ORDER can be either LO or NLO (if omitted, it is set to NLO). \n" 
5511  
5512      _calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage) 
5513      _calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true', 
5514                              help="Use the card present in the directory for the launch, without editing them") 
5515      _calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true', 
5516                              help="Submit the jobs on the cluster") 
5517      _calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true', 
5518                              help="Submit the jobs on multicore mode") 
5519      _calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true', 
5520                              help="Skip compilation. Ignored if no executable is found") 
5521      _calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name', 
5522                              help="Provide a name to the run") 
5523      _calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid', 
5524                              help="For use with APPLgrid only: start from existing grids") 
5525      _calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true', 
5526                              help="Skip grid set up, just generate events starting from " + \ 
5527                                   "the last available results") 
5528  
5529      _shower_usage = 'shower run_name [options]\n' + \ 
5530                  '-- do shower/hadronization on parton-level file generated for run run_name\n' + \ 
5531                  '   all the information (e.g. number of events, MonteCarlo, ...)\n' + \ 
5532                  '   is directly read from the header of the event file\n' 
5533      _shower_parser = misc.OptionParser(usage=_shower_usage) 
5534      _shower_parser.add_option("-f", "--force", default=False, action='store_true', 
5535                              help="Use the shower_card present in the directory for the launch, without editing") 
5536  
5537  if '__main__' == __name__: 
5538      # Launch the interface without any check if one code is already running. 
5539      # This can ONLY run a single command !! 
5540      import sys 
5541      if not sys.version_info[0] == 2 or sys.version_info[1] < 6: 
5542          sys.exit('MadGraph/MadEvent 5 works only with python 2.6 or later (but not python 3.X).\n'+\ 
5543                   'Please upgrade your version of python.') 
5544  
5545      import os 
5546      import optparse 
5547      # Get the directory of the script real path (bin) 
5548      # and add it to the current PYTHONPATH 
5549      root_path = os.path.dirname(os.path.dirname(os.path.realpath( __file__ ))) 
5550      sys.path.insert(0, root_path)
5551 5552 - class MyOptParser(optparse.OptionParser):
5553 - class InvalidOption(Exception): pass
5554 - def error(self, msg=''):
5555 raise MyOptParser.InvalidOption(msg)
5556      # Write out nice usage message if called with -h or --help 
5557      usage = "usage: %prog [options] [FILE] " 
5558      parser = MyOptParser(usage=usage) 
5559      parser.add_option("-l", "--logging", default='INFO', 
5560                        help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]") 
5561      parser.add_option("","--web", action="store_true", default=False, dest='web', \ 
5562                        help='force the code to be in secure mode') 
5563      parser.add_option("","--debug", action="store_true", default=False, dest='debug', \ 
5564                        help='force to launch debug mode') 
5565      parser_error = '' 
5566      done = False 
5567  
5568      for i in range(len(sys.argv)-1): 
5569          try: 
5570              (options, args) = parser.parse_args(sys.argv[1:len(sys.argv)-i]) 
5571              done = True 
5572          except MyOptParser.InvalidOption, error: 
5573              pass 
5574          else: 
5575              args += sys.argv[len(sys.argv)-i:] 
5576      if not done: 
5577          # raise the correct error: 
5578          try: 
5579              (options, args) = parser.parse_args() 
5580          except MyOptParser.InvalidOption, error: 
5581              print error 
5582              sys.exit(2) 
5583  
5584      if len(args) == 0: 
5585          args = '' 
5586  
5587      import subprocess 
5588      import logging 
5589      import logging.config 
5590      # Set logging level according to the logging level given by options 
5591      #logging.basicConfig(level=vars(logging)[options.logging]) 
5592      import internal.coloring_logging 
5593      try: 
5594          if __debug__ and options.logging == 'INFO': 
5595              options.logging = 'DEBUG' 
5596          if options.logging.isdigit(): 
5597              level = int(options.logging) 
5598          else: 
5599              level = eval('logging.' + options.logging) 
5600          print os.path.join(root_path, 'internal', 'me5_logging.conf') 
5601          logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf')) 
5602          logging.root.setLevel(level) 
5603          logging.getLogger('madgraph').setLevel(level) 
5604      except: 
5605          raise 
5606  
5607  
5608      # Call the cmd interface main loop 
5609      try: 
5610          if args: 
5611              # a single command is provided 
5612              if '--web' in args: 
5613                  i = args.index('--web') 
5614                  args.pop(i) 
5615                  cmd_line = aMCatNLOCmd(me_dir=os.path.dirname(root_path),force_run=True) 
5616              else: 
5617                  cmd_line = aMCatNLOCmdShell(me_dir=os.path.dirname(root_path),force_run=True) 
5618  
5619              if not hasattr(cmd_line, 'do_%s' % args[0]): 
5620                  if parser_error: 
5621                      print parser_error 
5622                      print 'and %s cannot be interpreted as a valid command.' % args[0] 
5623                  else: 
5624                      print 'ERROR: %s not a valid command. Please retry' % args[0] 
5625              else: 
5626                  cmd_line.use_rawinput = False 
5627                  cmd_line.run_cmd(' '.join(args)) 
5628                  cmd_line.run_cmd('quit') 
5629  
5630      except KeyboardInterrupt: 
5631          print 'quit on KeyboardInterrupt' 
5632          pass 
5633  