Package madgraph :: Package iolibs :: Module export_v4

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from madgraph.iolibs.helas_call_writers import HelasCallWriter 
  16  """Methods and classes to export matrix elements to v4 format.""" 
  17   
  18  import copy 
  19  from cStringIO import StringIO 
  20  from distutils import dir_util 
  21  import itertools 
  22  import fractions 
  23  import glob 
  24  import logging 
  25  import math 
  26  import os 
  27  import re 
  28  import shutil 
  29  import subprocess 
  30  import sys 
  31  import time 
  32  import traceback 
  33   
  34  import aloha 
  35   
  36  import madgraph.core.base_objects as base_objects 
  37  import madgraph.core.color_algebra as color 
  38  import madgraph.core.helas_objects as helas_objects 
  39  import madgraph.iolibs.drawing_eps as draw 
  40  import madgraph.iolibs.files as files 
  41  import madgraph.iolibs.group_subprocs as group_subprocs 
  42  import madgraph.iolibs.file_writers as writers 
  43  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  44  import madgraph.iolibs.template_files as template_files 
  45  import madgraph.iolibs.ufo_expression_parsers as parsers 
  46  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  47  import madgraph.interface.common_run_interface as common_run_interface 
  48  import madgraph.various.diagram_symmetry as diagram_symmetry 
  49  import madgraph.various.misc as misc 
  50  import madgraph.various.banner as banner_mod 
  51  import madgraph.various.process_checks as process_checks 
  52  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  53  import aloha.create_aloha as create_aloha 
  54  import models.import_ufo as import_ufo 
  55  import models.write_param_card as param_writer 
  56  import models.check_param_card as check_param_card 
  57   
  58   
  59  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  60  from madgraph.iolibs.files import cp, ln, mv 
  61   
  62  from madgraph import InvalidCmd 
  63   
  64  pjoin = os.path.join 
  65   
  66  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  67  logger = logging.getLogger('madgraph.export_v4') 
  68   
  69  default_compiler= {'fortran': 'gfortran', 
  70                         'f2py': 'f2py', 
  71                         'cpp':'g++'} 
  72  
  73  
  74  class VirtualExporter(object):
  75  
  76      # Exporter variables which modify the way madgraph interacts with this class
  77  
  78      grouped_mode = 'madevent'
  79      # This variable changes the type of object passed to the 'generate_subprocess_directory'
  80      # functions.
  81      # False to avoid grouping (only identical matrix elements are merged)
  82      # 'madevent' groups the massless quarks and massless leptons
  83      # 'madweight' groups the gluon with the massless quarks
  84      sa_symmetry = False
  85      # If grouped_mode=False, uu~ and u~u will be called independently.
  86      # Setting sa_symmetry generates only one of the two matrix elements.
  87      check = True
  88      # Ask madgraph to check if the directory already exists and propose to the user to
  89      # remove it first if this is the case
  90      output = 'Template'
  91      # [Template, None, dir]
  92      # - Template: madgraph will call copy_template
  93      # - dir: madgraph will just create an empty directory for initialisation
  94      # - None: madgraph does nothing for initialisation
  95      exporter = 'v4'
  96      # language of the output: 'v4' for Fortran output,
  97      # 'cpp' for C++ output
  98  
  99  
 100      def __init__(self, dir_path = "", opt=None):
 101          # cmd_options is a dictionary with all the optional arguments passed at output time
 102  
 103          # Activate some monkey patching for the helas call writer.
 104          helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \
 105                                                        self.helas_call_writer_custom
 106  
 107  
 108      # helper function to customise the helas writer
 109      @staticmethod
 110      def custom_helas_call(call, arg):
 111          """Static method to customise the way aloha function calls are written.
 112          call is the default template for the call;
 113          arg is the dictionary used for the call.
 114          """
 115          return call, arg
 116  
 117      helas_call_writer_custom = lambda x,y,z: x.custom_helas_call(y,z)
 118  
 119  
 120      def copy_template(self, model):
 121          return
 122  
 123      def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
 124          # generate_subprocess_directory(self, matrix_element, helicity_model, me_number) [for ungrouped]
 125          return 0 # return an integer stating the number of calls to helicity routines
 126  
 127      def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
 128          return
 129  
 130      def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
 131          return
 132  
 133  
 134      def pass_information_from_cmd(self, cmd):
 135          """Pass information from the command interface to the exporter.
 136          Please do not modify any object of the interface from the exporter.
 137          """
 138          return
 139  
 140      def modify_grouping(self, matrix_element):
 141          return False, matrix_element
 142  
 143      def export_model_files(self, model_v4_path):
 144          raise Exception, "V4 model not supported by this type of exporter. Please use UFO model"
 145          return
 146  
 147      def export_helas(self, HELAS_PATH):
 148          raise Exception, "V4 model not supported by this type of exporter. Please use UFO model"
 149          return
 150  
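
The attributes and stub methods above form the contract a new output format implements. A minimal sketch of such a subclass (the class name 'MyExporter' and the chosen attribute values are hypothetical; only the attribute and method names come from VirtualExporter) could read:

class MyExporter(VirtualExporter):

    grouped_mode = False    # do not group subprocesses
    sa_symmetry = True      # generate only one of uu~ / u~u
    check = True            # warn if the output directory already exists
    output = 'dir'          # madgraph only creates an empty directory
    exporter = 'v4'         # Fortran-style output

    def generate_subprocess_directory(self, matrix_element, helicity_model, me_number):
        # write whatever files this format needs for the matrix element,
        # then return the number of calls to helicity routines
        return 0
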
 151  #===============================================================================
 152  # ProcessExporterFortran
 153  #===============================================================================
 154  class ProcessExporterFortran(VirtualExporter):
 155      """Class to take care of exporting a set of matrix elements to
 156      Fortran (v4) format."""
 157  
 158      default_opt = {'clean': False, 'complex_mass':False,
 159                     'export_format':'madevent', 'mp': False,
 160                     'v5_model': True,
 161                     'output_options':{}
 162                     }
 163      grouped_mode = False
 164  
 165      def __init__(self, dir_path = "", opt=None):
 166          """Initiate the ProcessExporterFortran with directory information"""
 167          self.mgme_dir = MG5DIR
 168          self.dir_path = dir_path
 169          self.model = None
 170  
 171          self.opt = dict(self.default_opt)
 172          if opt:
 173              self.opt.update(opt)
 174  
 175          self.cmd_options = self.opt['output_options']
 176  
 177          # placeholder to pass information to the run_interface
 178          self.proc_characteristic = banner_mod.ProcCharacteristic()
 179          # call mother class
 180          super(ProcessExporterFortran,self).__init__(dir_path, opt)
 181  
 182  
 183      #===========================================================================
 184      # process exporter fortran: switch between grouped and not grouped
 185      #===========================================================================
 186      def export_processes(self, matrix_elements, fortran_model):
 187          """Make the switch between grouped and not grouped output"""
 188  
 189          calls = 0
 190          if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
 191              for (group_number, me_group) in enumerate(matrix_elements):
 192                  calls = calls + self.generate_subprocess_directory(\
 193                                          me_group, fortran_model, group_number)
 194          else:
 195              for me_number, me in enumerate(matrix_elements.get_matrix_elements()):
 196                  calls = calls + self.generate_subprocess_directory(\
 197                                          me, fortran_model, me_number)
 198  
 199          return calls
200 201 202 #=========================================================================== 203 # create the run_card 204 #===========================================================================
205 - def create_run_card(self, matrix_elements, history):
206 """ """ 207 208 209 # bypass this for the loop-check 210 import madgraph.loop.loop_helas_objects as loop_helas_objects 211 if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement): 212 matrix_elements = None 213 214 run_card = banner_mod.RunCard() 215 216 217 default=True 218 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 219 processes = [me.get('processes') for megroup in matrix_elements 220 for me in megroup['matrix_elements']] 221 elif matrix_elements: 222 processes = [me.get('processes') 223 for me in matrix_elements['matrix_elements']] 224 else: 225 default =False 226 227 if default: 228 run_card.create_default_for_process(self.proc_characteristic, 229 history, 230 processes) 231 232 233 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 234 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
235 236 237 #=========================================================================== 238 # copy the Template in a new directory. 239 #===========================================================================
240 - def copy_template(self, model):
241 """create the directory run_name as a copy of the MadEvent 242 Template, and clean the directory 243 """ 244 245 #First copy the full template tree if dir_path doesn't exit 246 if not os.path.isdir(self.dir_path): 247 assert self.mgme_dir, \ 248 "No valid MG_ME path given for MG4 run directory creation." 249 logger.info('initialize a new directory: %s' % \ 250 os.path.basename(self.dir_path)) 251 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 252 self.dir_path, True) 253 # distutils.dir_util.copy_tree since dir_path already exists 254 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 255 self.dir_path) 256 # copy plot_card 257 for card in ['plot_card']: 258 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 259 try: 260 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 261 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 262 except IOError: 263 logger.warning("Failed to copy " + card + ".dat to default") 264 elif os.getcwd() == os.path.realpath(self.dir_path): 265 logger.info('working in local directory: %s' % \ 266 os.path.realpath(self.dir_path)) 267 # distutils.dir_util.copy_tree since dir_path already exists 268 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 269 self.dir_path) 270 # for name in misc.glob('Template/LO/*', self.mgme_dir): 271 # name = os.path.basename(name) 272 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 273 # if os.path.isfile(filename): 274 # files.cp(filename, pjoin(self.dir_path,name)) 275 # elif os.path.isdir(filename): 276 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 277 # distutils.dir_util.copy_tree since dir_path already exists 278 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 279 self.dir_path) 280 # Copy plot_card 281 for card in ['plot_card']: 282 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 283 try: 284 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 285 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 286 except IOError: 287 logger.warning("Failed to copy " + card + ".dat to default") 288 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 289 assert self.mgme_dir, \ 290 "No valid MG_ME path given for MG4 run directory creation." 
291 try: 292 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 293 except IOError: 294 MG5_version = misc.get_pkg_info() 295 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 296 297 #Ensure that the Template is clean 298 if self.opt['clean']: 299 logger.info('remove old information in %s' % \ 300 os.path.basename(self.dir_path)) 301 if os.environ.has_key('MADGRAPH_BASE'): 302 misc.call([pjoin('bin', 'internal', 'clean_template'), 303 '--web'], cwd=self.dir_path) 304 else: 305 try: 306 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 307 cwd=self.dir_path) 308 except Exception, why: 309 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 310 % (os.path.basename(self.dir_path),why)) 311 312 #Write version info 313 MG_version = misc.get_pkg_info() 314 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 315 MG_version['version']) 316 317 # add the makefile in Source directory 318 filename = pjoin(self.dir_path,'Source','makefile') 319 self.write_source_makefile(writers.FileWriter(filename)) 320 321 # add the DiscreteSampler information 322 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 323 pjoin(self.dir_path, 'Source')) 324 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 325 pjoin(self.dir_path, 'Source')) 326 327 # We need to create the correct open_data for the pdf 328 self.write_pdf_opendata()
329 330 331 #=========================================================================== 332 # Call MadAnalysis5 to generate the default cards for this process 333 #===========================================================================
334 - def create_default_madanalysis5_cards(self, history, proc_defs, processes, 335 ma5_path, output_dir, levels = ['parton','hadron']):
336 """ Call MA5 so that it writes default cards for both parton and 337 post-shower levels, tailored for this particular process.""" 338 339 if len(levels)==0: 340 return 341 start = time.time() 342 logger.info('Generating MadAnalysis5 default cards tailored to this process') 343 try: 344 MA5_interpreter = common_run_interface.CommonRunCmd.\ 345 get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100) 346 except (Exception, SystemExit) as e: 347 logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty.') 348 return 349 if MA5_interpreter is None: 350 return 351 352 MA5_main = MA5_interpreter.main 353 for lvl in ['parton','hadron']: 354 if lvl in levels: 355 card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl) 356 try: 357 text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl) 358 except (Exception, SystemExit) as e: 359 # keep the default card (skip only) 360 logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+ 361 ' default analysis card for this process.') 362 logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl) 363 error=StringIO() 364 traceback.print_exc(file=error) 365 logger.debug('MadAnalysis5 error was:') 366 logger.debug('-'*60) 367 logger.debug(error.getvalue()[:-1]) 368 logger.debug('-'*60) 369 else: 370 open(card_to_generate,'w').write(text) 371 stop = time.time() 372 if stop-start >1: 373 logger.info('Cards created in %.2fs' % (stop-start))
374 375 #=========================================================================== 376 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 377 #===========================================================================
378 - def write_procdef_mg5(self, file_pos, modelname, process_str):
379 """ write an equivalent of the MG4 proc_card in order that all the Madevent 380 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 381 382 proc_card_template = template_files.mg4_proc_card.mg4_template 383 process_template = template_files.mg4_proc_card.process_template 384 process_text = '' 385 coupling = '' 386 new_process_content = [] 387 388 389 # First find the coupling and suppress the coupling from process_str 390 #But first ensure that coupling are define whithout spaces: 391 process_str = process_str.replace(' =', '=') 392 process_str = process_str.replace('= ', '=') 393 process_str = process_str.replace(',',' , ') 394 #now loop on the element and treat all the coupling 395 for info in process_str.split(): 396 if '=' in info: 397 coupling += info + '\n' 398 else: 399 new_process_content.append(info) 400 # Recombine the process_str (which is the input process_str without coupling 401 #info) 402 process_str = ' '.join(new_process_content) 403 404 #format the SubProcess 405 replace_dict = {'process': process_str, 406 'coupling': coupling} 407 process_text += process_template.substitute(replace_dict) 408 409 replace_dict = {'process': process_text, 410 'model': modelname, 411 'multiparticle':''} 412 text = proc_card_template.substitute(replace_dict) 413 414 if file_pos: 415 ff = open(file_pos, 'w') 416 ff.write(text) 417 ff.close() 418 else: 419 return replace_dict
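
For illustration, the coupling/process splitting performed above behaves like the following stand-alone sketch (the example process string 'p p > t t~ QED=0' is hypothetical):

# Stand-alone illustration of the splitting done in write_procdef_mg5:
# tokens containing '=' are treated as coupling restrictions, the rest
# is recombined into the process definition.
process_str = "p p > t t~ QED=0"
process_str = process_str.replace(' =', '=').replace('= ', '=').replace(',', ' , ')
coupling, new_process_content = '', []
for info in process_str.split():
    if '=' in info:
        coupling += info + '\n'
    else:
        new_process_content.append(info)
print ' '.join(new_process_content)   # "p p > t t~"
print coupling                        # "QED=0"
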
420 421
422 - def pass_information_from_cmd(self, cmd):
423 """Pass information for MA5""" 424 425 self.proc_defs = cmd._curr_proc_defs
426 427 #=========================================================================== 428 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 429 #===========================================================================
430 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
431 """Function to finalize v4 directory, for inheritance.""" 432 433 self.create_run_card(matrix_elements, history) 434 self.create_MA5_cards(matrix_elements, history)
435
436 - def create_MA5_cards(self,matrix_elements,history):
437 """ A wrapper around the creation of the MA5 cards so that it can be 438 bypassed by daughter classes (i.e. in standalone).""" 439 if 'madanalysis5_path' in self.opt and not \ 440 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 441 processes = None 442 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 443 processes = [me.get('processes') for megroup in matrix_elements 444 for me in megroup['matrix_elements']] 445 elif matrix_elements: 446 processes = [me.get('processes') 447 for me in matrix_elements['matrix_elements']] 448 449 self.create_default_madanalysis5_cards( 450 history, self.proc_defs, processes, 451 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 452 levels = ['hadron','parton']) 453 454 for level in ['hadron','parton']: 455 # Copying these cards turn on the use of MadAnalysis5 by default. 456 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 457 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 458 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
459 460 #=========================================================================== 461 # Create the proc_characteristic file passing information to the run_interface 462 #===========================================================================
463 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
464 465 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
466 467 #=========================================================================== 468 # write_matrix_element_v4 469 #===========================================================================
 470      def write_matrix_element_v4(self):
 471          """Function to write a matrix.f file, for inheritance.
 472          """
 473          pass
474 475 #=========================================================================== 476 # write_pdf_opendata 477 #===========================================================================
478 - def write_pdf_opendata(self):
479 """ modify the pdf opendata file, to allow direct access to cluster node 480 repository if configure""" 481 482 if not self.opt["cluster_local_path"]: 483 changer = {"pdf_systemwide": ""} 484 else: 485 to_add = """ 486 tempname='%(path)s'//Tablefile 487 open(IU,file=tempname,status='old',ERR=1) 488 return 489 1 tempname='%(path)s/Pdfdata/'//Tablefile 490 open(IU,file=tempname,status='old',ERR=2) 491 return 492 2 tempname='%(path)s/lhapdf'//Tablefile 493 open(IU,file=tempname,status='old',ERR=3) 494 return 495 3 tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile 496 open(IU,file=tempname,status='old',ERR=4) 497 return 498 4 tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile 499 open(IU,file=tempname,status='old',ERR=5) 500 return 501 """ % {"path" : self.opt["cluster_local_path"]} 502 503 changer = {"pdf_systemwide": to_add} 504 505 506 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f")) 507 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read() 508 ff.writelines(template % changer) 509 510 # Do the same for lhapdf set 511 if not self.opt["cluster_local_path"]: 512 changer = {"cluster_specific_path": ""} 513 else: 514 to_add=""" 515 LHAPath='%(path)s/PDFsets' 516 Inquire(File=LHAPath, exist=exists) 517 if(exists)return 518 LHAPath='%(path)s/../lhapdf/pdfsets/6.1/' 519 Inquire(File=LHAPath, exist=exists) 520 if(exists)return 521 LHAPath='%(path)s/../lhapdf/pdfsets/' 522 Inquire(File=LHAPath, exist=exists) 523 if(exists)return 524 LHAPath='./PDFsets' 525 """ % {"path" : self.opt["cluster_local_path"]} 526 changer = {"cluster_specific_path": to_add} 527 528 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f")) 529 #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w") 530 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read() 531 ff.writelines(template % changer) 532 533 534 return
535 536 537 538 #=========================================================================== 539 # write_maxparticles_file 540 #===========================================================================
541 - def write_maxparticles_file(self, writer, matrix_elements):
542 """Write the maxparticles.inc file for MadEvent""" 543 544 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 545 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 546 matrix_elements.get('matrix_elements')]) 547 else: 548 maxparticles = max([me.get_nexternal_ninitial()[0] \ 549 for me in matrix_elements]) 550 551 lines = "integer max_particles\n" 552 lines += "parameter(max_particles=%d)" % maxparticles 553 554 # Write the file 555 writer.writelines(lines) 556 557 return True
558 559 560 #=========================================================================== 561 # export the model 562 #===========================================================================
563 - def export_model_files(self, model_path):
564 """Configure the files/link of the process according to the model""" 565 566 # Import the model 567 for file in os.listdir(model_path): 568 if os.path.isfile(pjoin(model_path, file)): 569 shutil.copy2(pjoin(model_path, file), \ 570 pjoin(self.dir_path, 'Source', 'MODEL'))
571 572 586 594 595 596 #=========================================================================== 597 # export the helas routine 598 #===========================================================================
599 - def export_helas(self, helas_path):
600 """Configure the files/link of the process according to the model""" 601 602 # Import helas routine 603 for filename in os.listdir(helas_path): 604 filepos = pjoin(helas_path, filename) 605 if os.path.isfile(filepos): 606 if filepos.endswith('Makefile.template'): 607 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 608 elif filepos.endswith('Makefile'): 609 pass 610 else: 611 cp(filepos, self.dir_path + '/Source/DHELAS')
612 # following lines do the same but whithout symbolic link 613 # 614 #def export_helas(mgme_dir, dir_path): 615 # 616 # # Copy the HELAS directory 617 # helas_dir = pjoin(mgme_dir, 'HELAS') 618 # for filename in os.listdir(helas_dir): 619 # if os.path.isfile(pjoin(helas_dir, filename)): 620 # shutil.copy2(pjoin(helas_dir, filename), 621 # pjoin(dir_path, 'Source', 'DHELAS')) 622 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 623 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 624 # 625 626 #=========================================================================== 627 # generate_subprocess_directory 628 #===========================================================================
629 - def generate_subprocess_directory(self, matrix_element, 630 fortran_model, 631 me_number):
632 """Routine to generate a subprocess directory (for inheritance)""" 633 634 pass
635 636 #=========================================================================== 637 # get_source_libraries_list 638 #===========================================================================
 639      def get_source_libraries_list(self):
 640          """ Returns the list of libraries to be compiled when compiling the
 641          SOURCE directory. It is different for loop_induced processes and
 642          also depends on the value of the 'output_dependencies' option"""
 643  
 644          return ['$(LIBDIR)libdhelas.$(libext)',
 645                  '$(LIBDIR)libpdf.$(libext)',
 646                  '$(LIBDIR)libmodel.$(libext)',
 647                  '$(LIBDIR)libcernlib.$(libext)',
 648                  '$(LIBDIR)libbias.$(libext)']
649 650 #=========================================================================== 651 # write_source_makefile 652 #===========================================================================
 653      def write_source_makefile(self, writer):
 654          """Write the Source makefile for MadEvent (MG4)"""
 655  
 656          path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source')
 657          set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list())
 658          if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'):
 659              model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
 660  MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param
 661  param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n'''
 662          else:
 663              model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
 664  param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n'''
 665  
 666          replace_dict= {'libraries': set_of_lib,
 667                         'model':model_line,
 668                         'additional_dsample': '',
 669                         'additional_dependencies':''}
 670  
 671          if writer:
 672              text = open(path).read() % replace_dict
 673              writer.write(text)
 674  
 675          return replace_dict
676 677 #=========================================================================== 678 # write_nexternal_madspin 679 #===========================================================================
680 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
681 """Write the nexternal_prod.inc file for madspin""" 682 683 replace_dict = {} 684 685 replace_dict['nexternal'] = nexternal 686 replace_dict['ninitial'] = ninitial 687 688 file = """ \ 689 integer nexternal_prod 690 parameter (nexternal_prod=%(nexternal)d) 691 integer nincoming_prod 692 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 693 694 # Write the file 695 if writer: 696 writer.writelines(file) 697 return True 698 else: 699 return replace_dict
700 701 #=========================================================================== 702 # write_helamp_madspin 703 #===========================================================================
704 - def write_helamp_madspin(self, writer, ncomb):
705 """Write the helamp.inc file for madspin""" 706 707 replace_dict = {} 708 709 replace_dict['ncomb'] = ncomb 710 711 file = """ \ 712 integer ncomb1 713 parameter (ncomb1=%(ncomb)d) 714 double precision helamp(ncomb1) 715 common /to_helamp/helamp """ % replace_dict 716 717 # Write the file 718 if writer: 719 writer.writelines(file) 720 return True 721 else: 722 return replace_dict
723 724 725 726 #=========================================================================== 727 # write_nexternal_file 728 #===========================================================================
729 - def write_nexternal_file(self, writer, nexternal, ninitial):
730 """Write the nexternal.inc file for MG4""" 731 732 replace_dict = {} 733 734 replace_dict['nexternal'] = nexternal 735 replace_dict['ninitial'] = ninitial 736 737 file = """ \ 738 integer nexternal 739 parameter (nexternal=%(nexternal)d) 740 integer nincoming 741 parameter (nincoming=%(ninitial)d)""" % replace_dict 742 743 # Write the file 744 if writer: 745 writer.writelines(file) 746 return True 747 else: 748 return replace_dict
749 #=========================================================================== 750 # write_pmass_file 751 #===========================================================================
752 - def write_pmass_file(self, writer, matrix_element):
753 """Write the pmass.inc file for MG4""" 754 755 model = matrix_element.get('processes')[0].get('model') 756 757 lines = [] 758 for wf in matrix_element.get_external_wavefunctions(): 759 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 760 if mass.lower() != "zero": 761 mass = "abs(%s)" % mass 762 763 lines.append("pmass(%d)=%s" % \ 764 (wf.get('number_external'), mass)) 765 766 # Write the file 767 writer.writelines(lines) 768 769 return True
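
The loop above emits one assignment per external wavefunction. A stand-alone sketch of the line format it produces (the mass parameter names 'ZERO' and 'MT' and the leg numbers are hypothetical examples):

lines = []
for number_external, mass in [(1, 'ZERO'), (2, 'ZERO'), (3, 'MT')]:
    if mass.lower() != "zero":
        mass = "abs(%s)" % mass
    lines.append("pmass(%d)=%s" % (number_external, mass))
# -> ['pmass(1)=ZERO', 'pmass(2)=ZERO', 'pmass(3)=abs(MT)']
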
770 771 #=========================================================================== 772 # write_ngraphs_file 773 #===========================================================================
774 - def write_ngraphs_file(self, writer, nconfigs):
775 """Write the ngraphs.inc file for MG4. Needs input from 776 write_configs_file.""" 777 778 file = " integer n_max_cg\n" 779 file = file + "parameter (n_max_cg=%d)" % nconfigs 780 781 # Write the file 782 writer.writelines(file) 783 784 return True
785 786 #=========================================================================== 787 # write_leshouche_file 788 #===========================================================================
789 - def write_leshouche_file(self, writer, matrix_element):
790 """Write the leshouche.inc file for MG4""" 791 792 # Write the file 793 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 794 795 return True
796 797 #=========================================================================== 798 # get_leshouche_lines 799 #===========================================================================
800 - def get_leshouche_lines(self, matrix_element, numproc):
801 """Write the leshouche.inc file for MG4""" 802 803 # Extract number of external particles 804 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 805 806 lines = [] 807 for iproc, proc in enumerate(matrix_element.get('processes')): 808 legs = proc.get_legs_with_decays() 809 lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \ 810 (iproc + 1, numproc+1, nexternal, 811 ",".join([str(l.get('id')) for l in legs]))) 812 if iproc == 0 and numproc == 0: 813 for i in [1, 2]: 814 lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \ 815 (i, nexternal, 816 ",".join([ "%3r" % 0 ] * ninitial + \ 817 [ "%3r" % i ] * (nexternal - ninitial)))) 818 819 # Here goes the color connections corresponding to the JAMPs 820 # Only one output, for the first subproc! 821 if iproc == 0: 822 # If no color basis, just output trivial color flow 823 if not matrix_element.get('color_basis'): 824 for i in [1, 2]: 825 lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \ 826 (i, numproc+1,nexternal, 827 ",".join([ "%3r" % 0 ] * nexternal))) 828 829 else: 830 # First build a color representation dictionnary 831 repr_dict = {} 832 for l in legs: 833 repr_dict[l.get('number')] = \ 834 proc.get('model').get_particle(l.get('id')).get_color()\ 835 * (-1)**(1+l.get('state')) 836 # Get the list of color flows 837 color_flow_list = \ 838 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 839 ninitial) 840 # And output them properly 841 for cf_i, color_flow_dict in enumerate(color_flow_list): 842 for i in [0, 1]: 843 lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \ 844 (i + 1, cf_i + 1, numproc+1, nexternal, 845 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 846 for l in legs]))) 847 848 return lines
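
As an illustration of the DATA statements assembled above, the following stand-alone sketch reproduces the IDUP/MOTHUP line shapes for a hypothetical 2 -> 2 process with PDG ids 1 -1 > 6 -6 (the color-flow lines depend on the color basis and are not shown):

nexternal, ninitial, ids = 4, 2, [1, -1, 6, -6]
print "DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % (1, 1, nexternal,
                                           ",".join([str(i) for i in ids]))
for i in [1, 2]:
    print "DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % (i, nexternal,
        ",".join(["%3r" % 0] * ninitial + ["%3r" % i] * (nexternal - ninitial)))
# DATA (IDUP(i,1,1),i=1,4)/1,-1,6,-6/
# DATA (MOTHUP(1,i),i=1, 4)/  0,  0,  1,  1/
# DATA (MOTHUP(2,i),i=1, 4)/  0,  0,  2,  2/
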
849 850 851 852 853 #=========================================================================== 854 # write_maxamps_file 855 #===========================================================================
856 - def write_maxamps_file(self, writer, maxamps, maxflows, 857 maxproc,maxsproc):
858 """Write the maxamps.inc file for MG4.""" 859 860 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 861 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 862 (maxamps, maxflows) 863 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 864 (maxproc, maxsproc) 865 866 # Write the file 867 writer.writelines(file) 868 869 return True
870 871 872 #=========================================================================== 873 # Routines to output UFO models in MG4 format 874 #=========================================================================== 875
876 - def convert_model(self, model, wanted_lorentz = [], 877 wanted_couplings = []):
878 """ Create a full valid MG4 model from a MG5 model (coming from UFO)""" 879 880 # Make sure aloha is in quadruple precision if needed 881 old_aloha_mp=aloha.mp_precision 882 aloha.mp_precision=self.opt['mp'] 883 884 # create the MODEL 885 write_dir=pjoin(self.dir_path, 'Source', 'MODEL') 886 model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic) 887 model_builder.build(wanted_couplings) 888 889 # Backup the loop mode, because it can be changed in what follows. 890 old_loop_mode = aloha.loop_mode 891 892 # Create the aloha model or use the existing one (for loop exporters 893 # this is useful as the aloha model will be used again in the 894 # LoopHelasMatrixElements generated). We do not save the model generated 895 # here if it didn't exist already because it would be a waste of 896 # memory for tree level applications since aloha is only needed at the 897 # time of creating the aloha fortran subroutines. 898 if hasattr(self, 'aloha_model'): 899 aloha_model = self.aloha_model 900 else: 901 aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath'))) 902 aloha_model.add_Lorentz_object(model.get('lorentz')) 903 904 # Compute the subroutines 905 if wanted_lorentz: 906 aloha_model.compute_subset(wanted_lorentz) 907 else: 908 aloha_model.compute_all(save=False) 909 910 # Write them out 911 write_dir=pjoin(self.dir_path, 'Source', 'DHELAS') 912 aloha_model.write(write_dir, 'Fortran') 913 914 # Revert the original aloha loop mode 915 aloha.loop_mode = old_loop_mode 916 917 #copy Helas Template 918 cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile') 919 if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]): 920 cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f', 921 write_dir+'/aloha_functions.f') 922 aloha_model.loop_mode = False 923 else: 924 cp(MG5DIR + '/aloha/template_files/aloha_functions.f', 925 write_dir+'/aloha_functions.f') 926 create_aloha.write_aloha_file_inc(write_dir, '.f', '.o') 927 928 # Make final link in the Process 929 self.make_model_symbolic_link() 930 931 # Re-establish original aloha mode 932 aloha.mp_precision=old_aloha_mp
933 934 935 #=========================================================================== 936 # Helper functions 937 #===========================================================================
938 - def modify_grouping(self, matrix_element):
939 """allow to modify the grouping (if grouping is in place) 940 return two value: 941 - True/False if the matrix_element was modified 942 - the new(or old) matrix element""" 943 944 return False, matrix_element
945 946 #=========================================================================== 947 # Helper functions 948 #===========================================================================
949 - def get_mg5_info_lines(self):
950 """Return info lines for MG5, suitable to place at beginning of 951 Fortran files""" 952 953 info = misc.get_pkg_info() 954 info_lines = "" 955 if info and info.has_key('version') and info.has_key('date'): 956 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 957 (info['version'], info['date']) 958 info_lines = info_lines + \ 959 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 960 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 961 else: 962 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 963 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 964 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 965 966 return info_lines
967
968 - def get_process_info_lines(self, matrix_element):
969 """Return info lines describing the processes for this matrix element""" 970 971 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 972 for process in matrix_element.get('processes')])
973 974
975 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
976 """Return the Helicity matrix definition lines for this matrix element""" 977 978 helicity_line_list = [] 979 i = 0 980 for helicities in matrix_element.get_helicity_matrix(): 981 i = i + 1 982 int_list = [i, len(helicities)] 983 int_list.extend(helicities) 984 helicity_line_list.append(\ 985 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 986 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 987 988 return "\n".join(helicity_line_list)
989
990 - def get_ic_line(self, matrix_element):
991 """Return the IC definition line coming after helicities, required by 992 switchmom in madevent""" 993 994 nexternal = matrix_element.get_nexternal_ninitial()[0] 995 int_list = range(1, nexternal + 1) 996 997 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 998 ",".join([str(i) for \ 999 i in int_list]))
1000
1001 - def set_chosen_SO_index(self, process, squared_orders):
1002 """ From the squared order constraints set by the user, this function 1003 finds what indices of the squared_orders list the user intends to pick. 1004 It returns this as a string of comma-separated successive '.true.' or 1005 '.false.' for each index.""" 1006 1007 user_squared_orders = process.get('squared_orders') 1008 split_orders = process.get('split_orders') 1009 1010 if len(user_squared_orders)==0: 1011 return ','.join(['.true.']*len(squared_orders)) 1012 1013 res = [] 1014 for sqsos in squared_orders: 1015 is_a_match = True 1016 for user_sqso, value in user_squared_orders.items(): 1017 if (process.get_squared_order_type(user_sqso) =='==' and \ 1018 value!=sqsos[split_orders.index(user_sqso)]) or \ 1019 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 1020 value<sqsos[split_orders.index(user_sqso)]) or \ 1021 (process.get_squared_order_type(user_sqso) == '>' and \ 1022 value>=sqsos[split_orders.index(user_sqso)]): 1023 is_a_match = False 1024 break 1025 res.append('.true.' if is_a_match else '.false.') 1026 1027 return ','.join(res)
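
A simplified stand-alone sketch of the selection done above, restricted to '==' type constraints (the order names and numerical values are hypothetical):

split_orders = ['QCD', 'QED']
squared_orders = [(4, 0), (2, 2), (0, 4)]
user_squared_orders = {'QED': 0}          # i.e. the user asked for QED==0
res = []
for sqsos in squared_orders:
    ok = all(sqsos[split_orders.index(name)] == value
             for name, value in user_squared_orders.items())
    res.append('.true.' if ok else '.false.')
print ','.join(res)                       # .true.,.false.,.false.
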
1028
1029 - def get_split_orders_lines(self, orders, array_name, n=5):
1030 """ Return the split orders definition as defined in the list orders and 1031 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1032 1033 ret_list = [] 1034 for index, order in enumerate(orders): 1035 for k in xrange(0, len(order), n): 1036 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1037 (array_name,index + 1, k + 1, min(k + n, len(order)), 1038 ','.join(["%5r" % i for i in order[k:k + n]]))) 1039 return ret_list
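
A stand-alone illustration of a DATA line produced above, for a hypothetical split-order combination (2, 0, 1) stored in an array named 'AMPSPLITORDERS':

order, array_name, n = (2, 0, 1), 'AMPSPLITORDERS', 5
print "DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \
    (array_name, 1, 1, min(n, len(order)),
     ','.join(["%5r" % i for i in order[:n]]))
# DATA (AMPSPLITORDERS(  1,i),i=  1,  3) /    2,    0,    1/
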
1040
1041 - def format_integer_list(self, list, name, n=5):
1042 """ Return an initialization of the python list in argument following 1043 the fortran syntax using the data keyword assignment, filling an array 1044 of name 'name'. It splits rows in chunks of size n.""" 1045 1046 ret_list = [] 1047 for k in xrange(0, len(list), n): 1048 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1049 (name, k + 1, min(k + n, len(list)), 1050 ','.join(["%5r" % i for i in list[k:k + n]]))) 1051 return ret_list
1052
1053 - def get_color_data_lines(self, matrix_element, n=6):
1054 """Return the color matrix definition lines for this matrix element. Split 1055 rows in chunks of size n.""" 1056 1057 if not matrix_element.get('color_matrix'): 1058 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 1059 else: 1060 ret_list = [] 1061 my_cs = color.ColorString() 1062 for index, denominator in \ 1063 enumerate(matrix_element.get('color_matrix').\ 1064 get_line_denominators()): 1065 # First write the common denominator for this color matrix line 1066 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 1067 # Then write the numerators for the matrix elements 1068 num_list = matrix_element.get('color_matrix').\ 1069 get_line_numerators(index, denominator) 1070 1071 for k in xrange(0, len(num_list), n): 1072 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 1073 (index + 1, k + 1, min(k + n, len(num_list)), 1074 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 1075 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 1076 ret_list.append("C %s" % repr(my_cs)) 1077 return ret_list
1078 1079
1080 - def get_den_factor_line(self, matrix_element):
1081 """Return the denominator factor line for this matrix element""" 1082 1083 return "DATA IDEN/%2r/" % \ 1084 matrix_element.get_denominator_factor()
1085
1086 - def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
1087 """Return the ICOLAMP matrix, showing which JAMPs contribute to 1088 which configs (diagrams).""" 1089 1090 ret_list = [] 1091 1092 booldict = {False: ".false.", True: ".true."} 1093 1094 if not matrix_element.get('color_basis'): 1095 # No color, so only one color factor. Simply write a ".true." 1096 # for each config (i.e., each diagram with only 3 particle 1097 # vertices 1098 configs = len(mapconfigs) 1099 ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \ 1100 (num_matrix_element, configs, 1101 ','.join([".true." for i in range(configs)]))) 1102 return ret_list 1103 1104 # There is a color basis - create a list showing which JAMPs have 1105 # contributions to which configs 1106 1107 # Only want to include leading color flows, so find max_Nc 1108 color_basis = matrix_element.get('color_basis') 1109 1110 # We don't want to include the power of Nc's which come from the potential 1111 # loop color trace (i.e. in the case of a closed fermion loop for example) 1112 # so we subtract it here when computing max_Nc 1113 max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in 1114 color_basis.values()],[])) 1115 1116 # Crate dictionary between diagram number and JAMP number 1117 diag_jamp = {} 1118 for ijamp, col_basis_elem in \ 1119 enumerate(sorted(matrix_element.get('color_basis').keys())): 1120 for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]: 1121 # Only use color flows with Nc == max_Nc. However, notice that 1122 # we don't want to include the Nc power coming from the loop 1123 # in this counting. 1124 if (diag_tuple[4]-diag_tuple[5]) == max_Nc: 1125 diag_num = diag_tuple[0] + 1 1126 # Add this JAMP number to this diag_num 1127 diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \ 1128 [ijamp+1] 1129 1130 colamps = ijamp + 1 1131 for iconfig, num_diag in enumerate(mapconfigs): 1132 if num_diag == 0: 1133 continue 1134 1135 # List of True or False 1136 bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)] 1137 # Add line 1138 ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \ 1139 (iconfig+1, num_matrix_element, colamps, 1140 ','.join(["%s" % booldict[b] for b in \ 1141 bool_list]))) 1142 1143 return ret_list
1144
1145 - def get_amp2_lines(self, matrix_element, config_map = []):
1146 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 1147 1148 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 1149 # Get minimum legs in a vertex 1150 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 1151 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1152 minvert = min(vert_list) if vert_list!=[] else 0 1153 1154 ret_lines = [] 1155 if config_map: 1156 # In this case, we need to sum up all amplitudes that have 1157 # identical topologies, as given by the config_map (which 1158 # gives the topology/config for each of the diagrams 1159 diagrams = matrix_element.get('diagrams') 1160 # Combine the diagrams with identical topologies 1161 config_to_diag_dict = {} 1162 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1163 if config_map[idiag] == 0: 1164 continue 1165 try: 1166 config_to_diag_dict[config_map[idiag]].append(idiag) 1167 except KeyError: 1168 config_to_diag_dict[config_map[idiag]] = [idiag] 1169 # Write out the AMP2s summing squares of amplitudes belonging 1170 # to eiher the same diagram or different diagrams with 1171 # identical propagator properties. Note that we need to use 1172 # AMP2 number corresponding to the first diagram number used 1173 # for that AMP2. 1174 for config in sorted(config_to_diag_dict.keys()): 1175 1176 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1177 {"num": (config_to_diag_dict[config][0] + 1)} 1178 1179 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1180 sum([diagrams[idiag].get('amplitudes') for \ 1181 idiag in config_to_diag_dict[config]], [])]) 1182 1183 # Not using \sum |M|^2 anymore since this creates troubles 1184 # when ckm is not diagonal due to the JIM mechanism. 1185 if '+' in amp: 1186 line += "(%s)*dconjg(%s)" % (amp, amp) 1187 else: 1188 line += "%s*dconjg(%s)" % (amp, amp) 1189 ret_lines.append(line) 1190 else: 1191 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1192 # Ignore any diagrams with 4-particle vertices. 1193 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1194 continue 1195 # Now write out the expression for AMP2, meaning the sum of 1196 # squared amplitudes belonging to the same diagram 1197 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1198 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1199 {"num": a.get('number')} for a in \ 1200 diag.get('amplitudes')]) 1201 ret_lines.append(line) 1202 1203 return ret_lines
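
A stand-alone illustration of the two AMP2 line shapes built above (the diagram and amplitude numbers are hypothetical):

# without a config_map, each diagram gets its own squared sum:
print "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": 3} + \
      "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % {"num": a} for a in [5, 6]])
# AMP2(3)=AMP2(3)+AMP(5)*dconjg(AMP(5))+AMP(6)*dconjg(AMP(6))

# with a config_map, amplitudes of diagrams sharing a topology are summed first:
amp = "+".join(["AMP(%d)" % a for a in [1, 2]])
print "AMP2(1)=AMP2(1)+(%s)*dconjg(%s)" % (amp, amp)
# AMP2(1)=AMP2(1)+(AMP(1)+AMP(2))*dconjg(AMP(1)+AMP(2))
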
1204  
1205      #===========================================================================
1206      # Returns the data statements initializing the coefficients for the JAMP
1207      # decomposition. It is used when the JAMP initialization is decided to be
1208      # done through big arrays containing the projection coefficients.
1209      #===========================================================================
1210 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1211 n=50, Nc_value=3):
1212 """This functions return the lines defining the DATA statement setting 1213 the coefficients building the JAMPS out of the AMPS. Split rows in 1214 bunches of size n. 1215 One can specify the color_basis from which the color amplitudes originates 1216 so that there are commentaries telling what color structure each JAMP 1217 corresponds to.""" 1218 1219 if(not isinstance(color_amplitudes,list) or 1220 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1221 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs" 1222 1223 res_list = [] 1224 my_cs = color.ColorString() 1225 for index, coeff_list in enumerate(color_amplitudes): 1226 # Create the list of the complete numerical coefficient. 1227 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1228 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1229 coefficient in coeff_list] 1230 # Create the list of the numbers of the contributing amplitudes. 1231 # Mutliply by -1 for those which have an imaginary coefficient. 1232 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1233 for coefficient in coeff_list] 1234 # Find the common denominator. 1235 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1236 num_list=[(coefficient*commondenom).numerator \ 1237 for coefficient in coefs_list] 1238 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1239 index+1,len(num_list))) 1240 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1241 index+1,commondenom)) 1242 if color_basis: 1243 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1244 res_list.append("C %s" % repr(my_cs)) 1245 for k in xrange(0, len(num_list), n): 1246 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1247 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1248 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1249 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1250 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1251 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1252 pass 1253 return res_list
1254 1255
1256 - def get_JAMP_lines_split_order(self, col_amps, split_order_amps, 1257 split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
1258 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1259 defined as a matrix element or directly as a color_amplitudes dictionary. 1260 The split_order_amps specifies the group of amplitudes sharing the same 1261 amplitude orders which should be put in together in a given set of JAMPS. 1262 The split_order_amps is supposed to have the format of the second output 1263 of the function get_split_orders_mapping function in helas_objects.py. 1264 The split_order_names is optional (it should correspond to the process 1265 'split_orders' attribute) and only present to provide comments in the 1266 JAMP definitions in the code.""" 1267 1268 # Let the user call get_JAMP_lines_split_order directly from a 1269 error_msg="Malformed '%s' argument passed to the "+\ 1270 "get_JAMP_lines_split_order function: %s"%str(split_order_amps) 1271 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1272 color_amplitudes=col_amps.get_color_amplitudes() 1273 elif(isinstance(col_amps,list)): 1274 if(col_amps and isinstance(col_amps[0],list)): 1275 color_amplitudes=col_amps 1276 else: 1277 raise MadGraph5Error, error_msg%'col_amps' 1278 else: 1279 raise MadGraph5Error, error_msg%'col_amps' 1280 1281 # Verify the sanity of the split_order_amps and split_order_names args 1282 if isinstance(split_order_amps,list): 1283 for elem in split_order_amps: 1284 if len(elem)!=2: 1285 raise MadGraph5Error, error_msg%'split_order_amps' 1286 # Check the first element of the two lists to make sure they are 1287 # integers, although in principle they should all be integers. 1288 if not isinstance(elem[0],tuple) or \ 1289 not isinstance(elem[1],tuple) or \ 1290 not isinstance(elem[0][0],int) or \ 1291 not isinstance(elem[1][0],int): 1292 raise MadGraph5Error, error_msg%'split_order_amps' 1293 else: 1294 raise MadGraph5Error, error_msg%'split_order_amps' 1295 1296 if not split_order_names is None: 1297 if isinstance(split_order_names,list): 1298 # Should specify the same number of names as there are elements 1299 # in the key of the split_order_amps. 1300 if len(split_order_names)!=len(split_order_amps[0][0]): 1301 raise MadGraph5Error, error_msg%'split_order_names' 1302 # Check the first element of the list to be a string 1303 if not isinstance(split_order_names[0],str): 1304 raise MadGraph5Error, error_msg%'split_order_names' 1305 else: 1306 raise MadGraph5Error, error_msg%'split_order_names' 1307 1308 # Now scan all contributing orders to be individually computed and 1309 # construct the list of color_amplitudes for JAMP to be constructed 1310 # accordingly. 1311 res_list=[] 1312 for i, amp_order in enumerate(split_order_amps): 1313 col_amps_order = [] 1314 for jamp in color_amplitudes: 1315 col_amps_order.append(filter(lambda col_amp: 1316 col_amp[1] in amp_order[1],jamp)) 1317 if split_order_names: 1318 res_list.append('C JAMPs contributing to orders '+' '.join( 1319 ['%s=%i'%order for order in zip(split_order_names, 1320 amp_order[0])])) 1321 if self.opt['export_format'] in ['madloop_matchbox']: 1322 res_list.extend(self.get_JAMP_lines(col_amps_order, 1323 JAMP_format="JAMP(%s,{0})".format(str(i+1)), 1324 JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))) 1325 else: 1326 res_list.extend(self.get_JAMP_lines(col_amps_order, 1327 JAMP_format="JAMP(%s,{0})".format(str(i+1)))) 1328 1329 return res_list
1330 1331
1332 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", 1333 split=-1):
1334 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1335 defined as a matrix element or directly as a color_amplitudes dictionary, 1336 Jamp_formatLC should be define to allow to add LeadingColor computation 1337 (usefull for MatchBox) 1338 The split argument defines how the JAMP lines should be split in order 1339 not to be too long.""" 1340 1341 # Let the user call get_JAMP_lines directly from a MatrixElement or from 1342 # the color amplitudes lists. 1343 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1344 color_amplitudes=col_amps.get_color_amplitudes() 1345 elif(isinstance(col_amps,list)): 1346 if(col_amps and isinstance(col_amps[0],list)): 1347 color_amplitudes=col_amps 1348 else: 1349 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1350 else: 1351 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1352 1353 1354 res_list = [] 1355 for i, coeff_list in enumerate(color_amplitudes): 1356 # It might happen that coeff_list is empty if this function was 1357 # called from get_JAMP_lines_split_order (i.e. if some color flow 1358 # does not contribute at all for a given order). 1359 # In this case we simply set it to 0. 1360 if coeff_list==[]: 1361 res_list.append(((JAMP_format+"=0D0") % str(i + 1))) 1362 continue 1363 # Break the JAMP definition into 'n=split' pieces to avoid having 1364 # arbitrarly long lines. 1365 first=True 1366 n = (len(coeff_list)+1 if split<=0 else split) 1367 while coeff_list!=[]: 1368 coefs=coeff_list[:n] 1369 coeff_list=coeff_list[n:] 1370 res = ((JAMP_format+"=") % str(i + 1)) + \ 1371 ((JAMP_format % str(i + 1)) if not first and split>0 else '') 1372 1373 first=False 1374 # Optimization: if all contributions to that color basis element have 1375 # the same coefficient (up to a sign), put it in front 1376 list_fracs = [abs(coefficient[0][1]) for coefficient in coefs] 1377 common_factor = False 1378 diff_fracs = list(set(list_fracs)) 1379 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1: 1380 common_factor = True 1381 global_factor = diff_fracs[0] 1382 res = res + '%s(' % self.coeff(1, global_factor, False, 0) 1383 1384 # loop for JAMP 1385 for (coefficient, amp_number) in coefs: 1386 if not coefficient: 1387 continue 1388 if common_factor: 1389 res = (res + "%s" + AMP_format) % \ 1390 (self.coeff(coefficient[0], 1391 coefficient[1] / abs(coefficient[1]), 1392 coefficient[2], 1393 coefficient[3]), 1394 str(amp_number)) 1395 else: 1396 res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0], 1397 coefficient[1], 1398 coefficient[2], 1399 coefficient[3]), 1400 str(amp_number)) 1401 1402 if common_factor: 1403 res = res + ')' 1404 1405 res_list.append(res) 1406 1407 return res_list
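
The lines assembled above have the general shape sketched below; the exact coefficient text is delegated to self.coeff (defined further down in this module), so the numerical factors shown here are only indicative:

# Indicative shape of the JAMP definitions produced by get_JAMP_lines:
#
#   JAMP(1)=+AMP(1)-AMP(2)
#   JAMP(2)=+1D0/3D0*(+AMP(2)-AMP(3))
#
# the second form corresponds to the 'common_factor' optimization, where a
# coefficient shared (up to sign) by all contributions is pulled out front.
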
1408
1409 - def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
1410 """Generate the PDF lines for the auto_dsig.f file""" 1411 1412 processes = matrix_element.get('processes') 1413 model = processes[0].get('model') 1414 1415 pdf_definition_lines = "" 1416 pdf_data_lines = "" 1417 pdf_lines = "" 1418 1419 if ninitial == 1: 1420 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 1421 for i, proc in enumerate(processes): 1422 process_line = proc.base_string() 1423 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 1424 pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n" 1425 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 1426 else: 1427 # Pick out all initial state particles for the two beams 1428 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 1429 p in processes]))), 1430 sorted(list(set([p.get_initial_pdg(2) for \ 1431 p in processes])))] 1432 1433 # Prepare all variable names 1434 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 1435 sum(initial_states,[])]) 1436 for key,val in pdf_codes.items(): 1437 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 1438 1439 # Set conversion from PDG code to number used in PDF calls 1440 pdgtopdf = {21: 0, 22: 7} 1441 1442 # Fill in missing entries of pdgtopdf 1443 for pdg in sum(initial_states,[]): 1444 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 1445 pdgtopdf[pdg] = pdg 1446 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 1447 # If any particle has pdg code 7, we need to use something else 1448 pdgtopdf[pdg] = 6000000 + pdg 1449 1450 # Get PDF variable declarations for all initial states 1451 for i in [0,1]: 1452 pdf_definition_lines += "DOUBLE PRECISION " + \ 1453 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1454 for pdg in \ 1455 initial_states[i]]) + \ 1456 "\n" 1457 1458 # Get PDF data lines for all initial states 1459 for i in [0,1]: 1460 pdf_data_lines += "DATA " + \ 1461 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1462 for pdg in initial_states[i]]) + \ 1463 "/%d*1D0/" % len(initial_states[i]) + \ 1464 "\n" 1465 1466 # Get PDF lines for all different initial states 1467 for i, init_states in enumerate(initial_states): 1468 if subproc_group: 1469 pdf_lines = pdf_lines + \ 1470 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 1471 % (i + 1, i + 1) 1472 else: 1473 pdf_lines = pdf_lines + \ 1474 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 1475 % (i + 1, i + 1) 1476 1477 for nbi,initial_state in enumerate(init_states): 1478 if initial_state in pdf_codes.keys(): 1479 if subproc_group: 1480 pdf_lines = pdf_lines + \ 1481 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \ 1482 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 1483 (pdf_codes[initial_state], 1484 i + 1, i + 1, pdgtopdf[initial_state], 1485 i + 1, i + 1) 1486 else: 1487 pdf_lines = pdf_lines + \ 1488 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \ 1489 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 1490 (pdf_codes[initial_state], 1491 i + 1, i + 1, pdgtopdf[initial_state], 1492 i + 1, 1493 i + 1, i + 1) 1494 pdf_lines = pdf_lines + "ENDIF\n" 1495 1496 # Add up PDFs for the different initial state particles 1497 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 1498 for proc in processes: 1499 process_line = proc.base_string() 1500 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! 
" + process_line 1501 pdf_lines = pdf_lines + "\nPD(IPROC)=" 1502 for ibeam in [1, 2]: 1503 initial_state = proc.get_initial_pdg(ibeam) 1504 if initial_state in pdf_codes.keys(): 1505 pdf_lines = pdf_lines + "%s%d*" % \ 1506 (pdf_codes[initial_state], ibeam) 1507 else: 1508 pdf_lines = pdf_lines + "1d0*" 1509 # Remove last "*" from pdf_lines 1510 pdf_lines = pdf_lines[:-1] + "\n" 1511 pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n" 1512 1513 # Remove last line break from the return variables 1514 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1515 1516 #=========================================================================== 1517 # write_props_file 1518 #===========================================================================
1519 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1520 """Write the props.inc file for MadEvent. Needs input from 1521 write_configs_file.""" 1522 1523 lines = [] 1524 1525 particle_dict = matrix_element.get('processes')[0].get('model').\ 1526 get('particle_dict') 1527 1528 for iconf, configs in enumerate(s_and_t_channels): 1529 for vertex in configs[0] + configs[1][:-1]: 1530 leg = vertex.get('legs')[-1] 1531 if leg.get('id') not in particle_dict: 1532 # Fake propagator used in multiparticle vertices 1533 mass = 'zero' 1534 width = 'zero' 1535 pow_part = 0 1536 else: 1537 particle = particle_dict[leg.get('id')] 1538 # Get mass 1539 if particle.get('mass').lower() == 'zero': 1540 mass = particle.get('mass') 1541 else: 1542 mass = "abs(%s)" % particle.get('mass') 1543 # Get width 1544 if particle.get('width').lower() == 'zero': 1545 width = particle.get('width') 1546 else: 1547 width = "abs(%s)" % particle.get('width') 1548 1549 pow_part = 1 + int(particle.is_boson()) 1550 1551 lines.append("prmass(%d,%d) = %s" % \ 1552 (leg.get('number'), iconf + 1, mass)) 1553 lines.append("prwidth(%d,%d) = %s" % \ 1554 (leg.get('number'), iconf + 1, width)) 1555 lines.append("pow(%d,%d) = %d" % \ 1556 (leg.get('number'), iconf + 1, pow_part)) 1557 1558 # Write the file 1559 writer.writelines(lines) 1560 1561 return True
1562 1563 #=========================================================================== 1564 # write_configs_file 1565 #===========================================================================
1566 - def write_configs_file(self, writer, matrix_element):
1567 """Write the configs.inc file for MadEvent""" 1568 1569 # Extract number of external particles 1570 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1571 1572 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1573 mapconfigs = [c[0] for c in configs] 1574 model = matrix_element.get('processes')[0].get('model') 1575 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1576 [[c[1]] for c in configs], 1577 mapconfigs, 1578 nexternal, ninitial, 1579 model)
1580 1581 #=========================================================================== 1582 # write_configs_file_from_diagrams 1583 #===========================================================================
1584 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 1585 nexternal, ninitial, model):
1586 """Write the actual configs.inc file. 1587 1588 configs is the diagrams corresponding to configs (each 1589 diagrams is a list of corresponding diagrams for all 1590 subprocesses, with None if there is no corresponding diagrams 1591 for a given process). 1592 mapconfigs gives the diagram number for each config. 1593 1594 For s-channels, we need to output one PDG for each subprocess in 1595 the subprocess group, in order to be able to pick the right 1596 one for multiprocesses.""" 1597 1598 lines = [] 1599 1600 s_and_t_channels = [] 1601 1602 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 1603 for config in configs if [d for d in config if d][0].\ 1604 get_vertex_leg_numbers()!=[]] 1605 minvert = min(vert_list) if vert_list!=[] else 0 1606 1607 # Number of subprocesses 1608 nsubprocs = len(configs[0]) 1609 1610 nconfigs = 0 1611 1612 new_pdg = model.get_first_non_pdg() 1613 1614 for iconfig, helas_diags in enumerate(configs): 1615 if any(vert > minvert for vert in [d for d in helas_diags if d]\ 1616 [0].get_vertex_leg_numbers()) : 1617 # Only 3-vertices allowed in configs.inc except for vertices 1618 # which originate from a shrunk loop. 1619 continue 1620 nconfigs += 1 1621 1622 # Need s- and t-channels for all subprocesses, including 1623 # those that don't contribute to this config 1624 empty_verts = [] 1625 stchannels = [] 1626 for h in helas_diags: 1627 if h: 1628 # get_s_and_t_channels gives vertices starting from 1629 # final state external particles and working inwards 1630 stchannels.append(h.get('amplitudes')[0].\ 1631 get_s_and_t_channels(ninitial, model, new_pdg)) 1632 else: 1633 stchannels.append((empty_verts, None)) 1634 1635 # For t-channels, just need the first non-empty one 1636 tchannels = [t for s,t in stchannels if t != None][0] 1637 1638 # For s_and_t_channels (to be used later) use only first config 1639 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 1640 tchannels]) 1641 1642 # Make sure empty_verts is same length as real vertices 1643 if any([s for s,t in stchannels]): 1644 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 1645 1646 # Reorganize s-channel vertices to get a list of all 1647 # subprocesses for each vertex 1648 schannels = zip(*[s for s,t in stchannels]) 1649 else: 1650 schannels = [] 1651 1652 allchannels = schannels 1653 if len(tchannels) > 1: 1654 # Write out tchannels only if there are any non-trivial ones 1655 allchannels = schannels + tchannels 1656 1657 # Write out propagators for s-channel and t-channel vertices 1658 1659 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 1660 # Correspondance between the config and the diagram = amp2 1661 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 1662 mapconfigs[iconfig])) 1663 1664 for verts in allchannels: 1665 if verts in schannels: 1666 vert = [v for v in verts if v][0] 1667 else: 1668 vert = verts 1669 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1670 last_leg = vert.get('legs')[-1] 1671 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 1672 (last_leg.get('number'), nconfigs, len(daughters), 1673 ",".join([str(d) for d in daughters]))) 1674 if verts in schannels: 1675 pdgs = [] 1676 for v in verts: 1677 if v: 1678 pdgs.append(v.get('legs')[-1].get('id')) 1679 else: 1680 pdgs.append(0) 1681 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1682 (last_leg.get('number'), nconfigs, nsubprocs, 1683 ",".join([str(d) for d in pdgs]))) 1684 lines.append("data tprid(%d,%d)/0/" % \ 1685 (last_leg.get('number'), 
nconfigs)) 1686 elif verts in tchannels[:-1]: 1687 lines.append("data tprid(%d,%d)/%d/" % \ 1688 (last_leg.get('number'), nconfigs, 1689 abs(last_leg.get('id')))) 1690 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1691 (last_leg.get('number'), nconfigs, nsubprocs, 1692 ",".join(['0'] * nsubprocs))) 1693 1694 # Write out number of configs 1695 lines.append("# Number of configs") 1696 lines.append("data mapconfig(0)/%d/" % nconfigs) 1697 1698 # Write the file 1699 writer.writelines(lines) 1700 1701 return s_and_t_channels
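To make the data statements above concrete, here is an illustrative sketch (not part of the module; the diagram number, leg numbers and PDG codes are invented) of one s-channel block as it could appear in configs.inc for a group of two subprocesses.

# Illustrative sketch (not part of export_v4).
nconfigs, diagram_number, nsubprocs = 1, 2, 2
daughters, last_leg_number = [3, 4], -1          # legs 3 and 4 merge into leg -1
sprop_pdgs = [23, 22]                            # one PDG per subprocess in the group
print("# Diagram %d" % diagram_number)
print("data mapconfig(%d)/%d/" % (nconfigs, diagram_number))
print("data (iforest(i,%d,%d),i=1,%d)/%s/" % (last_leg_number, nconfigs,
      len(daughters), ",".join(str(d) for d in daughters)))
print("data (sprop(i,%d,%d),i=1,%d)/%s/" % (last_leg_number, nconfigs, nsubprocs,
      ",".join(str(p) for p in sprop_pdgs)))
print("data tprid(%d,%d)/0/" % (last_leg_number, nconfigs))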
1702 1703 #=========================================================================== 1704 # Global helper methods 1705 #=========================================================================== 1706
1707 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1708 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1709 1710 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1711 1712 if total_coeff == 1: 1713 if is_imaginary: 1714 return '+imag1*' 1715 else: 1716 return '+' 1717 elif total_coeff == -1: 1718 if is_imaginary: 1719 return '-imag1*' 1720 else: 1721 return '-' 1722 1723 res_str = '%+iD0' % total_coeff.numerator 1724 1725 if total_coeff.denominator != 1: 1726 # Check if total_coeff is an integer 1727 res_str = res_str + '/%iD0' % total_coeff.denominator 1728 1729 if is_imaginary: 1730 res_str = res_str + '*imag1' 1731 1732 return res_str + '*'
1733 1734
1735 - def set_fortran_compiler(self, default_compiler, force=False):
1736 """Set compiler based on what's available on the system""" 1737 1738 # Check for compiler 1739 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1740 f77_compiler = default_compiler['fortran'] 1741 elif misc.which('gfortran'): 1742 f77_compiler = 'gfortran' 1743 elif misc.which('g77'): 1744 f77_compiler = 'g77' 1745 elif misc.which('f77'): 1746 f77_compiler = 'f77' 1747 elif default_compiler['fortran']: 1748 logger.warning('No Fortran Compiler detected! Please install one') 1749 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1750 else: 1751 raise MadGraph5Error, 'No Fortran Compiler detected! Please install one' 1752 logger.info('Use Fortran compiler ' + f77_compiler) 1753 1754 1755 # Check for compiler. 1. set default. 1756 if default_compiler['f2py']: 1757 f2py_compiler = default_compiler['f2py'] 1758 else: 1759 f2py_compiler = '' 1760 # Try to find the correct one. 1761 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1762 f2py_compiler = default_compiler['f2py'] 1763 elif misc.which('f2py'): 1764 f2py_compiler = 'f2py' 1765 elif sys.version_info[1] == 6: 1766 if misc.which('f2py-2.6'): 1767 f2py_compiler = 'f2py-2.6' 1768 elif misc.which('f2py2.6'): 1769 f2py_compiler = 'f2py2.6' 1770 elif sys.version_info[1] == 7: 1771 if misc.which('f2py-2.7'): 1772 f2py_compiler = 'f2py-2.7' 1773 elif misc.which('f2py2.7'): 1774 f2py_compiler = 'f2py2.7' 1775 1776 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1777 1778 1779 self.replace_make_opt_f_compiler(to_replace) 1780 # Replace also for Template but not for cluster 1781 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1782 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1783 1784 return f77_compiler
1785 1786 # an alias for backward compatibility 1787 set_compiler = set_fortran_compiler 1788 1789
1790 - def set_cpp_compiler(self, default_compiler, force=False):
1791 """Set compiler based on what's available on the system""" 1792 1793 # Check for compiler 1794 if default_compiler and misc.which(default_compiler): 1795 compiler = default_compiler 1796 elif misc.which('g++'): 1797 #check if clang version 1798 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1799 stderr=subprocess.PIPE) 1800 out, _ = p.communicate() 1801 if 'clang' in out and misc.which('clang'): 1802 compiler = 'clang' 1803 else: 1804 compiler = 'g++' 1805 elif misc.which('c++'): 1806 compiler = 'c++' 1807 elif misc.which('clang'): 1808 compiler = 'clang' 1809 elif default_compiler: 1810 logger.warning('No c++ Compiler detected! Please install one') 1811 compiler = default_compiler # maybe misc fail so try with it 1812 else: 1813 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1814 logger.info('Use c++ compiler ' + compiler) 1815 self.replace_make_opt_c_compiler(compiler) 1816 # Replace also for Template but not for cluster 1817 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite and \ 1818 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 1819 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1820 1821 return compiler
1822 1823
1824 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1825 """Set FC=compiler in Source/make_opts""" 1826 1827 assert isinstance(compilers, dict) 1828 1829 mod = False #avoid to rewrite the file if not needed 1830 if not root_dir: 1831 root_dir = self.dir_path 1832 1833 compiler= compilers['fortran'] 1834 f2py_compiler = compilers['f2py'] 1835 if not f2py_compiler: 1836 f2py_compiler = 'f2py' 1837 for_update= {'DEFAULT_F_COMPILER':compiler, 1838 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1839 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1840 1841 try: 1842 common_run_interface.CommonRunCmd.update_make_opts_full( 1843 make_opts, for_update) 1844 except IOError: 1845 if root_dir == self.dir_path: 1846 logger.info('Fail to set compiler. Trying to continue anyway.')
1847
1848 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1849 """Set CXX=compiler in Source/make_opts. 1850 The version is also checked, in order to set some extra flags 1851 if the compiler is clang (on MACOS)""" 1852 1853 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1854 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1855 1856 1857 # list of the variable to set in the make_opts file 1858 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1859 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1860 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1861 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1862 } 1863 1864 # for MOJAVE remove the MACFLAG: 1865 if is_clang: 1866 import platform 1867 version, _, _ = platform.mac_ver() 1868 if not version:# not linux 1869 version = 14 # set version to remove MACFLAG 1870 else: 1871 version = int(version.split('.')[1]) 1872 if version >= 14: 1873 for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else '' 1874 1875 if not root_dir: 1876 root_dir = self.dir_path 1877 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1878 1879 try: 1880 common_run_interface.CommonRunCmd.update_make_opts_full( 1881 make_opts, for_update) 1882 except IOError: 1883 if root_dir == self.dir_path: 1884 logger.info('Fail to set compiler. Trying to continue anyway.') 1885 1886 return
1887
1888 #=============================================================================== 1889 # ProcessExporterFortranSA 1890 #=============================================================================== 1891 -class ProcessExporterFortranSA(ProcessExporterFortran):
1892 """Class to take care of exporting a set of matrix elements to 1893 MadGraph v4 StandAlone format.""" 1894 1895 matrix_template = "matrix_standalone_v4.inc" 1896
1897 - def __init__(self, *args,**opts):
1898          """add the format information compared to the standard init""" 1899  1900          if 'format' in opts: 1901              self.format = opts['format'] 1902              del opts['format'] 1903          else: 1904              self.format = 'standalone' 1905  1906          self.prefix_info = {} 1907          ProcessExporterFortran.__init__(self, *args, **opts)
1908
1909 - def copy_template(self, model):
1910 """Additional actions needed for setup of Template 1911 """ 1912 1913 #First copy the full template tree if dir_path doesn't exit 1914 if os.path.isdir(self.dir_path): 1915 return 1916 1917 logger.info('initialize a new standalone directory: %s' % \ 1918 os.path.basename(self.dir_path)) 1919 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 1920 1921 # Create the directory structure 1922 os.mkdir(self.dir_path) 1923 os.mkdir(pjoin(self.dir_path, 'Source')) 1924 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 1925 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 1926 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 1927 os.mkdir(pjoin(self.dir_path, 'bin')) 1928 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 1929 os.mkdir(pjoin(self.dir_path, 'lib')) 1930 os.mkdir(pjoin(self.dir_path, 'Cards')) 1931 1932 # Information at top-level 1933 #Write version info 1934 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 1935 try: 1936 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 1937 except IOError: 1938 MG5_version = misc.get_pkg_info() 1939 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 1940 "5." + MG5_version['version']) 1941 1942 1943 # Add file in SubProcesses 1944 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 1945 pjoin(self.dir_path, 'SubProcesses', 'makefileP')) 1946 1947 if self.format == 'standalone': 1948 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 1949 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1950 1951 # Add file in Source 1952 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 1953 pjoin(self.dir_path, 'Source')) 1954 # add the makefile 1955 filename = pjoin(self.dir_path,'Source','makefile') 1956 self.write_source_makefile(writers.FileWriter(filename))
1957 1958 #=========================================================================== 1959 # export model files 1960 #===========================================================================
1961 - def export_model_files(self, model_path):
1962 """export the model dependent files for V4 model""" 1963 1964 super(ProcessExporterFortranSA,self).export_model_files(model_path) 1965 # Add the routine update_as_param in v4 model 1966 # This is a function created in the UFO 1967 text=""" 1968 subroutine update_as_param() 1969 call setpara('param_card.dat',.false.) 1970 return 1971 end 1972 """ 1973 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 1974 ff.write(text) 1975 ff.close() 1976 1977 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 1978 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 1979 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 1980 fsock.write(text) 1981 fsock.close() 1982 1983 self.make_model_symbolic_link()
1984 1985 #=========================================================================== 1986 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 1987 #===========================================================================
1988 - def write_procdef_mg5(self, file_pos, modelname, process_str):
1989          """Write an equivalent of the MG4 proc_card so that the MadEvent4 1990          Perl scripts keep working properly for a pure MG5 run. 1991          Not needed for StandAlone, so just return. 1992          """ 1993  1994          return
1995 1996 1997 #=========================================================================== 1998 # Make the Helas and Model directories for Standalone directory 1999 #===========================================================================
2000 - def make(self):
2001 """Run make in the DHELAS and MODEL directories, to set up 2002 everything for running standalone 2003 """ 2004 2005 source_dir = pjoin(self.dir_path, "Source") 2006 logger.info("Running make for Helas") 2007 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2008 logger.info("Running make for Model") 2009 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
2010 2011 #=========================================================================== 2012 # Create proc_card_mg5.dat for Standalone directory 2013 #===========================================================================
2014 - def finalize(self, matrix_elements, history, mg5options, flaglist):
2015 """Finalize Standalone MG4 directory by 2016 generation proc_card_mg5.dat 2017 generate a global makefile 2018 """ 2019 2020 compiler = {'fortran': mg5options['fortran_compiler'], 2021 'cpp': mg5options['cpp_compiler'], 2022 'f2py': mg5options['f2py_compiler']} 2023 2024 self.compiler_choice(compiler) 2025 self.make() 2026 2027 # Write command history as proc_card_mg5 2028 if history and os.path.isdir(pjoin(self.dir_path, 'Cards')): 2029 output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2030 history.write(output_file) 2031 2032 ProcessExporterFortran.finalize(self, matrix_elements, 2033 history, mg5options, flaglist) 2034 open(pjoin(self.dir_path,'__init__.py'),'w') 2035 open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w') 2036 2037 if 'mode' in self.opt and self.opt['mode'] == "reweight": 2038 #add the module to hande the NLO weight 2039 files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'), 2040 pjoin(self.dir_path, 'Source')) 2041 files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'), 2042 pjoin(self.dir_path, 'Source', 'PDF')) 2043 self.write_pdf_opendata() 2044 2045 if self.prefix_info: 2046 self.write_f2py_splitter() 2047 self.write_f2py_makefile() 2048 self.write_f2py_check_sa(matrix_elements, 2049 pjoin(self.dir_path,'SubProcesses','check_sa.py')) 2050 else: 2051 # create a single makefile to compile all the subprocesses 2052 text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n''' 2053 deppython = '' 2054 for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')): 2055 if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)): 2056 text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir} 2057 deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir} 2058 text+='all: %s\n\techo \'done\'' % deppython 2059 2060 ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a') 2061 ff.write(text) 2062 ff.close()
2063
2064 - def write_f2py_splitter(self):
2065 """write a function to call the correct matrix element""" 2066 2067 template = """ 2068 %(python_information)s 2069 subroutine smatrixhel(pdgs, npdg, p, ALPHAS, SCALE2, nhel, ANS) 2070 IMPLICIT NONE 2071 2072 CF2PY double precision, intent(in), dimension(0:3,npdg) :: p 2073 CF2PY integer, intent(in), dimension(npdg) :: pdgs 2074 CF2PY integer, intent(in) :: npdg 2075 CF2PY double precision, intent(out) :: ANS 2076 CF2PY double precision, intent(in) :: ALPHAS 2077 CF2PY double precision, intent(in) :: SCALE2 2078 integer pdgs(*) 2079 integer npdg, nhel 2080 double precision p(*) 2081 double precision ANS, ALPHAS, PI,SCALE2 2082 include 'coupl.inc' 2083 2084 PI = 3.141592653589793D0 2085 G = 2* DSQRT(ALPHAS*PI) 2086 CALL UPDATE_AS_PARAM() 2087 if (scale2.ne.0d0) stop 1 2088 2089 %(smatrixhel)s 2090 2091 return 2092 end 2093 2094 SUBROUTINE INITIALISE(PATH) 2095 C ROUTINE FOR F2PY to read the benchmark point. 2096 IMPLICIT NONE 2097 CHARACTER*512 PATH 2098 CF2PY INTENT(IN) :: PATH 2099 CALL SETPARA(PATH) !first call to setup the paramaters 2100 RETURN 2101 END 2102 2103 subroutine get_pdg_order(PDG) 2104 IMPLICIT NONE 2105 CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i) 2106 INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i) 2107 DATA PDGS/ %(pdgs)s / 2108 PDG = PDGS 2109 RETURN 2110 END 2111 2112 subroutine get_prefix(PREFIX) 2113 IMPLICIT NONE 2114 CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i) 2115 character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i) 2116 DATA PREF / '%(prefix)s'/ 2117 PREFIX = PREF 2118 RETURN 2119 END 2120 2121 2122 """ 2123 2124 allids = self.prefix_info.keys() 2125 allprefix = [self.prefix_info[key][0] for key in allids] 2126 min_nexternal = min([len(ids) for ids in allids]) 2127 max_nexternal = max([len(ids) for ids in allids]) 2128 2129 info = [] 2130 for key, (prefix, tag) in self.prefix_info.items(): 2131 info.append('#PY %s : %s # %s' % (tag, key, prefix)) 2132 2133 2134 text = [] 2135 for n_ext in range(min_nexternal, max_nexternal+1): 2136 current = [ids for ids in allids if len(ids)==n_ext] 2137 if not current: 2138 continue 2139 if min_nexternal != max_nexternal: 2140 if n_ext == min_nexternal: 2141 text.append(' if (npdg.eq.%i)then' % n_ext) 2142 else: 2143 text.append(' else if (npdg.eq.%i)then' % n_ext) 2144 for ii,pdgs in enumerate(current): 2145 condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)]) 2146 if ii==0: 2147 text.append( ' if(%s) then ! %i' % (condition, i)) 2148 else: 2149 text.append( ' else if(%s) then ! %i' % (condition,i)) 2150 text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[pdgs][0]) 2151 text.append(' endif') 2152 #close the function 2153 if min_nexternal != max_nexternal: 2154 text.append('endif') 2155 2156 formatting = {'python_information':'\n'.join(info), 2157 'smatrixhel': '\n'.join(text), 2158 'maxpart': max_nexternal, 2159 'nb_me': len(allids), 2160 'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0' 2161 for i in range(max_nexternal) for pdg in allids), 2162 'prefix':'\',\''.join(allprefix) 2163 } 2164 formatting['lenprefix'] = len(formatting['prefix']) 2165 text = template % formatting 2166 fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w') 2167 fsock.writelines(text) 2168 fsock.close()
2169
2170 - def write_f2py_check_sa(self, matrix_element, writer):
2171 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2172 # To be implemented. It is just an example file, i.e. not crucial. 2173 return
2174
2175 - def write_f2py_makefile(self):
2176 """ """ 2177 # Add file in SubProcesses 2178 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2179 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2180
2181 - def create_MA5_cards(self,*args,**opts):
2182 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2183 pass
2184
2185 - def compiler_choice(self, compiler):
2186 """ Different daughter classes might want different compilers. 2187 So this function is meant to be overloaded if desired.""" 2188 2189 self.set_compiler(compiler)
2190 2191 #=========================================================================== 2192 # generate_subprocess_directory 2193 #===========================================================================
2194 - def generate_subprocess_directory(self, matrix_element, 2195 fortran_model, number):
2196 """Generate the Pxxxxx directory for a subprocess in MG4 standalone, 2197 including the necessary matrix.f and nexternal.inc files""" 2198 2199 cwd = os.getcwd() 2200 # Create the directory PN_xx_xxxxx in the specified path 2201 dirpath = pjoin(self.dir_path, 'SubProcesses', \ 2202 "P%s" % matrix_element.get('processes')[0].shell_string()) 2203 2204 if self.opt['sa_symmetry']: 2205 # avoid symmetric output 2206 for i,proc in enumerate(matrix_element.get('processes')): 2207 2208 tag = proc.get_tag() 2209 legs = proc.get('legs')[:] 2210 leg0 = proc.get('legs')[0] 2211 leg1 = proc.get('legs')[1] 2212 if not leg1.get('state'): 2213 proc.get('legs')[0] = leg1 2214 proc.get('legs')[1] = leg0 2215 flegs = proc.get('legs')[2:] 2216 for perm in itertools.permutations(flegs): 2217 for i,p in enumerate(perm): 2218 proc.get('legs')[i+2] = p 2219 dirpath2 = pjoin(self.dir_path, 'SubProcesses', \ 2220 "P%s" % proc.shell_string()) 2221 #restore original order 2222 proc.get('legs')[2:] = legs[2:] 2223 if os.path.exists(dirpath2): 2224 proc.get('legs')[:] = legs 2225 return 0 2226 proc.get('legs')[:] = legs 2227 2228 try: 2229 os.mkdir(dirpath) 2230 except os.error as error: 2231 logger.warning(error.strerror + " " + dirpath) 2232 2233 #try: 2234 # os.chdir(dirpath) 2235 #except os.error: 2236 # logger.error('Could not cd to directory %s' % dirpath) 2237 # return 0 2238 2239 logger.info('Creating files in directory %s' % dirpath) 2240 2241 # Extract number of external particles 2242 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2243 2244 # Create the matrix.f file and the nexternal.inc file 2245 if self.opt['export_format']=='standalone_msP': 2246 filename = pjoin(dirpath, 'matrix_prod.f') 2247 else: 2248 filename = pjoin(dirpath, 'matrix.f') 2249 2250 proc_prefix = '' 2251 if 'prefix' in self.cmd_options: 2252 if self.cmd_options['prefix'] == 'int': 2253 proc_prefix = 'M%s_' % number 2254 elif self.cmd_options['prefix'] == 'proc': 2255 proc_prefix = matrix_element.get('processes')[0].shell_string().split('_',1)[1] 2256 else: 2257 raise Exception, '--prefix options supports only \'int\' and \'proc\'' 2258 for proc in matrix_element.get('processes'): 2259 ids = [l.get('id') for l in proc.get('legs_with_decays')] 2260 self.prefix_info[tuple(ids)] = [proc_prefix, proc.get_tag()] 2261 2262 calls = self.write_matrix_element_v4( 2263 writers.FortranWriter(filename), 2264 matrix_element, 2265 fortran_model, 2266 proc_prefix=proc_prefix) 2267 2268 if self.opt['export_format'] == 'standalone_msP': 2269 filename = pjoin(dirpath,'configs_production.inc') 2270 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2271 writers.FortranWriter(filename), 2272 matrix_element) 2273 2274 filename = pjoin(dirpath,'props_production.inc') 2275 self.write_props_file(writers.FortranWriter(filename), 2276 matrix_element, 2277 s_and_t_channels) 2278 2279 filename = pjoin(dirpath,'nexternal_prod.inc') 2280 self.write_nexternal_madspin(writers.FortranWriter(filename), 2281 nexternal, ninitial) 2282 2283 if self.opt['export_format']=='standalone_msF': 2284 filename = pjoin(dirpath, 'helamp.inc') 2285 ncomb=matrix_element.get_helicity_combinations() 2286 self.write_helamp_madspin(writers.FortranWriter(filename), 2287 ncomb) 2288 2289 filename = pjoin(dirpath, 'nexternal.inc') 2290 self.write_nexternal_file(writers.FortranWriter(filename), 2291 nexternal, ninitial) 2292 2293 filename = pjoin(dirpath, 'pmass.inc') 2294 self.write_pmass_file(writers.FortranWriter(filename), 2295 matrix_element) 2296 2297 filename 
= pjoin(dirpath, 'ngraphs.inc') 2298 self.write_ngraphs_file(writers.FortranWriter(filename), 2299 len(matrix_element.get_all_amplitudes())) 2300 2301 # Generate diagrams 2302 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 2303 filename = pjoin(dirpath, "matrix.ps") 2304 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2305 get('diagrams'), 2306 filename, 2307 model=matrix_element.get('processes')[0].\ 2308 get('model'), 2309 amplitude=True) 2310 logger.info("Generating Feynman diagrams for " + \ 2311 matrix_element.get('processes')[0].nice_string()) 2312 plot.draw() 2313 2314 linkfiles = ['check_sa.f', 'coupl.inc'] 2315 2316 if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')): 2317 text = open(pjoin(dirpath, '..', 'check_sa.f')).read() 2318 pat = re.compile('smatrix', re.I) 2319 new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text) 2320 with open(pjoin(dirpath, 'check_sa.f'),'w') as f: 2321 f.write(new_text) 2322 linkfiles.pop(0) 2323 2324 for file in linkfiles: 2325 ln('../%s' % file, cwd=dirpath) 2326 ln('../makefileP', name='makefile', cwd=dirpath) 2327 # Return to original PWD 2328 #os.chdir(cwd) 2329 2330 if not calls: 2331 calls = 0 2332 return calls
2333 2334 2335 #=========================================================================== 2336 # write_source_makefile 2337 #===========================================================================
2338 - def write_source_makefile(self, writer):
2339 """Write the nexternal.inc file for MG4""" 2340 2341 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2342 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2343 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2344 2345 replace_dict= {'libraries': set_of_lib, 2346 'model':model_line, 2347 'additional_dsample': '', 2348 'additional_dependencies':''} 2349 2350 text = open(path).read() % replace_dict 2351 2352 if writer: 2353 writer.write(text) 2354 2355 return replace_dict
2356 2357 #=========================================================================== 2358 # write_matrix_element_v4 2359 #===========================================================================
2360 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 2361 write=True, proc_prefix=''):
2362 """Export a matrix element to a matrix.f file in MG4 standalone format 2363 if write is on False, just return the replace_dict and not write anything.""" 2364 2365 2366 if not matrix_element.get('processes') or \ 2367 not matrix_element.get('diagrams'): 2368 return 0 2369 2370 if writer: 2371 if not isinstance(writer, writers.FortranWriter): 2372 raise writers.FortranWriter.FortranWriterError(\ 2373 "writer not FortranWriter but %s" % type(writer)) 2374 # Set lowercase/uppercase Fortran code 2375 writers.FortranWriter.downcase = False 2376 2377 2378 if not self.opt.has_key('sa_symmetry'): 2379 self.opt['sa_symmetry']=False 2380 2381 2382 # The proc_id is for MadEvent grouping which is never used in SA. 2383 replace_dict = {'global_variable':'', 'amp2_lines':'', 2384 'proc_prefix':proc_prefix, 'proc_id':''} 2385 2386 # Extract helas calls 2387 helas_calls = fortran_model.get_matrix_element_calls(\ 2388 matrix_element) 2389 2390 replace_dict['helas_calls'] = "\n".join(helas_calls) 2391 2392 # Extract version number and date from VERSION file 2393 info_lines = self.get_mg5_info_lines() 2394 replace_dict['info_lines'] = info_lines 2395 2396 # Extract process info lines 2397 process_lines = self.get_process_info_lines(matrix_element) 2398 replace_dict['process_lines'] = process_lines 2399 2400 # Extract number of external particles 2401 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2402 replace_dict['nexternal'] = nexternal 2403 replace_dict['nincoming'] = ninitial 2404 2405 # Extract ncomb 2406 ncomb = matrix_element.get_helicity_combinations() 2407 replace_dict['ncomb'] = ncomb 2408 2409 # Extract helicity lines 2410 helicity_lines = self.get_helicity_lines(matrix_element) 2411 replace_dict['helicity_lines'] = helicity_lines 2412 2413 # Extract overall denominator 2414 # Averaging initial state color, spin, and identical FS particles 2415 replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element) 2416 2417 # Extract ngraphs 2418 ngraphs = matrix_element.get_number_of_amplitudes() 2419 replace_dict['ngraphs'] = ngraphs 2420 2421 # Extract nwavefuncs 2422 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2423 replace_dict['nwavefuncs'] = nwavefuncs 2424 2425 # Extract ncolor 2426 ncolor = max(1, len(matrix_element.get('color_basis'))) 2427 replace_dict['ncolor'] = ncolor 2428 2429 replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor() 2430 replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\ 2431 matrix_element.get_beams_hel_avg_factor() 2432 2433 # Extract color data lines 2434 color_data_lines = self.get_color_data_lines(matrix_element) 2435 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2436 2437 if self.opt['export_format']=='standalone_msP': 2438 # For MadSpin need to return the AMP2 2439 amp2_lines = self.get_amp2_lines(matrix_element, [] ) 2440 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2441 replace_dict['global_variable'] = \ 2442 " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n" 2443 2444 # JAMP definition, depends on the number of independent split orders 2445 split_orders=matrix_element.get('processes')[0].get('split_orders') 2446 2447 if len(split_orders)==0: 2448 replace_dict['nSplitOrders']='' 2449 # Extract JAMP lines 2450 jamp_lines = self.get_JAMP_lines(matrix_element) 2451 # Consider the output of a dummy order 'ALL_ORDERS' for which we 2452 # set all amplitude order to weight 1 and only one squared order 2453 # contribution which is of course ALL_ORDERS=2. 
2454 squared_orders = [(2,),] 2455 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 2456 replace_dict['chosen_so_configs'] = '.TRUE.' 2457 replace_dict['nSqAmpSplitOrders']=1 2458 replace_dict['split_order_str_list']='' 2459 else: 2460 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 2461 replace_dict['nAmpSplitOrders']=len(amp_orders) 2462 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 2463 replace_dict['nSplitOrders']=len(split_orders) 2464 replace_dict['split_order_str_list']=str(split_orders) 2465 amp_so = self.get_split_orders_lines( 2466 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 2467 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 2468 replace_dict['ampsplitorders']='\n'.join(amp_so) 2469 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 2470 jamp_lines = self.get_JAMP_lines_split_order(\ 2471 matrix_element,amp_orders,split_order_names=split_orders) 2472 2473 # Now setup the array specifying what squared split order is chosen 2474 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 2475 matrix_element.get('processes')[0],squared_orders) 2476 2477 # For convenience we also write the driver check_sa_splitOrders.f 2478 # that explicitely writes out the contribution from each squared order. 2479 # The original driver still works and is compiled with 'make' while 2480 # the splitOrders one is compiled with 'make check_sa_born_splitOrders' 2481 check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f') 2482 self.write_check_sa_splitOrders(squared_orders,split_orders, 2483 nexternal,ninitial,proc_prefix,check_sa_writer) 2484 2485 if write: 2486 writers.FortranWriter('nsqso_born.inc').writelines( 2487 """INTEGER NSQSO_BORN 2488 PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders']) 2489 2490 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2491 2492 matrix_template = self.matrix_template 2493 if self.opt['export_format']=='standalone_msP' : 2494 matrix_template = 'matrix_standalone_msP_v4.inc' 2495 elif self.opt['export_format']=='standalone_msF': 2496 matrix_template = 'matrix_standalone_msF_v4.inc' 2497 elif self.opt['export_format']=='matchbox': 2498 replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id') 2499 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2500 2501 if len(split_orders)>0: 2502 if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']: 2503 logger.debug("Warning: The export format %s is not "+\ 2504 " available for individual ME evaluation of given coupl. orders."+\ 2505 " Only the total ME will be computed.", self.opt['export_format']) 2506 elif self.opt['export_format'] in ['madloop_matchbox']: 2507 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2508 matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc" 2509 else: 2510 matrix_template = "matrix_standalone_splitOrders_v4.inc" 2511 2512 replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template) 2513 replace_dict['template_file2'] = pjoin(_file_path, \ 2514 'iolibs/template_files/split_orders_helping_functions.inc') 2515 if write and writer: 2516 path = replace_dict['template_file'] 2517 content = open(path).read() 2518 content = content % replace_dict 2519 # Write the file 2520 writer.writelines(content) 2521 # Add the helper functions. 
2522 if len(split_orders)>0: 2523 content = '\n' + open(replace_dict['template_file2'])\ 2524 .read()%replace_dict 2525 writer.writelines(content) 2526 return len(filter(lambda call: call.find('#') != 0, helas_calls)) 2527 else: 2528 replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls)) 2529 return replace_dict # for subclass update
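The matrix.f file itself is produced by plain %-substitution of replace_dict into a template shipped with MG5. The toy template below is illustrative only and far shorter than the real matrix_standalone_v4.inc; it just shows the mechanism with a couple of invented keys and a sample HELAS call string.

# Illustrative sketch (not part of export_v4).
template = ("      INTEGER NGRAPHS\n"
            "      PARAMETER (NGRAPHS=%(ngraphs)d)\n"
            "C     %(info_lines)s\n"
            "%(helas_calls)s\n")
replace_dict = {'ngraphs': 3,
                'info_lines': 'generated by a toy example',
                'helas_calls': '      CALL IXXXXX(P(0,1),ZERO,NHEL(1),+1*IC(1),W(1,1))'}
print(template % replace_dict)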
2530
2531 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2532 nincoming, proc_prefix, writer):
2533 """ Write out a more advanced version of the check_sa drivers that 2534 individually returns the matrix element for each contributing squared 2535 order.""" 2536 2537 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2538 'template_files', 'check_sa_splitOrders.f')).read() 2539 printout_sq_orders=[] 2540 for i, squared_order in enumerate(squared_orders): 2541 sq_orders=[] 2542 for j, sqo in enumerate(squared_order): 2543 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2544 printout_sq_orders.append(\ 2545 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2546 %(i+1,' '.join(sq_orders),i+1)) 2547 printout_sq_orders='\n'.join(printout_sq_orders) 2548 replace_dict = {'printout_sqorders':printout_sq_orders, 2549 'nSplitOrders':len(squared_orders), 2550 'nexternal':nexternal, 2551 'nincoming':nincoming, 2552 'proc_prefix':proc_prefix} 2553 2554 if writer: 2555 writer.writelines(check_sa_content % replace_dict) 2556 else: 2557 return replace_dict
2558
2559 -class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
2560 """class to take care of exporting a set of matrix element for the Matchbox 2561 code in the case of Born only routine""" 2562 2563 default_opt = {'clean': False, 'complex_mass':False, 2564 'export_format':'matchbox', 'mp': False, 2565 'sa_symmetry': True} 2566 2567 #specific template of the born 2568 2569 2570 matrix_template = "matrix_standalone_matchbox.inc" 2571 2572 @staticmethod
2573 - def get_color_string_lines(matrix_element):
2574 """Return the color matrix definition lines for this matrix element. Split 2575 rows in chunks of size n.""" 2576 2577 if not matrix_element.get('color_matrix'): 2578 return "\n".join(["out = 1"]) 2579 2580 #start the real work 2581 color_denominators = matrix_element.get('color_matrix').\ 2582 get_line_denominators() 2583 matrix_strings = [] 2584 my_cs = color.ColorString() 2585 for i_color in xrange(len(color_denominators)): 2586 # Then write the numerators for the matrix elements 2587 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color]) 2588 t_str=repr(my_cs) 2589 t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)") 2590 # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall: 2591 # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')] 2592 all_matches = t_match.findall(t_str) 2593 output = {} 2594 arg=[] 2595 for match in all_matches: 2596 ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')] 2597 if ctype in ['ColorOne' ]: 2598 continue 2599 if ctype not in ['T', 'Tr' ]: 2600 raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype 2601 tmparg += ['0'] 2602 arg +=tmparg 2603 for j, v in enumerate(arg): 2604 output[(i_color,j)] = v 2605 2606 for key in output: 2607 if matrix_strings == []: 2608 #first entry 2609 matrix_strings.append(""" 2610 if (in1.eq.%s.and.in2.eq.%s)then 2611 out = %s 2612 """ % (key[0], key[1], output[key])) 2613 else: 2614 #not first entry 2615 matrix_strings.append(""" 2616 elseif (in1.eq.%s.and.in2.eq.%s)then 2617 out = %s 2618 """ % (key[0], key[1], output[key])) 2619 if len(matrix_strings): 2620 matrix_strings.append(" else \n out = - 1 \n endif") 2621 else: 2622 return "\n out = - 1 \n " 2623 return "\n".join(matrix_strings)
2624
2625 - def make(self,*args,**opts):
2626 pass
2627
2628 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1, 2629 JAMP_formatLC=None):
2630 2631 """Adding leading color part of the colorflow""" 2632 2633 if not JAMP_formatLC: 2634 JAMP_formatLC= "LN%s" % JAMP_format 2635 2636 error_msg="Malformed '%s' argument passed to the get_JAMP_lines" 2637 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 2638 col_amps=col_amps.get_color_amplitudes() 2639 elif(isinstance(col_amps,list)): 2640 if(col_amps and isinstance(col_amps[0],list)): 2641 col_amps=col_amps 2642 else: 2643 raise MadGraph5Error, error_msg % 'col_amps' 2644 else: 2645 raise MadGraph5Error, error_msg % 'col_amps' 2646 2647 text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps, 2648 JAMP_format=JAMP_format, 2649 AMP_format=AMP_format, 2650 split=-1) 2651 2652 2653 # Filter the col_ampls to generate only those without any 1/NC terms 2654 2655 LC_col_amps = [] 2656 for coeff_list in col_amps: 2657 to_add = [] 2658 for (coefficient, amp_number) in coeff_list: 2659 if coefficient[3]==0: 2660 to_add.append( (coefficient, amp_number) ) 2661 LC_col_amps.append(to_add) 2662 2663 text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps, 2664 JAMP_format=JAMP_formatLC, 2665 AMP_format=AMP_format, 2666 split=-1) 2667 2668 return text
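The leading-colour selection reduces to keeping contributions whose 1/Nc power is zero. The sketch below is illustrative only, with invented coefficients; it applies the same coefficient[3] == 0 filter to a toy colour-amplitude list.

# Illustrative sketch (not part of export_v4).
# Each entry: ((ff_number, frac, is_imaginary, Nc_power), amplitude_number)
col_amps = [[((1, 1, False, 0), 1), ((-1, 1, False, -2), 2)],   # colour flow 1
            [((1, 1, False, 0), 3)]]                            # colour flow 2
LC_col_amps = [[(coeff, amp) for (coeff, amp) in flow if coeff[3] == 0]
               for flow in col_amps]
print(LC_col_amps)   # the AMP(2) term, suppressed by 1/Nc**2, is dropped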
2669
2670 2671 2672 2673 #=============================================================================== 2674 # ProcessExporterFortranMW 2675 #=============================================================================== 2676 -class ProcessExporterFortranMW(ProcessExporterFortran):
2677 """Class to take care of exporting a set of matrix elements to 2678 MadGraph v4 - MadWeight format.""" 2679 2680 matrix_file="matrix_standalone_v4.inc" 2681
2682 - def copy_template(self, model):
2683 """Additional actions needed for setup of Template 2684 """ 2685 2686 super(ProcessExporterFortranMW, self).copy_template(model) 2687 2688 # Add the MW specific file 2689 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2690 pjoin(self.dir_path, 'Source','MadWeight'), True) 2691 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2692 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2693 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2694 pjoin(self.dir_path, 'Source','setrun.f')) 2695 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2696 pjoin(self.dir_path, 'Source','run.inc')) 2697 # File created from Template (Different in some child class) 2698 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2699 self.write_run_config_file(writers.FortranWriter(filename)) 2700 2701 try: 2702 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2703 stdout = os.open(os.devnull, os.O_RDWR), 2704 stderr = os.open(os.devnull, os.O_RDWR), 2705 cwd=self.dir_path) 2706 except OSError: 2707 # Probably madweight already called 2708 pass 2709 2710 # Copy the different python file in the Template 2711 self.copy_python_file() 2712 # create the appropriate cuts.f 2713 self.get_mw_cuts_version() 2714 2715 # add the makefile in Source directory 2716 filename = os.path.join(self.dir_path,'Source','makefile') 2717 self.write_source_makefile(writers.FortranWriter(filename))
2718 2719 2720 2721 2722 #=========================================================================== 2723 # convert_model 2724 #===========================================================================
2725 - def convert_model(self, model, wanted_lorentz = [], 2726 wanted_couplings = []):
2727 2728 super(ProcessExporterFortranMW,self).convert_model(model, 2729 wanted_lorentz, wanted_couplings) 2730 2731 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2732 try: 2733 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2734 except OSError as error: 2735 pass 2736 model_path = model.get('modelpath') 2737 # This is not safe if there is a '##' or '-' in the path. 2738 shutil.copytree(model_path, 2739 pjoin(self.dir_path,'bin','internal','ufomodel'), 2740 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2741 if hasattr(model, 'restrict_card'): 2742 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2743 'restrict_default.dat') 2744 if isinstance(model.restrict_card, check_param_card.ParamCard): 2745 model.restrict_card.write(out_path) 2746 else: 2747 files.cp(model.restrict_card, out_path)
2748 2749 #=========================================================================== 2750 # generate_subprocess_directory 2751 #===========================================================================
2752 - def copy_python_file(self):
2753 """copy the python file require for the Template""" 2754 2755 # madevent interface 2756 cp(_file_path+'/interface/madweight_interface.py', 2757 self.dir_path+'/bin/internal/madweight_interface.py') 2758 cp(_file_path+'/interface/extended_cmd.py', 2759 self.dir_path+'/bin/internal/extended_cmd.py') 2760 cp(_file_path+'/interface/common_run_interface.py', 2761 self.dir_path+'/bin/internal/common_run_interface.py') 2762 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2763 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2764 cp(_file_path+'/iolibs/save_load_object.py', 2765 self.dir_path+'/bin/internal/save_load_object.py') 2766 cp(_file_path+'/madevent/gen_crossxhtml.py', 2767 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2768 cp(_file_path+'/madevent/sum_html.py', 2769 self.dir_path+'/bin/internal/sum_html.py') 2770 cp(_file_path+'/various/FO_analyse_card.py', 2771 self.dir_path+'/bin/internal/FO_analyse_card.py') 2772 cp(_file_path+'/iolibs/file_writers.py', 2773 self.dir_path+'/bin/internal/file_writers.py') 2774 #model file 2775 cp(_file_path+'../models/check_param_card.py', 2776 self.dir_path+'/bin/internal/check_param_card.py') 2777 2778 #madevent file 2779 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2780 cp(_file_path+'/various/lhe_parser.py', 2781 self.dir_path+'/bin/internal/lhe_parser.py') 2782 2783 cp(_file_path+'/various/banner.py', 2784 self.dir_path+'/bin/internal/banner.py') 2785 cp(_file_path+'/various/shower_card.py', 2786 self.dir_path+'/bin/internal/shower_card.py') 2787 cp(_file_path+'/various/cluster.py', 2788 self.dir_path+'/bin/internal/cluster.py') 2789 2790 # logging configuration 2791 cp(_file_path+'/interface/.mg5_logging.conf', 2792 self.dir_path+'/bin/internal/me5_logging.conf') 2793 cp(_file_path+'/interface/coloring_logging.py', 2794 self.dir_path+'/bin/internal/coloring_logging.py')
2795 2796 2797 #=========================================================================== 2798 # Change the version of cuts.f to the one compatible with MW 2799 #===========================================================================
2800 - def get_mw_cuts_version(self, outpath=None):
2801 """create the appropriate cuts.f 2802 This is based on the one associated to ME output but: 2803 1) No clustering (=> remove initcluster/setclscales) 2804 2) Adding the definition of cut_bw at the file. 2805 """ 2806 2807 template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f')) 2808 2809 text = StringIO() 2810 #1) remove all dependencies in ickkw >1: 2811 nb_if = 0 2812 for line in template: 2813 if 'if(xqcut.gt.0d0' in line: 2814 nb_if = 1 2815 if nb_if == 0: 2816 text.write(line) 2817 continue 2818 if re.search(r'if\(.*\)\s*then', line): 2819 nb_if += 1 2820 elif 'endif' in line: 2821 nb_if -= 1 2822 2823 #2) add fake cut_bw (have to put the true one later) 2824 text.write(""" 2825 logical function cut_bw(p) 2826 include 'madweight_param.inc' 2827 double precision p(*) 2828 if (bw_cut) then 2829 cut_bw = .true. 2830 else 2831 stop 1 2832 endif 2833 return 2834 end 2835 """) 2836 2837 final = text.getvalue() 2838 #3) remove the call to initcluster: 2839 template = final.replace('call initcluster', '! Remove for MW!call initcluster') 2840 template = template.replace('genps.inc', 'maxparticles.inc') 2841 #Now we can write it 2842 if not outpath: 2843 fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w') 2844 elif isinstance(outpath, str): 2845 fsock = open(outpath, 'w') 2846 else: 2847 fsock = outpath 2848 fsock.write(template)
2849 2850 2851 2852 #=========================================================================== 2853 # Make the Helas and Model directories for Standalone directory 2854 #===========================================================================
2855 - def make(self):
2856 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2857 everything for running madweight 2858 """ 2859 2860 source_dir = os.path.join(self.dir_path, "Source") 2861 logger.info("Running make for Helas") 2862 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2863 logger.info("Running make for Model") 2864 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2865 logger.info("Running make for PDF") 2866 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2867 logger.info("Running make for CERNLIB") 2868 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2869 logger.info("Running make for GENERIC") 2870 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2871 logger.info("Running make for blocks") 2872 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2873 logger.info("Running make for tools") 2874 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2875 2876 #=========================================================================== 2877 # Create proc_card_mg5.dat for MadWeight directory 2878 #===========================================================================
2879 - def finalize(self, matrix_elements, history, mg5options, flaglist):
2880 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 2881 2882 compiler = {'fortran': mg5options['fortran_compiler'], 2883 'cpp': mg5options['cpp_compiler'], 2884 'f2py': mg5options['f2py_compiler']} 2885 2886 2887 2888 #proc_charac 2889 self.create_proc_charac() 2890 2891 # Write maxparticles.inc based on max of ME's/subprocess groups 2892 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 2893 self.write_maxparticles_file(writers.FortranWriter(filename), 2894 matrix_elements) 2895 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2896 pjoin(self.dir_path, 'Source','MadWeight','blocks')) 2897 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2898 pjoin(self.dir_path, 'Source','MadWeight','tools')) 2899 2900 self.set_compiler(compiler) 2901 self.make() 2902 2903 # Write command history as proc_card_mg5 2904 if os.path.isdir(os.path.join(self.dir_path, 'Cards')): 2905 output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2906 history.write(output_file) 2907 2908 ProcessExporterFortran.finalize(self, matrix_elements, 2909 history, mg5options, flaglist)
2910 2911 2912 2913 #=========================================================================== 2914 # create the run_card for MW 2915 #===========================================================================
2916 - def create_run_card(self, matrix_elements, history):
2917 """ """ 2918 2919 run_card = banner_mod.RunCard() 2920 2921 # pass to default for MW 2922 run_card["run_tag"] = "\'not_use\'" 2923 run_card["fixed_ren_scale"] = "T" 2924 run_card["fixed_fac_scale"] = "T" 2925 run_card.remove_all_cut() 2926 2927 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2928 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2929 python_template=True) 2930 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2931 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2932 python_template=True)
2933 2934 #=========================================================================== 2935 # export model files 2936 #===========================================================================
2937 - def export_model_files(self, model_path):
2938 """export the model dependent files for V4 model""" 2939 2940 super(ProcessExporterFortranMW,self).export_model_files(model_path) 2941 # Add the routine update_as_param in v4 model 2942 # This is a function created in the UFO 2943 text=""" 2944 subroutine update_as_param() 2945 call setpara('param_card.dat',.false.) 2946 return 2947 end 2948 """ 2949 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2950 ff.write(text) 2951 ff.close() 2952 2953 # Modify setrun.f 2954 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 2955 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 2956 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 2957 fsock.write(text) 2958 fsock.close() 2959 2960 # Modify initialization.f 2961 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 2962 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 2963 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 2964 fsock.write(text) 2965 fsock.close() 2966 2967 2968 self.make_model_symbolic_link()
2969 2970 #=========================================================================== 2971 # generate_subprocess_directory 2972 #===========================================================================
2973 - def generate_subprocess_directory(self, matrix_element, 2974 fortran_model,number):
2975 """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format, 2976 including the necessary matrix.f and nexternal.inc files""" 2977 2978 cwd = os.getcwd() 2979 # Create the directory PN_xx_xxxxx in the specified path 2980 dirpath = os.path.join(self.dir_path, 'SubProcesses', \ 2981 "P%s" % matrix_element.get('processes')[0].shell_string()) 2982 2983 try: 2984 os.mkdir(dirpath) 2985 except os.error as error: 2986 logger.warning(error.strerror + " " + dirpath) 2987 2988 #try: 2989 # os.chdir(dirpath) 2990 #except os.error: 2991 # logger.error('Could not cd to directory %s' % dirpath) 2992 # return 0 2993 2994 logger.info('Creating files in directory %s' % dirpath) 2995 2996 # Extract number of external particles 2997 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2998 2999 # Create the matrix.f file and the nexternal.inc file 3000 filename = pjoin(dirpath,'matrix.f') 3001 calls,ncolor = self.write_matrix_element_v4( 3002 writers.FortranWriter(filename), 3003 matrix_element, 3004 fortran_model) 3005 3006 filename = pjoin(dirpath, 'auto_dsig.f') 3007 self.write_auto_dsig_file(writers.FortranWriter(filename), 3008 matrix_element) 3009 3010 filename = pjoin(dirpath, 'configs.inc') 3011 mapconfigs, s_and_t_channels = self.write_configs_file(\ 3012 writers.FortranWriter(filename), 3013 matrix_element) 3014 3015 filename = pjoin(dirpath, 'nexternal.inc') 3016 self.write_nexternal_file(writers.FortranWriter(filename), 3017 nexternal, ninitial) 3018 3019 filename = pjoin(dirpath, 'leshouche.inc') 3020 self.write_leshouche_file(writers.FortranWriter(filename), 3021 matrix_element) 3022 3023 filename = pjoin(dirpath, 'props.inc') 3024 self.write_props_file(writers.FortranWriter(filename), 3025 matrix_element, 3026 s_and_t_channels) 3027 3028 filename = pjoin(dirpath, 'pmass.inc') 3029 self.write_pmass_file(writers.FortranWriter(filename), 3030 matrix_element) 3031 3032 filename = pjoin(dirpath, 'ngraphs.inc') 3033 self.write_ngraphs_file(writers.FortranWriter(filename), 3034 len(matrix_element.get_all_amplitudes())) 3035 3036 filename = pjoin(dirpath, 'maxamps.inc') 3037 self.write_maxamps_file(writers.FortranWriter(filename), 3038 len(matrix_element.get('diagrams')), 3039 ncolor, 3040 len(matrix_element.get('processes')), 3041 1) 3042 3043 filename = pjoin(dirpath, 'phasespace.inc') 3044 self.write_phasespace_file(writers.FortranWriter(filename), 3045 len(matrix_element.get('diagrams')), 3046 ) 3047 3048 # Generate diagrams 3049 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 3050 filename = pjoin(dirpath, "matrix.ps") 3051 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3052 get('diagrams'), 3053 filename, 3054 model=matrix_element.get('processes')[0].\ 3055 get('model'), 3056 amplitude='') 3057 logger.info("Generating Feynman diagrams for " + \ 3058 matrix_element.get('processes')[0].nice_string()) 3059 plot.draw() 3060 3061 #import genps.inc and maxconfigs.inc into Subprocesses 3062 ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False) 3063 #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False) 3064 3065 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc'] 3066 3067 for file in linkfiles: 3068 ln('../%s' % file, starting_dir=cwd) 3069 3070 ln('nexternal.inc', '../../Source', log=False, cwd=dirpath) 3071 ln('leshouche.inc', 
'../../Source', log=False, cwd=dirpath) 3072 ln('maxamps.inc', '../../Source', log=False, cwd=dirpath) 3073 ln('phasespace.inc', '../', log=True, cwd=dirpath) 3074 # Return to original PWD 3075 #os.chdir(cwd) 3076 3077 if not calls: 3078 calls = 0 3079 return calls
3080 3081 #=========================================================================== 3082 # write_matrix_element_v4 3083 #===========================================================================
3084 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3085 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3086 3087 if not matrix_element.get('processes') or \ 3088 not matrix_element.get('diagrams'): 3089 return 0 3090 3091 if writer: 3092 if not isinstance(writer, writers.FortranWriter): 3093 raise writers.FortranWriter.FortranWriterError(\ 3094 "writer not FortranWriter") 3095 3096 # Set lowercase/uppercase Fortran code 3097 writers.FortranWriter.downcase = False 3098 3099 replace_dict = {} 3100 3101 # Extract version number and date from VERSION file 3102 info_lines = self.get_mg5_info_lines() 3103 replace_dict['info_lines'] = info_lines 3104 3105 # Extract process info lines 3106 process_lines = self.get_process_info_lines(matrix_element) 3107 replace_dict['process_lines'] = process_lines 3108 3109 # Set proc_id 3110 replace_dict['proc_id'] = proc_id 3111 3112 # Extract number of external particles 3113 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3114 replace_dict['nexternal'] = nexternal 3115 3116 # Extract ncomb 3117 ncomb = matrix_element.get_helicity_combinations() 3118 replace_dict['ncomb'] = ncomb 3119 3120 # Extract helicity lines 3121 helicity_lines = self.get_helicity_lines(matrix_element) 3122 replace_dict['helicity_lines'] = helicity_lines 3123 3124 # Extract overall denominator 3125 # Averaging initial state color, spin, and identical FS particles 3126 den_factor_line = self.get_den_factor_line(matrix_element) 3127 replace_dict['den_factor_line'] = den_factor_line 3128 3129 # Extract ngraphs 3130 ngraphs = matrix_element.get_number_of_amplitudes() 3131 replace_dict['ngraphs'] = ngraphs 3132 3133 # Extract nwavefuncs 3134 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3135 replace_dict['nwavefuncs'] = nwavefuncs 3136 3137 # Extract ncolor 3138 ncolor = max(1, len(matrix_element.get('color_basis'))) 3139 replace_dict['ncolor'] = ncolor 3140 3141 # Extract color data lines 3142 color_data_lines = self.get_color_data_lines(matrix_element) 3143 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3144 3145 # Extract helas calls 3146 helas_calls = fortran_model.get_matrix_element_calls(\ 3147 matrix_element) 3148 3149 replace_dict['helas_calls'] = "\n".join(helas_calls) 3150 3151 # Extract JAMP lines 3152 jamp_lines = self.get_JAMP_lines(matrix_element) 3153 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3154 3155 replace_dict['template_file'] = os.path.join(_file_path, \ 3156 'iolibs/template_files/%s' % self.matrix_file) 3157 replace_dict['template_file2'] = '' 3158 3159 if writer: 3160 file = open(replace_dict['template_file']).read() 3161 file = file % replace_dict 3162 # Write the file 3163 writer.writelines(file) 3164 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor 3165 else: 3166 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor)
3167 3168 #=========================================================================== 3169 # write_source_makefile 3170 #===========================================================================
3171 - def write_source_makefile(self, writer):
3172          """Write the Source makefile for MadWeight""" 3173 3174 3175         path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3176         set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3177         text = open(path).read() % {'libraries': set_of_lib} 3178         writer.write(text) 3179 3180         return True
3181
3182 - def write_phasespace_file(self, writer, nb_diag):
3183          """Write the phasespace.inc file for MadWeight""" 3184 3185         template = """ include 'maxparticles.inc' 3186         integer max_branches 3187         parameter (max_branches=max_particles-1) 3188         integer max_configs 3189         parameter (max_configs=%(nb_diag)s) 3190 3191 c channel position 3192         integer config_pos,perm_pos 3193         common /to_config/config_pos,perm_pos 3194 3195         """ 3196 3197         writer.write(template % {'nb_diag': nb_diag})
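     # Illustrative sketch, not part of the module: for a hypothetical
     # nb_diag = 4 the template above is written out (modulo the reformatting
     # applied by FortranWriter) roughly as
     #     include 'maxparticles.inc'
     #     integer max_branches
     #     parameter (max_branches=max_particles-1)
     #     integer max_configs
     #     parameter (max_configs=4)
     #   c channel position
     #     integer config_pos,perm_pos
     #     common /to_config/config_pos,perm_pos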
3198 3199 3200 #=========================================================================== 3201 # write_auto_dsig_file 3202 #===========================================================================
3203 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3204 """Write the auto_dsig.f file for the differential cross section 3205 calculation, includes pdf call information (MadWeight format)""" 3206 3207 if not matrix_element.get('processes') or \ 3208 not matrix_element.get('diagrams'): 3209 return 0 3210 3211 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3212 3213 if ninitial < 1 or ninitial > 2: 3214 raise writers.FortranWriter.FortranWriterError, \ 3215 """Need ninitial = 1 or 2 to write auto_dsig file""" 3216 3217 replace_dict = {} 3218 3219 # Extract version number and date from VERSION file 3220 info_lines = self.get_mg5_info_lines() 3221 replace_dict['info_lines'] = info_lines 3222 3223 # Extract process info lines 3224 process_lines = self.get_process_info_lines(matrix_element) 3225 replace_dict['process_lines'] = process_lines 3226 3227 # Set proc_id 3228 replace_dict['proc_id'] = proc_id 3229 replace_dict['numproc'] = 1 3230 3231 # Set dsig_line 3232 if ninitial == 1: 3233 # No conversion, since result of decay should be given in GeV 3234 dsig_line = "pd(0)*dsiguu" 3235 else: 3236 # Convert result (in GeV) to pb 3237 dsig_line = "pd(0)*conv*dsiguu" 3238 3239 replace_dict['dsig_line'] = dsig_line 3240 3241 # Extract pdf lines 3242 pdf_vars, pdf_data, pdf_lines = \ 3243 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3244 replace_dict['pdf_vars'] = pdf_vars 3245 replace_dict['pdf_data'] = pdf_data 3246 replace_dict['pdf_lines'] = pdf_lines 3247 3248 # Lines that differ between subprocess group and regular 3249 if proc_id: 3250 replace_dict['numproc'] = int(proc_id) 3251 replace_dict['passcuts_begin'] = "" 3252 replace_dict['passcuts_end'] = "" 3253 # Set lines for subprocess group version 3254 # Set define_iconfigs_lines 3255 replace_dict['define_subdiag_lines'] = \ 3256 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3257 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3258 else: 3259 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3260 replace_dict['passcuts_end'] = "ENDIF" 3261 replace_dict['define_subdiag_lines'] = "" 3262 3263 if writer: 3264 file = open(os.path.join(_file_path, \ 3265 'iolibs/template_files/auto_dsig_mw.inc')).read() 3266 3267 file = file % replace_dict 3268 # Write the file 3269 writer.writelines(file) 3270 else: 3271 return replace_dict
3272 #=========================================================================== 3273 # write_configs_file 3274 #===========================================================================
3275 - def write_configs_file(self, writer, matrix_element):
3276 """Write the configs.inc file for MadEvent""" 3277 3278 # Extract number of external particles 3279 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3280 3281 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3282 mapconfigs = [c[0] for c in configs] 3283 model = matrix_element.get('processes')[0].get('model') 3284 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3285 [[c[1]] for c in configs], 3286 mapconfigs, 3287 nexternal, ninitial,matrix_element, model)
3288 3289 #=========================================================================== 3290 # write_run_configs_file 3291 #===========================================================================
3292 - def write_run_config_file(self, writer):
3293 """Write the run_configs.inc file for MadWeight""" 3294 3295 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3296 text = open(path).read() % {'chanperjob':'5'} 3297 writer.write(text) 3298 return True
3299 3300 #=========================================================================== 3301 # write_configs_file_from_diagrams 3302 #===========================================================================
3303 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 3304 nexternal, ninitial, matrix_element, model):
3305 """Write the actual configs.inc file. 3306 3307 configs is the diagrams corresponding to configs (each 3308 diagrams is a list of corresponding diagrams for all 3309 subprocesses, with None if there is no corresponding diagrams 3310 for a given process). 3311 mapconfigs gives the diagram number for each config. 3312 3313 For s-channels, we need to output one PDG for each subprocess in 3314 the subprocess group, in order to be able to pick the right 3315 one for multiprocesses.""" 3316 3317 lines = [] 3318 3319 particle_dict = matrix_element.get('processes')[0].get('model').\ 3320 get('particle_dict') 3321 3322 s_and_t_channels = [] 3323 3324 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 3325 for config in configs if [d for d in config if d][0].\ 3326 get_vertex_leg_numbers()!=[]] 3327 3328 minvert = min(vert_list) if vert_list!=[] else 0 3329 # Number of subprocesses 3330 nsubprocs = len(configs[0]) 3331 3332 nconfigs = 0 3333 3334 new_pdg = model.get_first_non_pdg() 3335 3336 for iconfig, helas_diags in enumerate(configs): 3337 if any([vert > minvert for vert in 3338 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 3339 # Only 3-vertices allowed in configs.inc 3340 continue 3341 nconfigs += 1 3342 3343 # Need s- and t-channels for all subprocesses, including 3344 # those that don't contribute to this config 3345 empty_verts = [] 3346 stchannels = [] 3347 for h in helas_diags: 3348 if h: 3349 # get_s_and_t_channels gives vertices starting from 3350 # final state external particles and working inwards 3351 stchannels.append(h.get('amplitudes')[0].\ 3352 get_s_and_t_channels(ninitial,model,new_pdg)) 3353 else: 3354 stchannels.append((empty_verts, None)) 3355 3356 # For t-channels, just need the first non-empty one 3357 tchannels = [t for s,t in stchannels if t != None][0] 3358 3359 # For s_and_t_channels (to be used later) use only first config 3360 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 3361 tchannels]) 3362 3363 # Make sure empty_verts is same length as real vertices 3364 if any([s for s,t in stchannels]): 3365 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 3366 3367 # Reorganize s-channel vertices to get a list of all 3368 # subprocesses for each vertex 3369 schannels = zip(*[s for s,t in stchannels]) 3370 else: 3371 schannels = [] 3372 3373 allchannels = schannels 3374 if len(tchannels) > 1: 3375 # Write out tchannels only if there are any non-trivial ones 3376 allchannels = schannels + tchannels 3377 3378 # Write out propagators for s-channel and t-channel vertices 3379 3380 #lines.append("# Diagram %d" % (mapconfigs[iconfig])) 3381 # Correspondance between the config and the diagram = amp2 3382 lines.append("* %d %d " % (nconfigs, 3383 mapconfigs[iconfig])) 3384 3385 for verts in allchannels: 3386 if verts in schannels: 3387 vert = [v for v in verts if v][0] 3388 else: 3389 vert = verts 3390 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 3391 last_leg = vert.get('legs')[-1] 3392 line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1]) 3393 # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 3394 # (last_leg.get('number'), nconfigs, len(daughters), 3395 # ",".join([str(d) for d in daughters]))) 3396 3397 if last_leg.get('id') == 21 and 21 not in particle_dict: 3398 # Fake propagator used in multiparticle vertices 3399 mass = 'zero' 3400 width = 'zero' 3401 pow_part = 0 3402 else: 3403 if (last_leg.get('id')!=7): 3404 particle = particle_dict[last_leg.get('id')] 
3405 # Get mass 3406 mass = particle.get('mass') 3407 # Get width 3408 width = particle.get('width') 3409 else : # fake propagator used in multiparticle vertices 3410 mass= 'zero' 3411 width= 'zero' 3412 3413 line=line+" "+mass+" "+width+" " 3414 3415 if verts in schannels: 3416 pdgs = [] 3417 for v in verts: 3418 if v: 3419 pdgs.append(v.get('legs')[-1].get('id')) 3420 else: 3421 pdgs.append(0) 3422 lines.append(line+" S "+str(last_leg.get('id'))) 3423 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3424 # (last_leg.get('number'), nconfigs, nsubprocs, 3425 # ",".join([str(d) for d in pdgs]))) 3426 # lines.append("data tprid(%d,%d)/0/" % \ 3427 # (last_leg.get('number'), nconfigs)) 3428 elif verts in tchannels[:-1]: 3429 lines.append(line+" T "+str(last_leg.get('id'))) 3430 # lines.append("data tprid(%d,%d)/%d/" % \ 3431 # (last_leg.get('number'), nconfigs, 3432 # abs(last_leg.get('id')))) 3433 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3434 # (last_leg.get('number'), nconfigs, nsubprocs, 3435 # ",".join(['0'] * nsubprocs))) 3436 3437 # Write out number of configs 3438 # lines.append("# Number of configs") 3439 # lines.append("data mapconfig(0)/%d/" % nconfigs) 3440 lines.append(" * ") # a line with just a star indicates this is the end of file 3441 # Write the file 3442 writer.writelines(lines) 3443 3444 return s_and_t_channels
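     # Illustrative sketch of the file written above (hypothetical numbers and
     # parameter names, not part of the module): each kept config opens with a
     # '* <config> <diagram>' line, every vertex then contributes
     #     <propagator> <daughter1> <daughter2> <mass> <width>  S|T <pdg>
     # and the file closes with a line holding a single star, e.g.
     #     * 1 1
     #     -1 3 4 MZ WZ  S 23
     #      *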
3445
3446 3447 3448 #=============================================================================== 3449 # ProcessExporterFortranME 3450 #=============================================================================== 3451 -class ProcessExporterFortranME(ProcessExporterFortran):
3452      """Class to take care of exporting a set of matrix elements to 3453      MadEvent format.""" 3454 3455      matrix_file = "matrix_madevent_v4.inc" 3456 3457      # helper function to customise the helas call writer 3458      @staticmethod
3459 - def custom_helas_call(call, arg):
3460 if arg['mass'] == '%(M)s,%(W)s,': 3461 arg['mass'] = '%(M)s, fk_%(W)s,' 3462 elif '%(W)s' in arg['mass']: 3463 raise Exception 3464 return call, arg
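     # Illustrative sketch (hypothetical input, not part of the module): given
     #     call, arg = 'CALL ...', {'mass': '%(M)s,%(W)s,'}
     # the hook above returns the call unchanged and rewrites the argument to
     #     {'mass': '%(M)s, fk_%(W)s,'}
     # so that the generated HELAS calls pick up the protected width fk_<W>
     # declared in write_matrix_element_v4 below instead of the raw width.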
3465
3466 - def copy_template(self, model):
3467          """Additional actions needed for setup of Template 3468          """ 3469 3470          super(ProcessExporterFortranME, self).copy_template(model) 3471 3472          # Files created from Template (different in some child classes) 3473          filename = pjoin(self.dir_path,'Source','run_config.inc') 3474          self.write_run_config_file(writers.FortranWriter(filename)) 3475 3476          # The next files are model dependent (due to the SLHA convention) 3477          self.model_name = model.get('name') 3478          # Add the symmetry.f 3479          filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3480          self.write_symmetry(writers.FortranWriter(filename)) 3481          # 3482          filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3483          self.write_addmothers(writers.FortranWriter(filename)) 3484          # Copy the different python files into the Template 3485          self.copy_python_file()
3486 3487 3488 3489 3490 3491 3492 #=========================================================================== 3493 # generate_subprocess_directory 3494 #===========================================================================
3495 - def copy_python_file(self):
3496 """copy the python file require for the Template""" 3497 3498 # madevent interface 3499 cp(_file_path+'/interface/madevent_interface.py', 3500 self.dir_path+'/bin/internal/madevent_interface.py') 3501 cp(_file_path+'/interface/extended_cmd.py', 3502 self.dir_path+'/bin/internal/extended_cmd.py') 3503 cp(_file_path+'/interface/common_run_interface.py', 3504 self.dir_path+'/bin/internal/common_run_interface.py') 3505 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3506 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3507 cp(_file_path+'/iolibs/save_load_object.py', 3508 self.dir_path+'/bin/internal/save_load_object.py') 3509 cp(_file_path+'/iolibs/file_writers.py', 3510 self.dir_path+'/bin/internal/file_writers.py') 3511 #model file 3512 cp(_file_path+'../models/check_param_card.py', 3513 self.dir_path+'/bin/internal/check_param_card.py') 3514 3515 #copy all the file present in madevent directory 3516 for name in os.listdir(pjoin(_file_path, 'madevent')): 3517 if name not in ['__init__.py'] and name.endswith('.py'): 3518 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3519 3520 #madevent file 3521 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3522 cp(_file_path+'/various/lhe_parser.py', 3523 self.dir_path+'/bin/internal/lhe_parser.py') 3524 cp(_file_path+'/various/banner.py', 3525 self.dir_path+'/bin/internal/banner.py') 3526 cp(_file_path+'/various/histograms.py', 3527 self.dir_path+'/bin/internal/histograms.py') 3528 cp(_file_path+'/various/plot_djrs.py', 3529 self.dir_path+'/bin/internal/plot_djrs.py') 3530 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3531 3532 cp(_file_path+'/various/cluster.py', 3533 self.dir_path+'/bin/internal/cluster.py') 3534 cp(_file_path+'/madevent/combine_runs.py', 3535 self.dir_path+'/bin/internal/combine_runs.py') 3536 # logging configuration 3537 cp(_file_path+'/interface/.mg5_logging.conf', 3538 self.dir_path+'/bin/internal/me5_logging.conf') 3539 cp(_file_path+'/interface/coloring_logging.py', 3540 self.dir_path+'/bin/internal/coloring_logging.py') 3541 # shower card and FO_analyse_card. 3542 # Although not needed, it is imported by banner.py 3543 cp(_file_path+'/various/shower_card.py', 3544 self.dir_path+'/bin/internal/shower_card.py') 3545 cp(_file_path+'/various/FO_analyse_card.py', 3546 self.dir_path+'/bin/internal/FO_analyse_card.py')
3547 3548
3549 - def convert_model(self, model, wanted_lorentz = [], 3550 wanted_couplings = []):
3551 3552 super(ProcessExporterFortranME,self).convert_model(model, 3553 wanted_lorentz, wanted_couplings) 3554 3555 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3556 try: 3557 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3558 except OSError as error: 3559 pass 3560 model_path = model.get('modelpath') 3561 # This is not safe if there is a '##' or '-' in the path. 3562 shutil.copytree(model_path, 3563 pjoin(self.dir_path,'bin','internal','ufomodel'), 3564 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3565 if hasattr(model, 'restrict_card'): 3566 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3567 'restrict_default.dat') 3568 if isinstance(model.restrict_card, check_param_card.ParamCard): 3569 model.restrict_card.write(out_path) 3570 else: 3571 files.cp(model.restrict_card, out_path)
3572 3573 #=========================================================================== 3574 # export model files 3575 #===========================================================================
3576 - def export_model_files(self, model_path):
3577 """export the model dependent files""" 3578 3579 super(ProcessExporterFortranME,self).export_model_files(model_path) 3580 3581 # Add the routine update_as_param in v4 model 3582 # This is a function created in the UFO 3583 text=""" 3584 subroutine update_as_param() 3585 call setpara('param_card.dat',.false.) 3586 return 3587 end 3588 """ 3589 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3590 ff.write(text) 3591 ff.close() 3592 3593 # Add the symmetry.f 3594 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3595 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3596 3597 # Modify setrun.f 3598 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3599 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3600 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3601 fsock.write(text) 3602 fsock.close() 3603 3604 self.make_model_symbolic_link()
3605 3606 #=========================================================================== 3607 # generate_subprocess_directory 3608 #===========================================================================
3609 - def generate_subprocess_directory(self, matrix_element, 3610 fortran_model, 3611 me_number):
3612 """Generate the Pxxxxx directory for a subprocess in MG4 madevent, 3613 including the necessary matrix.f and various helper files""" 3614 3615 cwd = os.getcwd() 3616 path = pjoin(self.dir_path, 'SubProcesses') 3617 3618 3619 if not self.model: 3620 self.model = matrix_element.get('processes')[0].get('model') 3621 3622 3623 3624 #os.chdir(path) 3625 # Create the directory PN_xx_xxxxx in the specified path 3626 subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string() 3627 try: 3628 os.mkdir(pjoin(path,subprocdir)) 3629 except os.error as error: 3630 logger.warning(error.strerror + " " + subprocdir) 3631 3632 #try: 3633 # os.chdir(subprocdir) 3634 #except os.error: 3635 # logger.error('Could not cd to directory %s' % subprocdir) 3636 # return 0 3637 3638 logger.info('Creating files in directory %s' % subprocdir) 3639 Ppath = pjoin(path, subprocdir) 3640 3641 # Extract number of external particles 3642 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3643 3644 # Add the driver.f 3645 ncomb = matrix_element.get_helicity_combinations() 3646 filename = pjoin(Ppath,'driver.f') 3647 self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1, 3648 v5=self.opt['v5_model']) 3649 3650 # Create the matrix.f file, auto_dsig.f file and all inc files 3651 filename = pjoin(Ppath, 'matrix.f') 3652 calls, ncolor = \ 3653 self.write_matrix_element_v4(writers.FortranWriter(filename), 3654 matrix_element, fortran_model, subproc_number = me_number) 3655 3656 filename = pjoin(Ppath, 'auto_dsig.f') 3657 self.write_auto_dsig_file(writers.FortranWriter(filename), 3658 matrix_element) 3659 3660 filename = pjoin(Ppath, 'configs.inc') 3661 mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 3662 writers.FortranWriter(filename), 3663 matrix_element) 3664 3665 filename = pjoin(Ppath, 'config_nqcd.inc') 3666 self.write_config_nqcd_file(writers.FortranWriter(filename), 3667 nqcd_list) 3668 3669 filename = pjoin(Ppath, 'config_subproc_map.inc') 3670 self.write_config_subproc_map_file(writers.FortranWriter(filename), 3671 s_and_t_channels) 3672 3673 filename = pjoin(Ppath, 'coloramps.inc') 3674 self.write_coloramps_file(writers.FortranWriter(filename), 3675 mapconfigs, 3676 matrix_element) 3677 3678 filename = pjoin(Ppath, 'get_color.f') 3679 self.write_colors_file(writers.FortranWriter(filename), 3680 matrix_element) 3681 3682 filename = pjoin(Ppath, 'decayBW.inc') 3683 self.write_decayBW_file(writers.FortranWriter(filename), 3684 s_and_t_channels) 3685 3686 filename = pjoin(Ppath, 'dname.mg') 3687 self.write_dname_file(writers.FileWriter(filename), 3688 "P"+matrix_element.get('processes')[0].shell_string()) 3689 3690 filename = pjoin(Ppath, 'iproc.dat') 3691 self.write_iproc_file(writers.FortranWriter(filename), 3692 me_number) 3693 3694 filename = pjoin(Ppath, 'leshouche.inc') 3695 self.write_leshouche_file(writers.FortranWriter(filename), 3696 matrix_element) 3697 3698 filename = pjoin(Ppath, 'maxamps.inc') 3699 self.write_maxamps_file(writers.FortranWriter(filename), 3700 len(matrix_element.get('diagrams')), 3701 ncolor, 3702 len(matrix_element.get('processes')), 3703 1) 3704 3705 filename = pjoin(Ppath, 'mg.sym') 3706 self.write_mg_sym_file(writers.FortranWriter(filename), 3707 matrix_element) 3708 3709 filename = pjoin(Ppath, 'ncombs.inc') 3710 self.write_ncombs_file(writers.FortranWriter(filename), 3711 nexternal) 3712 3713 filename = pjoin(Ppath, 'nexternal.inc') 3714 self.write_nexternal_file(writers.FortranWriter(filename), 3715 nexternal, ninitial) 
3716 3717 filename = pjoin(Ppath, 'ngraphs.inc') 3718 self.write_ngraphs_file(writers.FortranWriter(filename), 3719 len(mapconfigs)) 3720 3721 3722 filename = pjoin(Ppath, 'pmass.inc') 3723 self.write_pmass_file(writers.FortranWriter(filename), 3724 matrix_element) 3725 3726 filename = pjoin(Ppath, 'props.inc') 3727 self.write_props_file(writers.FortranWriter(filename), 3728 matrix_element, 3729 s_and_t_channels) 3730 3731 # Find config symmetries and permutations 3732 symmetry, perms, ident_perms = \ 3733 diagram_symmetry.find_symmetry(matrix_element) 3734 3735 filename = pjoin(Ppath, 'symswap.inc') 3736 self.write_symswap_file(writers.FortranWriter(filename), 3737 ident_perms) 3738 3739 filename = pjoin(Ppath, 'symfact_orig.dat') 3740 self.write_symfact_file(open(filename, 'w'), symmetry) 3741 3742 # Generate diagrams 3743 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 3744 filename = pjoin(Ppath, "matrix.ps") 3745 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3746 get('diagrams'), 3747 filename, 3748 model=matrix_element.get('processes')[0].\ 3749 get('model'), 3750 amplitude=True) 3751 logger.info("Generating Feynman diagrams for " + \ 3752 matrix_element.get('processes')[0].nice_string()) 3753 plot.draw() 3754 3755 self.link_files_in_SubProcess(Ppath) 3756 3757 #import nexternal/leshouche in Source 3758 ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False) 3759 ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False) 3760 ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False) 3761 # Return to SubProcesses dir 3762 #os.chdir(os.path.pardir) 3763 3764 # Add subprocess to subproc.mg 3765 filename = pjoin(path, 'subproc.mg') 3766 files.append_to_file(filename, 3767 self.write_subproc, 3768 subprocdir) 3769 3770 # Return to original dir 3771 #os.chdir(cwd) 3772 3773 # Generate info page 3774 gen_infohtml.make_info_html(self.dir_path) 3775 3776 3777 if not calls: 3778 calls = 0 3779 return calls
3780 3781 link_Sub_files = ['addmothers.f', 3782 'cluster.f', 3783 'cluster.inc', 3784 'coupl.inc', 3785 'cuts.f', 3786 'cuts.inc', 3787 'genps.f', 3788 'genps.inc', 3789 'idenparts.f', 3790 'initcluster.f', 3791 'makefile', 3792 'message.inc', 3793 'myamp.f', 3794 'reweight.f', 3795 'run.inc', 3796 'maxconfigs.inc', 3797 'maxparticles.inc', 3798 'run_config.inc', 3799 'lhe_event_infos.inc', 3800 'setcuts.f', 3801 'setscales.f', 3802 'sudakov.inc', 3803 'symmetry.f', 3804 'unwgt.f', 3805 'dummy_fct.f' 3806 ] 3807 3821 3822
3823 - def finalize(self, matrix_elements, history, mg5options, flaglist):
3824 """Finalize ME v4 directory by creating jpeg diagrams, html 3825 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3826 3827 if 'nojpeg' in flaglist: 3828 makejpg = False 3829 else: 3830 makejpg = True 3831 if 'online' in flaglist: 3832 online = True 3833 else: 3834 online = False 3835 3836 compiler = {'fortran': mg5options['fortran_compiler'], 3837 'cpp': mg5options['cpp_compiler'], 3838 'f2py': mg5options['f2py_compiler']} 3839 3840 # indicate that the output type is not grouped 3841 if not isinstance(self, ProcessExporterFortranMEGroup): 3842 self.proc_characteristic['grouped_matrix'] = False 3843 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 3844 # indicate the PDG of all initial particle 3845 try: 3846 pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3847 pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3848 except AttributeError: 3849 pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3850 pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3851 self.proc_characteristic['pdg_initial1'] = pdgs1 3852 self.proc_characteristic['pdg_initial2'] = pdgs2 3853 3854 3855 modelname = self.opt['model'] 3856 if modelname == 'mssm' or modelname.startswith('mssm-'): 3857 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3858 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3859 check_param_card.convert_to_mg5card(param_card, mg5_param) 3860 check_param_card.check_valid_param_card(mg5_param) 3861 3862 # Add the combine_events.f modify param_card path/number of @X 3863 filename = pjoin(self.dir_path,'Source','combine_events.f') 3864 try: 3865 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3866 except AttributeError: 3867 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3868 nb_proc = len(set(nb_proc)) 3869 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3870 # Write maxconfigs.inc based on max of ME's/subprocess groups 3871 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3872 self.write_maxconfigs_file(writers.FortranWriter(filename), 3873 matrix_elements) 3874 3875 # Write maxparticles.inc based on max of ME's/subprocess groups 3876 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3877 self.write_maxparticles_file(writers.FortranWriter(filename), 3878 matrix_elements) 3879 3880 # Touch "done" file 3881 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3882 3883 # Check for compiler 3884 self.set_compiler(compiler) 3885 self.set_cpp_compiler(compiler['cpp']) 3886 3887 3888 old_pos = os.getcwd() 3889 subpath = pjoin(self.dir_path, 'SubProcesses') 3890 3891 P_dir_list = [proc for proc in os.listdir(subpath) 3892 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3893 3894 devnull = os.open(os.devnull, os.O_RDWR) 3895 # Convert the poscript in jpg files (if authorize) 3896 if makejpg: 3897 try: 3898 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3899 except Exception, error: 3900 pass 3901 3902 if misc.which('gs'): 3903 logger.info("Generate jpeg diagrams") 3904 for Pdir in P_dir_list: 3905 
misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3906                                 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3907 3908             logger.info("Generate web pages") 3909         # Create the web pages using the perl script 3910 3911         misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3912                                                 stdout = devnull,cwd=pjoin(self.dir_path)) 3913 3914         #os.chdir(os.path.pardir) 3915 3916         obj = gen_infohtml.make_info_html(self.dir_path) 3917 3918         if online: 3919             nb_channel = obj.rep_rule['nb_gen_diag'] 3920             open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3921         # add the information to proc_charac 3922         self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3923 3924         # Write command history as proc_card_mg5 3925         if os.path.isdir(pjoin(self.dir_path,'Cards')): 3926             output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3927             history.write(output_file) 3928 3929         misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3930                     stdout = devnull) 3931 3932         # create the proc_characteristic file 3933         self.create_proc_charac(matrix_elements, history) 3934 3935         # create the run_card 3936         ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist) 3937 3938         # Run "make" to generate madevent.tar.gz file 3939         if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3940             if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 3941                 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 3942             misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 3943                     stdout = devnull, cwd=self.dir_path) 3944 3945         misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3946                     stdout = devnull, cwd=self.dir_path)
3947 3948 3949 3950 3951 3952 3953 #return to the initial dir 3954 #os.chdir(old_pos) 3955 3956 #=========================================================================== 3957 # write_matrix_element_v4 3958 #===========================================================================
3959 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 3960 proc_id = "", config_map = [], subproc_number = ""):
3961 """Export a matrix element to a matrix.f file in MG4 madevent format""" 3962 3963 if not matrix_element.get('processes') or \ 3964 not matrix_element.get('diagrams'): 3965 return 0 3966 3967 if writer: 3968 if not isinstance(writer, writers.FortranWriter): 3969 raise writers.FortranWriter.FortranWriterError(\ 3970 "writer not FortranWriter") 3971 # Set lowercase/uppercase Fortran code 3972 writers.FortranWriter.downcase = False 3973 3974 # The proc prefix is not used for MadEvent output so it can safely be set 3975 # to an empty string. 3976 replace_dict = {'proc_prefix':''} 3977 3978 # Extract helas calls 3979 helas_calls = fortran_model.get_matrix_element_calls(\ 3980 matrix_element) 3981 3982 3983 replace_dict['helas_calls'] = "\n".join(helas_calls) 3984 3985 3986 #adding the support for the fake width (forbidding too small width) 3987 mass_width = matrix_element.get_all_mass_widths() 3988 width_list = set([e[1] for e in mass_width]) 3989 3990 replace_dict['fake_width_declaration'] = \ 3991 (' double precision fk_%s \n' * len(width_list)) % tuple(width_list) 3992 replace_dict['fake_width_declaration'] += \ 3993 (' save fk_%s \n' * len(width_list)) % tuple(width_list) 3994 fk_w_defs = [] 3995 one_def = ' fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)' 3996 for m, w in mass_width: 3997 if w == 'zero': 3998 if ' fk_zero = 0d0' not in fk_w_defs: 3999 fk_w_defs.append(' fk_zero = 0d0') 4000 continue 4001 fk_w_defs.append(one_def %{'m':m, 'w':w}) 4002 replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs) 4003 4004 # Extract version number and date from VERSION file 4005 info_lines = self.get_mg5_info_lines() 4006 replace_dict['info_lines'] = info_lines 4007 4008 # Extract process info lines 4009 process_lines = self.get_process_info_lines(matrix_element) 4010 replace_dict['process_lines'] = process_lines 4011 4012 # Set proc_id 4013 replace_dict['proc_id'] = proc_id 4014 4015 # Extract ncomb 4016 ncomb = matrix_element.get_helicity_combinations() 4017 replace_dict['ncomb'] = ncomb 4018 4019 # Extract helicity lines 4020 helicity_lines = self.get_helicity_lines(matrix_element) 4021 replace_dict['helicity_lines'] = helicity_lines 4022 4023 # Extract IC line 4024 ic_line = self.get_ic_line(matrix_element) 4025 replace_dict['ic_line'] = ic_line 4026 4027 # Extract overall denominator 4028 # Averaging initial state color, spin, and identical FS particles 4029 den_factor_line = self.get_den_factor_line(matrix_element) 4030 replace_dict['den_factor_line'] = den_factor_line 4031 4032 # Extract ngraphs 4033 ngraphs = matrix_element.get_number_of_amplitudes() 4034 replace_dict['ngraphs'] = ngraphs 4035 4036 # Extract ndiags 4037 ndiags = len(matrix_element.get('diagrams')) 4038 replace_dict['ndiags'] = ndiags 4039 4040 # Set define_iconfigs_lines 4041 replace_dict['define_iconfigs_lines'] = \ 4042 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 4043 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 4044 4045 if proc_id: 4046 # Set lines for subprocess group version 4047 # Set define_iconfigs_lines 4048 replace_dict['define_iconfigs_lines'] += \ 4049 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4050 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4051 # Set set_amp2_line 4052 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 4053 proc_id 4054 else: 4055 # Standard running 4056 # Set set_amp2_line 4057 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 4058 4059 # Extract nwavefuncs 4060 nwavefuncs = matrix_element.get_number_of_wavefunctions() 4061 
replace_dict['nwavefuncs'] = nwavefuncs 4062 4063 # Extract ncolor 4064 ncolor = max(1, len(matrix_element.get('color_basis'))) 4065 replace_dict['ncolor'] = ncolor 4066 4067 # Extract color data lines 4068 color_data_lines = self.get_color_data_lines(matrix_element) 4069 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 4070 4071 4072 # Set the size of Wavefunction 4073 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 4074 replace_dict['wavefunctionsize'] = 18 4075 else: 4076 replace_dict['wavefunctionsize'] = 6 4077 4078 # Extract amp2 lines 4079 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 4080 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 4081 4082 # The JAMP definition depends on the splitting order 4083 split_orders=matrix_element.get('processes')[0].get('split_orders') 4084 if len(split_orders)>0: 4085 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 4086 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 4087 matrix_element.get('processes')[0],squared_orders) 4088 else: 4089 # Consider the output of a dummy order 'ALL_ORDERS' for which we 4090 # set all amplitude order to weight 1 and only one squared order 4091 # contribution which is of course ALL_ORDERS=2. 4092 squared_orders = [(2,),] 4093 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 4094 replace_dict['chosen_so_configs'] = '.TRUE.' 4095 4096 replace_dict['nAmpSplitOrders']=len(amp_orders) 4097 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 4098 replace_dict['split_order_str_list']=str(split_orders) 4099 replace_dict['nSplitOrders']=max(len(split_orders),1) 4100 amp_so = self.get_split_orders_lines( 4101 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 4102 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 4103 replace_dict['ampsplitorders']='\n'.join(amp_so) 4104 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 4105 4106 4107 # Extract JAMP lines 4108 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 4109 jamp_lines = self.get_JAMP_lines_split_order(\ 4110 matrix_element,amp_orders,split_order_names= 4111 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 4112 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 4113 4114 replace_dict['template_file'] = pjoin(_file_path, \ 4115 'iolibs/template_files/%s' % self.matrix_file) 4116 replace_dict['template_file2'] = pjoin(_file_path, \ 4117 'iolibs/template_files/split_orders_helping_functions.inc') 4118 if writer: 4119 file = open(replace_dict['template_file']).read() 4120 file = file % replace_dict 4121 # Add the split orders helper functions. 4122 file = file + '\n' + open(replace_dict['template_file2'])\ 4123 .read()%replace_dict 4124 # Write the file 4125 writer.writelines(file) 4126 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor 4127 else: 4128 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor) 4129 return replace_dict
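     # Illustrative sketch (assumed parameter names MT/WT, not part of the
     # module): for a (mass, width) pair ('MT', 'WT') the code above declares
     #     double precision fk_WT
     #     save fk_WT
     # and defines
     #     fk_WT = SIGN(MAX(ABS(WT), ABS(MT*small_width_treatment)), WT)
     # while a width equal to 'zero' only yields fk_zero = 0d0.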
4130 4131 #=========================================================================== 4132 # write_auto_dsig_file 4133 #===========================================================================
4134 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
4135 """Write the auto_dsig.f file for the differential cross section 4136 calculation, includes pdf call information""" 4137 4138 if not matrix_element.get('processes') or \ 4139 not matrix_element.get('diagrams'): 4140 return 0 4141 4142 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 4143 self.proc_characteristic['ninitial'] = ninitial 4144 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 4145 4146 # Add information relevant for MLM matching: 4147 # Maximum QCD power in all the contributions 4148 max_qcd_order = 0 4149 for diag in matrix_element.get('diagrams'): 4150 orders = diag.calculate_orders() 4151 if 'QCD' in orders: 4152 max_qcd_order = max(max_qcd_order,orders['QCD']) 4153 max_n_light_final_partons = max(len([1 for id in proc.get_final_ids() 4154 if proc.get('model').get_particle(id).get('mass')=='ZERO' and 4155 proc.get('model').get_particle(id).get('color')>1]) 4156 for proc in matrix_element.get('processes')) 4157 # Maximum number of final state light jets to be matched 4158 self.proc_characteristic['max_n_matched_jets'] = max( 4159 self.proc_characteristic['max_n_matched_jets'], 4160 min(max_qcd_order,max_n_light_final_partons)) 4161 4162 # List of default pdgs to be considered for the CKKWl merging cut 4163 self.proc_characteristic['colored_pdgs'] = \ 4164 sorted(list(set([abs(p.get('pdg_code')) for p in 4165 matrix_element.get('processes')[0].get('model').get('particles') if 4166 p.get('color')>1]))) 4167 4168 if ninitial < 1 or ninitial > 2: 4169 raise writers.FortranWriter.FortranWriterError, \ 4170 """Need ninitial = 1 or 2 to write auto_dsig file""" 4171 4172 replace_dict = {} 4173 4174 # Extract version number and date from VERSION file 4175 info_lines = self.get_mg5_info_lines() 4176 replace_dict['info_lines'] = info_lines 4177 4178 # Extract process info lines 4179 process_lines = self.get_process_info_lines(matrix_element) 4180 replace_dict['process_lines'] = process_lines 4181 4182 # Set proc_id 4183 replace_dict['proc_id'] = proc_id 4184 replace_dict['numproc'] = 1 4185 4186 # Set dsig_line 4187 if ninitial == 1: 4188 # No conversion, since result of decay should be given in GeV 4189 dsig_line = "pd(0)*dsiguu" 4190 else: 4191 # Convert result (in GeV) to pb 4192 dsig_line = "pd(0)*conv*dsiguu" 4193 4194 replace_dict['dsig_line'] = dsig_line 4195 4196 # Extract pdf lines 4197 pdf_vars, pdf_data, pdf_lines = \ 4198 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 4199 replace_dict['pdf_vars'] = pdf_vars 4200 replace_dict['pdf_data'] = pdf_data 4201 replace_dict['pdf_lines'] = pdf_lines 4202 4203 # Lines that differ between subprocess group and regular 4204 if proc_id: 4205 replace_dict['numproc'] = int(proc_id) 4206 replace_dict['passcuts_begin'] = "" 4207 replace_dict['passcuts_end'] = "" 4208 # Set lines for subprocess group version 4209 # Set define_iconfigs_lines 4210 replace_dict['define_subdiag_lines'] = \ 4211 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4212 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4213 replace_dict['cutsdone'] = "" 4214 else: 4215 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 4216 replace_dict['passcuts_end'] = "ENDIF" 4217 replace_dict['define_subdiag_lines'] = "" 4218 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 
4219 4220 if not isinstance(self, ProcessExporterFortranMEGroup): 4221 ncomb=matrix_element.get_helicity_combinations() 4222 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4223 else: 4224 replace_dict['read_write_good_hel'] = "" 4225 4226 context = {'read_write_good_hel':True} 4227 4228 if writer: 4229 file = open(pjoin(_file_path, \ 4230 'iolibs/template_files/auto_dsig_v4.inc')).read() 4231 file = file % replace_dict 4232 4233 # Write the file 4234 writer.writelines(file, context=context) 4235 else: 4236 return replace_dict, context
4237 #=========================================================================== 4238 # write_coloramps_file 4239 #===========================================================================
4240 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4241 """Write the coloramps.inc file for MadEvent""" 4242 4243 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4244 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4245 (max(len(matrix_element.get('color_basis').keys()), 1), 4246 len(mapconfigs))) 4247 4248 4249 # Write the file 4250 writer.writelines(lines) 4251 4252 return True
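     # Illustrative sketch (hypothetical sizes, not part of the module): for a
     # matrix element with 2 color flows and 3 kept configs the file opens with
     #     logical icolamp(2,3,1)
     # followed by the DATA statements produced by get_icolamp_lines.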
4253 4254 #=========================================================================== 4255 # write_colors_file 4256 #===========================================================================
4257 - def write_colors_file(self, writer, matrix_elements):
4258 """Write the get_color.f file for MadEvent, which returns color 4259 for all particles used in the matrix element.""" 4260 4261 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 4262 matrix_elements = [matrix_elements] 4263 4264 model = matrix_elements[0].get('processes')[0].get('model') 4265 4266 # We need the both particle and antiparticle wf_ids, since the identity 4267 # depends on the direction of the wf. 4268 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 4269 for wf in d.get('wavefunctions')],[]) \ 4270 for d in me.get('diagrams')], []) \ 4271 for me in matrix_elements], [])) 4272 4273 leg_ids = set(sum([sum([sum([[l.get('id'), 4274 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 4275 for l in p.get_legs_with_decays()], []) \ 4276 for p in me.get('processes')], []) \ 4277 for me in matrix_elements], [])) 4278 particle_ids = sorted(list(wf_ids.union(leg_ids))) 4279 4280 lines = """function get_color(ipdg) 4281 implicit none 4282 integer get_color, ipdg 4283 4284 if(ipdg.eq.%d)then 4285 get_color=%d 4286 return 4287 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 4288 4289 for part_id in particle_ids[1:]: 4290 lines += """else if(ipdg.eq.%d)then 4291 get_color=%d 4292 return 4293 """ % (part_id, model.get_particle(part_id).get_color()) 4294 # Dummy particle for multiparticle vertices with pdg given by 4295 # first code not in the model 4296 lines += """else if(ipdg.eq.%d)then 4297 c This is dummy particle used in multiparticle vertices 4298 get_color=2 4299 return 4300 """ % model.get_first_non_pdg() 4301 lines += """else 4302 write(*,*)'Error: No color given for pdg ',ipdg 4303 get_color=0 4304 return 4305 endif 4306 end 4307 """ 4308 4309 # Write the file 4310 writer.writelines(lines) 4311 4312 return True
4313 4314 #=========================================================================== 4315 # write_config_nqcd_file 4316 #===========================================================================
4317 - def write_config_nqcd_file(self, writer, nqcd_list):
4318 """Write the config_nqcd.inc with the number of QCD couplings 4319 for each config""" 4320 4321 lines = [] 4322 for iconf, n in enumerate(nqcd_list): 4323 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4324 4325 # Write the file 4326 writer.writelines(lines) 4327 4328 return True
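     # Illustrative sketch (hypothetical values, not part of the module): for
     # nqcd_list = [2, 2, 0] the file contains
     #     data nqcd(1)/2/
     #     data nqcd(2)/2/
     #     data nqcd(3)/0/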
4329 4330 #=========================================================================== 4331 # write_maxconfigs_file 4332 #===========================================================================
4333 - def write_maxconfigs_file(self, writer, matrix_elements):
4334 """Write the maxconfigs.inc file for MadEvent""" 4335 4336 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4337 maxconfigs = max([me.get_num_configs() for me in \ 4338 matrix_elements.get('matrix_elements')]) 4339 else: 4340 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4341 4342 lines = "integer lmaxconfigs\n" 4343 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4344 4345 # Write the file 4346 writer.writelines(lines) 4347 4348 return True
4349 4350 #=========================================================================== 4351 # read_write_good_hel 4352 #===========================================================================
4353 - def read_write_good_hel(self, ncomb):
4354 """return the code to read/write the good_hel common_block""" 4355 4356 convert = {'ncomb' : ncomb} 4357 output = """ 4358 subroutine write_good_hel(stream_id) 4359 implicit none 4360 integer stream_id 4361 INTEGER NCOMB 4362 PARAMETER ( NCOMB=%(ncomb)d) 4363 LOGICAL GOODHEL(NCOMB) 4364 INTEGER NTRY 4365 common/BLOCK_GOODHEL/NTRY,GOODHEL 4366 write(stream_id,*) GOODHEL 4367 return 4368 end 4369 4370 4371 subroutine read_good_hel(stream_id) 4372 implicit none 4373 include 'genps.inc' 4374 integer stream_id 4375 INTEGER NCOMB 4376 PARAMETER ( NCOMB=%(ncomb)d) 4377 LOGICAL GOODHEL(NCOMB) 4378 INTEGER NTRY 4379 common/BLOCK_GOODHEL/NTRY,GOODHEL 4380 read(stream_id,*) GOODHEL 4381 NTRY = MAXTRIES + 1 4382 return 4383 end 4384 4385 subroutine init_good_hel() 4386 implicit none 4387 INTEGER NCOMB 4388 PARAMETER ( NCOMB=%(ncomb)d) 4389 LOGICAL GOODHEL(NCOMB) 4390 INTEGER NTRY 4391 INTEGER I 4392 4393 do i=1,NCOMB 4394 GOODHEL(I) = .false. 4395 enddo 4396 NTRY = 0 4397 end 4398 4399 integer function get_maxsproc() 4400 implicit none 4401 get_maxsproc = 1 4402 return 4403 end 4404 4405 """ % convert 4406 4407 return output
4408 4409 #=========================================================================== 4410 # write_config_subproc_map_file 4411 #===========================================================================
4412 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4413          """Write a dummy config_subproc_map.inc file for MadEvent""" 4414 4415         lines = [] 4416 4417         for iconfig in range(len(s_and_t_channels)): 4418             lines.append("DATA CONFSUB(1,%d)/1/" % \ 4419                          (iconfig + 1)) 4420 4421         # Write the file 4422         writer.writelines(lines) 4423 4424         return True
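     # Illustrative sketch (hypothetical size, not part of the module): with
     # three configs in s_and_t_channels the file reads
     #     DATA CONFSUB(1,1)/1/
     #     DATA CONFSUB(1,2)/1/
     #     DATA CONFSUB(1,3)/1/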
4425 4426 #=========================================================================== 4427 # write_configs_file 4428 #===========================================================================
4429 - def write_configs_file(self, writer, matrix_element):
4430 """Write the configs.inc file for MadEvent""" 4431 4432 # Extract number of external particles 4433 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4434 4435 model = matrix_element.get('processes')[0].get('model') 4436 configs = [(i+1, d) for (i, d) in \ 4437 enumerate(matrix_element.get('diagrams'))] 4438 mapconfigs = [c[0] for c in configs] 4439 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4440 [[c[1]] for c in configs], 4441 mapconfigs, 4442 nexternal, ninitial, 4443 model)
4444 4445 #=========================================================================== 4446 # write_run_configs_file 4447 #===========================================================================
4448 - def write_run_config_file(self, writer):
4449 """Write the run_configs.inc file for MadEvent""" 4450 4451 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4452 4453 if self.proc_characteristic['loop_induced']: 4454 job_per_chan = 1 4455 else: 4456 job_per_chan = 5 4457 4458 if writer: 4459 text = open(path).read() % {'chanperjob': job_per_chan} 4460 writer.write(text) 4461 return True 4462 else: 4463 return {'chanperjob': job_per_chan}
4464 4465 #=========================================================================== 4466 # write_configs_file_from_diagrams 4467 #===========================================================================
4468 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 4469 nexternal, ninitial, model):
4470 """Write the actual configs.inc file. 4471 4472 configs is the diagrams corresponding to configs (each 4473 diagrams is a list of corresponding diagrams for all 4474 subprocesses, with None if there is no corresponding diagrams 4475 for a given process). 4476 mapconfigs gives the diagram number for each config. 4477 4478 For s-channels, we need to output one PDG for each subprocess in 4479 the subprocess group, in order to be able to pick the right 4480 one for multiprocesses.""" 4481 4482 lines = [] 4483 4484 s_and_t_channels = [] 4485 4486 nqcd_list = [] 4487 4488 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 4489 for config in configs if [d for d in config if d][0].\ 4490 get_vertex_leg_numbers()!=[]] 4491 minvert = min(vert_list) if vert_list!=[] else 0 4492 4493 # Number of subprocesses 4494 nsubprocs = len(configs[0]) 4495 4496 nconfigs = 0 4497 4498 new_pdg = model.get_first_non_pdg() 4499 4500 for iconfig, helas_diags in enumerate(configs): 4501 if any([vert > minvert for vert in 4502 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 4503 # Only 3-vertices allowed in configs.inc 4504 continue 4505 nconfigs += 1 4506 4507 # Need s- and t-channels for all subprocesses, including 4508 # those that don't contribute to this config 4509 empty_verts = [] 4510 stchannels = [] 4511 for h in helas_diags: 4512 if h: 4513 # get_s_and_t_channels gives vertices starting from 4514 # final state external particles and working inwards 4515 stchannels.append(h.get('amplitudes')[0].\ 4516 get_s_and_t_channels(ninitial, model, 4517 new_pdg)) 4518 else: 4519 stchannels.append((empty_verts, None)) 4520 4521 # For t-channels, just need the first non-empty one 4522 tchannels = [t for s,t in stchannels if t != None][0] 4523 4524 # For s_and_t_channels (to be used later) use only first config 4525 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 4526 tchannels]) 4527 4528 # Make sure empty_verts is same length as real vertices 4529 if any([s for s,t in stchannels]): 4530 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 4531 4532 # Reorganize s-channel vertices to get a list of all 4533 # subprocesses for each vertex 4534 schannels = zip(*[s for s,t in stchannels]) 4535 else: 4536 schannels = [] 4537 4538 allchannels = schannels 4539 if len(tchannels) > 1: 4540 # Write out tchannels only if there are any non-trivial ones 4541 allchannels = schannels + tchannels 4542 4543 # Write out propagators for s-channel and t-channel vertices 4544 4545 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 4546 # Correspondance between the config and the diagram = amp2 4547 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 4548 mapconfigs[iconfig])) 4549 # Number of QCD couplings in this diagram 4550 nqcd = 0 4551 for h in helas_diags: 4552 if h: 4553 try: 4554 nqcd = h.calculate_orders()['QCD'] 4555 except KeyError: 4556 pass 4557 break 4558 else: 4559 continue 4560 4561 nqcd_list.append(nqcd) 4562 4563 for verts in allchannels: 4564 if verts in schannels: 4565 vert = [v for v in verts if v][0] 4566 else: 4567 vert = verts 4568 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 4569 last_leg = vert.get('legs')[-1] 4570 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 4571 (last_leg.get('number'), nconfigs, len(daughters), 4572 ",".join([str(d) for d in daughters]))) 4573 if verts in schannels: 4574 pdgs = [] 4575 for v in verts: 4576 if v: 4577 pdgs.append(v.get('legs')[-1].get('id')) 4578 else: 4579 pdgs.append(0) 4580 
lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4581 (last_leg.get('number'), nconfigs, nsubprocs, 4582 ",".join([str(d) for d in pdgs]))) 4583 lines.append("data tprid(%d,%d)/0/" % \ 4584 (last_leg.get('number'), nconfigs)) 4585 elif verts in tchannels[:-1]: 4586 lines.append("data tprid(%d,%d)/%d/" % \ 4587 (last_leg.get('number'), nconfigs, 4588 abs(last_leg.get('id')))) 4589 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4590 (last_leg.get('number'), nconfigs, nsubprocs, 4591 ",".join(['0'] * nsubprocs))) 4592 4593 # Write out number of configs 4594 lines.append("# Number of configs") 4595 lines.append("data mapconfig(0)/%d/" % nconfigs) 4596 4597 # Write the file 4598 writer.writelines(lines) 4599 4600 return s_and_t_channels, nqcd_list
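     # Illustrative sketch of the generated configs.inc (hypothetical numbers,
     # not part of the module): a single s-channel config of one subprocess
     # would come out roughly as
     #     # Diagram 1
     #     data mapconfig(1)/1/
     #     data (iforest(i,-1,1),i=1,2)/3,4/
     #     data (sprop(i,-1,1),i=1,1)/23/
     #     data tprid(-1,1)/0/
     #     # Number of configs
     #     data mapconfig(0)/1/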
4601 4602 #=========================================================================== 4603 # write_decayBW_file 4604 #===========================================================================
4605 - def write_decayBW_file(self, writer, s_and_t_channels):
4606 """Write the decayBW.inc file for MadEvent""" 4607 4608 lines = [] 4609 4610 booldict = {None: "0", True: "1", False: "2"} 4611 4612 for iconf, config in enumerate(s_and_t_channels): 4613 schannels = config[0] 4614 for vertex in schannels: 4615 # For the resulting leg, pick out whether it comes from 4616 # decay or not, as given by the onshell flag 4617 leg = vertex.get('legs')[-1] 4618 lines.append("data gForceBW(%d,%d)/%s/" % \ 4619 (leg.get('number'), iconf + 1, 4620 booldict[leg.get('onshell')])) 4621 4622 # Write the file 4623 writer.writelines(lines) 4624 4625 return True
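     # Illustrative sketch (hypothetical propagator number and config, not part
     # of the module): a propagator numbered -1 in config 1 is written according
     # to its onshell flag as
     #     data gForceBW(-1,1)/0/   when onshell is None
     #     data gForceBW(-1,1)/1/   when onshell is True
     #     data gForceBW(-1,1)/2/   when onshell is False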
4626 4627 #=========================================================================== 4628 # write_dname_file 4629 #===========================================================================
4630 - def write_dname_file(self, writer, dir_name):
4631 """Write the dname.mg file for MG4""" 4632 4633 line = "DIRNAME=%s" % dir_name 4634 4635 # Write the file 4636 writer.write(line + "\n") 4637 4638 return True
4639 4640 #=========================================================================== 4641 # write_driver 4642 #===========================================================================
4643 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4644 """Write the SubProcess/driver.f file for MG4""" 4645 4646 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4647 4648 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4649 card = 'Source/MODEL/MG5_param.dat' 4650 else: 4651 card = 'param_card.dat' 4652 # Requiring each helicity configuration to be probed by 10 points for 4653 # matrix element before using the resulting grid for MC over helicity 4654 # sampling. 4655 # We multiply this by 2 because each grouped subprocess is called at most 4656 # twice for each IMIRROR. 4657 replace_dict = {'param_card_name':card, 4658 'ncomb':ncomb, 4659 'hel_init_points':n_grouped_proc*10*2} 4660 if not v5: 4661 replace_dict['secondparam']=',.true.' 4662 else: 4663 replace_dict['secondparam']='' 4664 4665 if writer: 4666 text = open(path).read() % replace_dict 4667 writer.write(text) 4668 return True 4669 else: 4670 return replace_dict
4671 4672 #=========================================================================== 4673 # write_addmothers 4674 #===========================================================================
4675 - def write_addmothers(self, writer):
4676 """Write the SubProcess/addmothers.f""" 4677 4678 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4679 4680 text = open(path).read() % {'iconfig': 'diag_number'} 4681 writer.write(text) 4682 4683 return True
4684 4685 4686 #=========================================================================== 4687 # write_combine_events 4688 #===========================================================================
4689 - def write_combine_events(self, writer, nb_proc=100):
4690 """Write the SubProcess/driver.f file for MG4""" 4691 4692 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4693 4694 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4695 card = 'Source/MODEL/MG5_param.dat' 4696 else: 4697 card = 'param_card.dat' 4698 4699 #set maxpup (number of @X in the process card) 4700 4701 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4702 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4703 writer.write(text) 4704 4705 return True
4706 4707 4708 #=========================================================================== 4709 # write_symmetry 4710 #===========================================================================
4711 - def write_symmetry(self, writer, v5=True):
4712 """Write the SubProcess/driver.f file for ME""" 4713 4714 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4715 4716 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4717 card = 'Source/MODEL/MG5_param.dat' 4718 else: 4719 card = 'param_card.dat' 4720 4721 if v5: 4722 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 4723 else: 4724 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 4725 4726 if writer: 4727 text = open(path).read() 4728 text = text % replace_dict 4729 writer.write(text) 4730 return True 4731 else: 4732 return replace_dict
4733 4734 4735 4736 #=========================================================================== 4737 # write_iproc_file 4738 #===========================================================================
4739 - def write_iproc_file(self, writer, me_number):
4740 """Write the iproc.dat file for MG4""" 4741 line = "%d" % (me_number + 1) 4742 4743 # Write the file 4744 for line_to_write in writer.write_line(line): 4745 writer.write(line_to_write) 4746 return True
4747 4748 #=========================================================================== 4749 # write_mg_sym_file 4750 #===========================================================================
4751 - def write_mg_sym_file(self, writer, matrix_element):
4752 """Write the mg.sym file for MadEvent.""" 4753 4754 lines = [] 4755 4756 # Extract process with all decays included 4757 final_legs = filter(lambda leg: leg.get('state') == True, 4758 matrix_element.get('processes')[0].get_legs_with_decays()) 4759 4760 ninitial = len(filter(lambda leg: leg.get('state') == False, 4761 matrix_element.get('processes')[0].get('legs'))) 4762 4763 identical_indices = {} 4764 4765 # Extract identical particle info 4766 for i, leg in enumerate(final_legs): 4767 if leg.get('id') in identical_indices: 4768 identical_indices[leg.get('id')].append(\ 4769 i + ninitial + 1) 4770 else: 4771 identical_indices[leg.get('id')] = [i + ninitial + 1] 4772 4773 # Remove keys which have only one particle 4774 for key in identical_indices.keys(): 4775 if len(identical_indices[key]) < 2: 4776 del identical_indices[key] 4777 4778 # Write mg.sym file 4779 lines.append(str(len(identical_indices.keys()))) 4780 for key in identical_indices.keys(): 4781 lines.append(str(len(identical_indices[key]))) 4782 for number in identical_indices[key]: 4783 lines.append(str(number)) 4784 4785 # Write the file 4786 writer.writelines(lines) 4787 4788 return True
4789 
4790     #===========================================================================
4791     # write_default_mg_sym_file
4792     #===========================================================================
4793 - def write_default_mg_sym_file(self, writer):
4794 """Write the mg.sym file for MadEvent.""" 4795 4796 lines = "0" 4797 4798 # Write the file 4799 writer.writelines(lines) 4800 4801 return True
4802 4803 #=========================================================================== 4804 # write_ncombs_file 4805 #===========================================================================
4806 - def write_ncombs_file(self, writer, nexternal):
4807 """Write the ncombs.inc file for MadEvent.""" 4808 4809 # ncomb (used for clustering) is 2^nexternal 4810 file = " integer n_max_cl\n" 4811 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4812 4813 # Write the file 4814 writer.writelines(file) 4815 4816 return True
4817 4818 #=========================================================================== 4819 # write_processes_file 4820 #===========================================================================
4821 - def write_processes_file(self, writer, subproc_group):
4822 """Write the processes.dat file with info about the subprocesses 4823 in this group.""" 4824 4825 lines = [] 4826 4827 for ime, me in \ 4828 enumerate(subproc_group.get('matrix_elements')): 4829 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4830 ",".join(p.base_string() for p in \ 4831 me.get('processes')))) 4832 if me.get('has_mirror_process'): 4833 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4834 for proc in mirror_procs: 4835 legs = copy.copy(proc.get('legs_with_decays')) 4836 legs.insert(0, legs.pop(1)) 4837 proc.set("legs_with_decays", legs) 4838 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4839 mirror_procs)) 4840 else: 4841 lines.append("mirror none") 4842 4843 # Write the file 4844 writer.write("\n".join(lines)) 4845 4846 return True
4847 4848 #=========================================================================== 4849 # write_symswap_file 4850 #===========================================================================
4851 - def write_symswap_file(self, writer, ident_perms):
4852 """Write the file symswap.inc for MG4 by comparing diagrams using 4853 the internal matrix element value functionality.""" 4854 4855 lines = [] 4856 4857 # Write out lines for symswap.inc file (used to permute the 4858 # external leg momenta 4859 for iperm, perm in enumerate(ident_perms): 4860 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4861 (iperm+1, ",".join([str(i+1) for i in perm]))) 4862 lines.append("data nsym/%d/" % len(ident_perms)) 4863 4864 # Write the file 4865 writer.writelines(lines) 4866 4867 return True
4868 4869 #=========================================================================== 4870 # write_symfact_file 4871 #===========================================================================
4872 - def write_symfact_file(self, writer, symmetry):
4873 """Write the files symfact.dat for MG4 by comparing diagrams using 4874 the internal matrix element value functionality.""" 4875 4876 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4877 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4878 # Write out lines for symswap.inc file (used to permute the 4879 # external leg momenta 4880 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4881 # Write the file 4882 writer.write('\n'.join(lines)) 4883 writer.write('\n') 4884 4885 return True
4886 4887 #=========================================================================== 4888 # write_symperms_file 4889 #===========================================================================
4890 - def write_symperms_file(self, writer, perms):
4891 """Write the symperms.inc file for subprocess group, used for 4892 symmetric configurations""" 4893 4894 lines = [] 4895 for iperm, perm in enumerate(perms): 4896 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4897 (iperm+1, ",".join([str(i+1) for i in perm]))) 4898 4899 # Write the file 4900 writer.writelines(lines) 4901 4902 return True
4903 4904 #=========================================================================== 4905 # write_subproc 4906 #===========================================================================
4907 - def write_subproc(self, writer, subprocdir):
4908 """Append this subprocess to the subproc.mg file for MG4""" 4909 4910 # Write line to file 4911 writer.write(subprocdir + "\n") 4912 4913 return True
4914
4915 #=============================================================================== 4916 # ProcessExporterFortranMEGroup 4917 #=============================================================================== 4918 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
4919 """Class to take care of exporting a set of matrix elements to 4920 MadEvent subprocess group format.""" 4921 4922 matrix_file = "matrix_madevent_group_v4.inc" 4923 grouped_mode = 'madevent' 4924 #=========================================================================== 4925 # generate_subprocess_directory 4926 #===========================================================================
4927 - def generate_subprocess_directory(self, subproc_group, 4928 fortran_model, 4929 group_number):
4930 """Generate the Pn directory for a subprocess group in MadEvent, 4931 including the necessary matrix_N.f files, configs.inc and various 4932 other helper files.""" 4933 4934 assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \ 4935 "subproc_group object not SubProcessGroup" 4936 4937 if not self.model: 4938 self.model = subproc_group.get('matrix_elements')[0].\ 4939 get('processes')[0].get('model') 4940 4941 cwd = os.getcwd() 4942 path = pjoin(self.dir_path, 'SubProcesses') 4943 4944 os.chdir(path) 4945 pathdir = os.getcwd() 4946 4947 # Create the directory PN in the specified path 4948 subprocdir = "P%d_%s" % (subproc_group.get('number'), 4949 subproc_group.get('name')) 4950 try: 4951 os.mkdir(subprocdir) 4952 except os.error as error: 4953 logger.warning(error.strerror + " " + subprocdir) 4954 4955 try: 4956 os.chdir(subprocdir) 4957 except os.error: 4958 logger.error('Could not cd to directory %s' % subprocdir) 4959 return 0 4960 4961 logger.info('Creating files in directory %s' % subprocdir) 4962 4963 # Create the matrix.f files, auto_dsig.f files and all inc files 4964 # for all subprocesses in the group 4965 4966 maxamps = 0 4967 maxflows = 0 4968 tot_calls = 0 4969 4970 matrix_elements = subproc_group.get('matrix_elements') 4971 4972 # Add the driver.f, all grouped ME's must share the same number of 4973 # helicity configuration 4974 ncomb = matrix_elements[0].get_helicity_combinations() 4975 for me in matrix_elements[1:]: 4976 if ncomb!=me.get_helicity_combinations(): 4977 raise MadGraph5Error, "All grouped processes must share the "+\ 4978 "same number of helicity configurations." 4979 4980 filename = 'driver.f' 4981 self.write_driver(writers.FortranWriter(filename),ncomb, 4982 n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model']) 4983 4984 for ime, matrix_element in \ 4985 enumerate(matrix_elements): 4986 filename = 'matrix%d.f' % (ime+1) 4987 calls, ncolor = \ 4988 self.write_matrix_element_v4(writers.FortranWriter(filename), 4989 matrix_element, 4990 fortran_model, 4991 proc_id=str(ime+1), 4992 config_map=subproc_group.get('diagram_maps')[ime], 4993 subproc_number=group_number) 4994 4995 filename = 'auto_dsig%d.f' % (ime+1) 4996 self.write_auto_dsig_file(writers.FortranWriter(filename), 4997 matrix_element, 4998 str(ime+1)) 4999 5000 # Keep track of needed quantities 5001 tot_calls += int(calls) 5002 maxflows = max(maxflows, ncolor) 5003 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 5004 5005 # Draw diagrams 5006 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 5007 filename = "matrix%d.ps" % (ime+1) 5008 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 5009 get('diagrams'), 5010 filename, 5011 model = \ 5012 matrix_element.get('processes')[0].\ 5013 get('model'), 5014 amplitude=True) 5015 logger.info("Generating Feynman diagrams for " + \ 5016 matrix_element.get('processes')[0].nice_string()) 5017 plot.draw() 5018 5019 # Extract number of external particles 5020 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 5021 5022 # Generate a list of diagrams corresponding to each configuration 5023 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 5024 # If a subprocess has no diagrams for this config, the number is 0 5025 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 5026 5027 filename = 'auto_dsig.f' 5028 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 5029 subproc_group) 5030 5031 filename = 'coloramps.inc' 5032 self.write_coloramps_file(writers.FortranWriter(filename), 5033 subproc_diagrams_for_config, 5034 maxflows, 5035 matrix_elements) 5036 5037 filename = 'get_color.f' 5038 self.write_colors_file(writers.FortranWriter(filename), 5039 matrix_elements) 5040 5041 filename = 'config_subproc_map.inc' 5042 self.write_config_subproc_map_file(writers.FortranWriter(filename), 5043 subproc_diagrams_for_config) 5044 5045 filename = 'configs.inc' 5046 nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 5047 writers.FortranWriter(filename), 5048 subproc_group, 5049 subproc_diagrams_for_config) 5050 5051 filename = 'config_nqcd.inc' 5052 self.write_config_nqcd_file(writers.FortranWriter(filename), 5053 nqcd_list) 5054 5055 filename = 'decayBW.inc' 5056 self.write_decayBW_file(writers.FortranWriter(filename), 5057 s_and_t_channels) 5058 5059 filename = 'dname.mg' 5060 self.write_dname_file(writers.FortranWriter(filename), 5061 subprocdir) 5062 5063 filename = 'iproc.dat' 5064 self.write_iproc_file(writers.FortranWriter(filename), 5065 group_number) 5066 5067 filename = 'leshouche.inc' 5068 self.write_leshouche_file(writers.FortranWriter(filename), 5069 subproc_group) 5070 5071 filename = 'maxamps.inc' 5072 self.write_maxamps_file(writers.FortranWriter(filename), 5073 maxamps, 5074 maxflows, 5075 max([len(me.get('processes')) for me in \ 5076 matrix_elements]), 5077 len(matrix_elements)) 5078 5079 # Note that mg.sym is not relevant for this case 5080 filename = 'mg.sym' 5081 self.write_default_mg_sym_file(writers.FortranWriter(filename)) 5082 5083 filename = 'mirrorprocs.inc' 5084 self.write_mirrorprocs(writers.FortranWriter(filename), 5085 subproc_group) 5086 5087 filename = 'ncombs.inc' 5088 self.write_ncombs_file(writers.FortranWriter(filename), 5089 nexternal) 5090 5091 filename = 'nexternal.inc' 5092 self.write_nexternal_file(writers.FortranWriter(filename), 5093 nexternal, ninitial) 5094 5095 filename = 'ngraphs.inc' 5096 self.write_ngraphs_file(writers.FortranWriter(filename), 5097 nconfigs) 5098 5099 filename = 'pmass.inc' 5100 self.write_pmass_file(writers.FortranWriter(filename), 5101 matrix_element) 5102 5103 filename = 'props.inc' 5104 self.write_props_file(writers.FortranWriter(filename), 5105 matrix_element, 5106 s_and_t_channels) 5107 5108 filename = 'processes.dat' 5109 files.write_to_file(filename, 5110 self.write_processes_file, 5111 subproc_group) 5112 5113 # Find config symmetries and permutations 5114 symmetry, perms, ident_perms = \ 5115 diagram_symmetry.find_symmetry(subproc_group) 5116 5117 filename = 'symswap.inc' 5118 self.write_symswap_file(writers.FortranWriter(filename), 5119 ident_perms) 5120 5121 filename = 'symfact_orig.dat' 5122 self.write_symfact_file(open(filename, 'w'), symmetry) 5123 5124 filename = 'symperms.inc' 5125 self.write_symperms_file(writers.FortranWriter(filename), 5126 perms) 5127 5128 # Generate jpgs -> pass in make_html 5129 #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl')) 5130 5131 self.link_files_in_SubProcess(pjoin(pathdir,subprocdir)) 5132 5133 #import nexternal/leshouch in Source 5134 ln('nexternal.inc', '../../Source', log=False) 5135 ln('leshouche.inc', '../../Source', log=False) 
5136         ln('maxamps.inc', '../../Source', log=False)
5137 
5138         # Return to SubProcesses dir
5139         os.chdir(pathdir)
5140 
5141         # Add subprocess to subproc.mg
5142         filename = 'subproc.mg'
5143         files.append_to_file(filename,
5144                              self.write_subproc,
5145                              subprocdir)
5146 
5147         # Return to original dir
5148         os.chdir(cwd)
5149 
5150         if not tot_calls:
5151             tot_calls = 0
5152         return tot_calls
5153 5154 #=========================================================================== 5155 # write_super_auto_dsig_file 5156 #===========================================================================
5157 - def write_super_auto_dsig_file(self, writer, subproc_group):
5158 """Write the auto_dsig.f file selecting between the subprocesses 5159 in subprocess group mode""" 5160 5161 replace_dict = {} 5162 5163 # Extract version number and date from VERSION file 5164 info_lines = self.get_mg5_info_lines() 5165 replace_dict['info_lines'] = info_lines 5166 5167 matrix_elements = subproc_group.get('matrix_elements') 5168 5169 # Extract process info lines 5170 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 5171 matrix_elements]) 5172 replace_dict['process_lines'] = process_lines 5173 5174 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 5175 replace_dict['nexternal'] = nexternal 5176 5177 replace_dict['nsprocs'] = 2*len(matrix_elements) 5178 5179 # Generate dsig definition line 5180 dsig_def_line = "DOUBLE PRECISION " + \ 5181 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 5182 range(len(matrix_elements))]) 5183 replace_dict["dsig_def_line"] = dsig_def_line 5184 5185 # Generate dsig process lines 5186 call_dsig_proc_lines = [] 5187 for iproc in range(len(matrix_elements)): 5188 call_dsig_proc_lines.append(\ 5189 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 5190 {"num": iproc + 1, 5191 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 5192 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 5193 5194 ncomb=matrix_elements[0].get_helicity_combinations() 5195 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 5196 5197 if writer: 5198 file = open(pjoin(_file_path, \ 5199 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 5200 file = file % replace_dict 5201 5202 # Write the file 5203 writer.writelines(file) 5204 else: 5205 return replace_dict
5206 5207 #=========================================================================== 5208 # write_mirrorprocs 5209 #===========================================================================
5210 - def write_mirrorprocs(self, writer, subproc_group):
5211 """Write the mirrorprocs.inc file determining which processes have 5212 IS mirror process in subprocess group mode.""" 5213 5214 lines = [] 5215 bool_dict = {True: '.true.', False: '.false.'} 5216 matrix_elements = subproc_group.get('matrix_elements') 5217 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5218 (len(matrix_elements), 5219 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5220 me in matrix_elements]))) 5221 # Write the file 5222 writer.writelines(lines)
5223 5224 #=========================================================================== 5225 # write_addmothers 5226 #===========================================================================
5227 - def write_addmothers(self, writer):
5228 """Write the SubProcess/addmothers.f""" 5229 5230 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5231 5232 text = open(path).read() % {'iconfig': 'lconfig'} 5233 writer.write(text) 5234 5235 return True
5236 5237 5238 #=========================================================================== 5239 # write_coloramps_file 5240 #===========================================================================
5241 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5242 matrix_elements):
5243 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5244 5245 # Create a map from subprocess (matrix element) to a list of 5246 # the diagrams corresponding to each config 5247 5248 lines = [] 5249 5250 subproc_to_confdiag = {} 5251 for config in diagrams_for_config: 5252 for subproc, diag in enumerate(config): 5253 try: 5254 subproc_to_confdiag[subproc].append(diag) 5255 except KeyError: 5256 subproc_to_confdiag[subproc] = [diag] 5257 5258 for subproc in sorted(subproc_to_confdiag.keys()): 5259 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5260 matrix_elements[subproc], 5261 subproc + 1)) 5262 5263 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5264 (maxflows, 5265 len(diagrams_for_config), 5266 len(matrix_elements))) 5267 5268 # Write the file 5269 writer.writelines(lines) 5270 5271 return True
5272 5273 #=========================================================================== 5274 # write_config_subproc_map_file 5275 #===========================================================================
5276 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5277 """Write the config_subproc_map.inc file for subprocess groups""" 5278 5279 lines = [] 5280 # Output only configs that have some corresponding diagrams 5281 iconfig = 0 5282 for config in config_subproc_map: 5283 if set(config) == set([0]): 5284 continue 5285 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5286 (iconfig + 1, len(config), 5287 ",".join([str(i) for i in config]))) 5288 iconfig += 1 5289 # Write the file 5290 writer.writelines(lines) 5291 5292 return True
5293 5294 #=========================================================================== 5295 # read_write_good_hel 5296 #===========================================================================
5297 - def read_write_good_hel(self, ncomb):
5298 """return the code to read/write the good_hel common_block""" 5299 5300 convert = {'ncomb' : ncomb} 5301 5302 output = """ 5303 subroutine write_good_hel(stream_id) 5304 implicit none 5305 integer stream_id 5306 INTEGER NCOMB 5307 PARAMETER ( NCOMB=%(ncomb)d) 5308 LOGICAL GOODHEL(NCOMB, 2) 5309 INTEGER NTRY(2) 5310 common/BLOCK_GOODHEL/NTRY,GOODHEL 5311 write(stream_id,*) GOODHEL 5312 return 5313 end 5314 5315 5316 subroutine read_good_hel(stream_id) 5317 implicit none 5318 include 'genps.inc' 5319 integer stream_id 5320 INTEGER NCOMB 5321 PARAMETER ( NCOMB=%(ncomb)d) 5322 LOGICAL GOODHEL(NCOMB, 2) 5323 INTEGER NTRY(2) 5324 common/BLOCK_GOODHEL/NTRY,GOODHEL 5325 read(stream_id,*) GOODHEL 5326 NTRY(1) = MAXTRIES + 1 5327 NTRY(2) = MAXTRIES + 1 5328 return 5329 end 5330 5331 subroutine init_good_hel() 5332 implicit none 5333 INTEGER NCOMB 5334 PARAMETER ( NCOMB=%(ncomb)d) 5335 LOGICAL GOODHEL(NCOMB, 2) 5336 INTEGER NTRY(2) 5337 INTEGER I 5338 5339 do i=1,NCOMB 5340 GOODHEL(I,1) = .false. 5341 GOODHEL(I,2) = .false. 5342 enddo 5343 NTRY(1) = 0 5344 NTRY(2) = 0 5345 end 5346 5347 integer function get_maxsproc() 5348 implicit none 5349 include 'maxamps.inc' 5350 5351 get_maxsproc = maxsproc 5352 return 5353 end 5354 5355 """ % convert 5356 5357 return output
5358 5359 5360 5361 #=========================================================================== 5362 # write_configs_file 5363 #===========================================================================
5364 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
5365 """Write the configs.inc file with topology information for a 5366 subprocess group. Use the first subprocess with a diagram for each 5367 configuration.""" 5368 5369 matrix_elements = subproc_group.get('matrix_elements') 5370 model = matrix_elements[0].get('processes')[0].get('model') 5371 5372 diagrams = [] 5373 config_numbers = [] 5374 for iconfig, config in enumerate(diagrams_for_config): 5375 # Check if any diagrams correspond to this config 5376 if set(config) == set([0]): 5377 continue 5378 subproc_diags = [] 5379 for s,d in enumerate(config): 5380 if d: 5381 subproc_diags.append(matrix_elements[s].\ 5382 get('diagrams')[d-1]) 5383 else: 5384 subproc_diags.append(None) 5385 diagrams.append(subproc_diags) 5386 config_numbers.append(iconfig + 1) 5387 5388 # Extract number of external particles 5389 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 5390 5391 return len(diagrams), \ 5392 self.write_configs_file_from_diagrams(writer, diagrams, 5393 config_numbers, 5394 nexternal, ninitial, 5395 model)
5396 
5397     #===========================================================================
5398     # write_run_config_file
5399     #===========================================================================
5400 - def write_run_config_file(self, writer):
5401 """Write the run_configs.inc file for MadEvent""" 5402 5403 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5404 if self.proc_characteristic['loop_induced']: 5405 job_per_chan = 1 5406 else: 5407 job_per_chan = 2 5408 text = open(path).read() % {'chanperjob':job_per_chan} 5409 writer.write(text) 5410 return True
5411 5412 5413 #=========================================================================== 5414 # write_leshouche_file 5415 #===========================================================================
5416 - def write_leshouche_file(self, writer, subproc_group):
5417 """Write the leshouche.inc file for MG4""" 5418 5419 all_lines = [] 5420 5421 for iproc, matrix_element in \ 5422 enumerate(subproc_group.get('matrix_elements')): 5423 all_lines.extend(self.get_leshouche_lines(matrix_element, 5424 iproc)) 5425 # Write the file 5426 writer.writelines(all_lines) 5427 return True
5428 5429
5430 - def finalize(self,*args, **opts):
5431 5432 super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts) 5433 #ensure that the grouping information is on the correct value 5434 self.proc_characteristic['grouped_matrix'] = True
5435 5436 5437 #=============================================================================== 5438 # UFO_model_to_mg4 5439 #=============================================================================== 5440 5441 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
5442 5443 -class UFO_model_to_mg4(object):
5444 """ A converter of the UFO-MG5 Model to the MG4 format """ 5445 5446 # The list below shows the only variables the user is allowed to change by 5447 # himself for each PS point. If he changes any other, then calling 5448 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 5449 # correctly account for the change. 5450 PS_dependent_key = ['aS','MU_R'] 5451 mp_complex_format = 'complex*32' 5452 mp_real_format = 'real*16' 5453 # Warning, it is crucial none of the couplings/parameters of the model 5454 # starts with this prefix. I should add a check for this. 5455 # You can change it as the global variable to check_param_card.ParamCard 5456 mp_prefix = check_param_card.ParamCard.mp_prefix 5457
5458 - def __init__(self, model, output_path, opt=None):
5459 """ initialization of the objects """ 5460 5461 self.model = model 5462 self.model_name = model['name'] 5463 self.dir_path = output_path 5464 5465 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5466 'loop_induced': False} 5467 if opt: 5468 self.opt.update(opt) 5469 5470 self.coups_dep = [] # (name, expression, type) 5471 self.coups_indep = [] # (name, expression, type) 5472 self.params_dep = [] # (name, expression, type) 5473 self.params_indep = [] # (name, expression, type) 5474 self.params_ext = [] # external parameter 5475 self.p_to_f = parsers.UFOExpressionParserFortran(self.model) 5476 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
5477 
5478 - def pass_parameter_to_case_insensitive(self):
5479 """modify the parameter if some of them are identical up to the case""" 5480 5481 lower_dict={} 5482 duplicate = set() 5483 keys = self.model['parameters'].keys() 5484 for key in keys: 5485 for param in self.model['parameters'][key]: 5486 lower_name = param.name.lower() 5487 if not lower_name: 5488 continue 5489 try: 5490 lower_dict[lower_name].append(param) 5491 except KeyError,error: 5492 lower_dict[lower_name] = [param] 5493 else: 5494 duplicate.add(lower_name) 5495 logger.debug('%s is define both as lower case and upper case.' 5496 % lower_name) 5497 if not duplicate: 5498 return 5499 5500 re_expr = r'''\b(%s)\b''' 5501 to_change = [] 5502 change={} 5503 for value in duplicate: 5504 for i, var in enumerate(lower_dict[value]): 5505 to_change.append(var.name) 5506 new_name = '%s%s' % (var.name.lower(), 5507 ('__%d'%(i+1) if i>0 else '')) 5508 change[var.name] = new_name 5509 var.name = new_name 5510 5511 # Apply the modification to the map_CTcoup_CTparam of the model 5512 # if it has one (giving for each coupling the CT parameters whcih 5513 # are necessary and which should be exported to the model. 5514 if hasattr(self.model,'map_CTcoup_CTparam'): 5515 for coup, ctparams in self.model.map_CTcoup_CTparam: 5516 for i, ctparam in enumerate(ctparams): 5517 try: 5518 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5519 except KeyError: 5520 pass 5521 5522 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5523 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5524 5525 # change parameters 5526 for key in keys: 5527 if key == ('external',): 5528 continue 5529 for param in self.model['parameters'][key]: 5530 param.expr = rep_pattern.sub(replace, param.expr) 5531 5532 # change couplings 5533 for key in self.model['couplings'].keys(): 5534 for coup in self.model['couplings'][key]: 5535 coup.expr = rep_pattern.sub(replace, coup.expr) 5536 5537 # change mass/width 5538 for part in self.model['particles']: 5539 if str(part.get('mass')) in to_change: 5540 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5541 if str(part.get('width')) in to_change: 5542 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5543
5544 - def refactorize(self, wanted_couplings = []):
5545 """modify the couplings to fit with MG4 convention """ 5546 5547 # Keep only separation in alphaS 5548 keys = self.model['parameters'].keys() 5549 keys.sort(key=len) 5550 for key in keys: 5551 to_add = [o for o in self.model['parameters'][key] if o.name] 5552 5553 if key == ('external',): 5554 self.params_ext += to_add 5555 elif any([(k in key) for k in self.PS_dependent_key]): 5556 self.params_dep += to_add 5557 else: 5558 self.params_indep += to_add 5559 # same for couplings 5560 keys = self.model['couplings'].keys() 5561 keys.sort(key=len) 5562 for key, coup_list in self.model['couplings'].items(): 5563 if any([(k in key) for k in self.PS_dependent_key]): 5564 self.coups_dep += [c for c in coup_list if 5565 (not wanted_couplings or c.name in \ 5566 wanted_couplings)] 5567 else: 5568 self.coups_indep += [c for c in coup_list if 5569 (not wanted_couplings or c.name in \ 5570 wanted_couplings)] 5571 5572 # MG4 use G and not aS as it basic object for alphas related computation 5573 #Pass G in the independant list 5574 if 'G' in self.params_dep: 5575 index = self.params_dep.index('G') 5576 G = self.params_dep.pop(index) 5577 # G.expr = '2*cmath.sqrt(as*pi)' 5578 # self.params_indep.insert(0, self.params_dep.pop(index)) 5579 # No need to add it if not defined 5580 5581 if 'aS' not in self.params_ext: 5582 logger.critical('aS not define as external parameter adding it!') 5583 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5584 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5585 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5586 - def build(self, wanted_couplings = [], full=True):
5587 """modify the couplings to fit with MG4 convention and creates all the 5588 different files""" 5589 5590 self.pass_parameter_to_case_insensitive() 5591 self.refactorize(wanted_couplings) 5592 5593 # write the files 5594 if full: 5595 if wanted_couplings: 5596 # extract the wanted ct parameters 5597 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5598 self.write_all()
5599 5600
5601 - def open(self, name, comment='c', format='default'):
5602 """ Open the file name in the correct directory and with a valid 5603 header.""" 5604 5605 file_path = pjoin(self.dir_path, name) 5606 5607 if format == 'fortran': 5608 fsock = writers.FortranWriter(file_path, 'w') 5609 else: 5610 fsock = open(file_path, 'w') 5611 5612 file.writelines(fsock, comment * 77 + '\n') 5613 file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \ 5614 {'comment': comment + (6 - len(comment)) * ' '}) 5615 file.writelines(fsock, comment * 77 + '\n\n') 5616 return fsock
5617 5618
5619 - def write_all(self):
5620 """ write all the files """ 5621 #write the part related to the external parameter 5622 self.create_ident_card() 5623 self.create_param_read() 5624 5625 #write the definition of the parameter 5626 self.create_input() 5627 self.create_intparam_def(dp=True,mp=False) 5628 if self.opt['mp']: 5629 self.create_intparam_def(dp=False,mp=True) 5630 5631 # definition of the coupling. 5632 self.create_actualize_mp_ext_param_inc() 5633 self.create_coupl_inc() 5634 self.create_write_couplings() 5635 self.create_couplings() 5636 5637 # the makefile 5638 self.create_makeinc() 5639 self.create_param_write() 5640 5641 # The model functions 5642 self.create_model_functions_inc() 5643 self.create_model_functions_def() 5644 5645 # The param_card.dat 5646 self.create_param_card() 5647 5648 5649 # All the standard files 5650 self.copy_standard_file()
5651 5652 ############################################################################ 5653 ## ROUTINE CREATING THE FILES ############################################ 5654 ############################################################################ 5655
5656 - def copy_standard_file(self):
5657 """Copy the standard files for the fortran model.""" 5658 5659 #copy the library files 5660 file_to_link = ['formats.inc','printout.f', \ 5661 'rw_para.f', 'testprog.f'] 5662 5663 for filename in file_to_link: 5664 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 5665 self.dir_path) 5666 5667 file = open(os.path.join(MG5DIR,\ 5668 'models/template_files/fortran/rw_para.f')).read() 5669 5670 includes=["include \'coupl.inc\'","include \'input.inc\'", 5671 "include \'model_functions.inc\'"] 5672 if self.opt['mp']: 5673 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 5674 # In standalone and madloop we do no use the compiled param card but 5675 # still parse the .dat one so we must load it. 5676 if self.opt['loop_induced']: 5677 #loop induced follow MadEvent way to handle the card. 5678 load_card = '' 5679 lha_read_filename='lha_read.f' 5680 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 5681 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5682 lha_read_filename='lha_read_mp.f' 5683 elif self.opt['export_format'].startswith('standalone') \ 5684 or self.opt['export_format'] in ['madweight', 'plugin']\ 5685 or self.opt['export_format'].startswith('matchbox'): 5686 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5687 lha_read_filename='lha_read.f' 5688 else: 5689 load_card = '' 5690 lha_read_filename='lha_read.f' 5691 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 5692 os.path.join(self.dir_path,'lha_read.f')) 5693 5694 file=file%{'includes':'\n '.join(includes), 5695 'load_card':load_card} 5696 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 5697 writer.writelines(file) 5698 writer.close() 5699 5700 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5701 or self.opt['loop_induced']: 5702 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 5703 self.dir_path + '/makefile') 5704 if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 5705 path = pjoin(self.dir_path, 'makefile') 5706 text = open(path).read() 5707 text = text.replace('madevent','aMCatNLO') 5708 open(path, 'w').writelines(text) 5709 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 5710 'madloop','madloop_optimized', 'standalone_rw', 5711 'madweight','matchbox','madloop_matchbox', 'plugin']: 5712 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 5713 self.dir_path + '/makefile') 5714 #elif self.opt['export_format'] in []: 5715 #pass 5716 else: 5717 raise MadGraph5Error('Unknown format')
5718
5719 - def create_coupl_inc(self):
5720 """ write coupling.inc """ 5721 5722 fsock = self.open('coupl.inc', format='fortran') 5723 if self.opt['mp']: 5724 mp_fsock = self.open('mp_coupl.inc', format='fortran') 5725 mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\ 5726 format='fortran') 5727 5728 # Write header 5729 header = """double precision G 5730 common/strong/ G 5731 5732 double complex gal(2) 5733 common/weak/ gal 5734 5735 double precision MU_R 5736 common/rscale/ MU_R 5737 5738 double precision Nf 5739 parameter(Nf=%d) 5740 """ % self.model.get_nflav() 5741 5742 fsock.writelines(header) 5743 5744 if self.opt['mp']: 5745 header = """%(real_mp_format)s %(mp_prefix)sG 5746 common/MP_strong/ %(mp_prefix)sG 5747 5748 %(complex_mp_format)s %(mp_prefix)sgal(2) 5749 common/MP_weak/ %(mp_prefix)sgal 5750 5751 %(complex_mp_format)s %(mp_prefix)sMU_R 5752 common/MP_rscale/ %(mp_prefix)sMU_R 5753 5754 """ 5755 5756 5757 5758 5759 mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format, 5760 'complex_mp_format':self.mp_complex_format, 5761 'mp_prefix':self.mp_prefix}) 5762 mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format, 5763 'complex_mp_format':self.mp_complex_format, 5764 'mp_prefix':''}) 5765 5766 # Write the Mass definition/ common block 5767 masses = set() 5768 widths = set() 5769 if self.opt['complex_mass']: 5770 complex_mass = set() 5771 5772 for particle in self.model.get('particles'): 5773 #find masses 5774 one_mass = particle.get('mass') 5775 if one_mass.lower() != 'zero': 5776 masses.add(one_mass) 5777 5778 # find width 5779 one_width = particle.get('width') 5780 if one_width.lower() != 'zero': 5781 widths.add(one_width) 5782 if self.opt['complex_mass'] and one_mass.lower() != 'zero': 5783 complex_mass.add('CMASS_%s' % one_mass) 5784 5785 if masses: 5786 fsock.writelines('double precision '+','.join(masses)+'\n') 5787 fsock.writelines('common/masses/ '+','.join(masses)+'\n\n') 5788 if self.opt['mp']: 5789 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5790 ','.join(masses)+'\n') 5791 mp_fsock_same_name.writelines('common/MP_masses/ '+\ 5792 ','.join(masses)+'\n\n') 5793 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5794 self.mp_prefix+m for m in masses])+'\n') 5795 mp_fsock.writelines('common/MP_masses/ '+\ 5796 ','.join([self.mp_prefix+m for m in masses])+'\n\n') 5797 5798 if widths: 5799 fsock.writelines('double precision '+','.join(widths)+'\n') 5800 fsock.writelines('common/widths/ '+','.join(widths)+'\n\n') 5801 if self.opt['mp']: 5802 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5803 ','.join(widths)+'\n') 5804 mp_fsock_same_name.writelines('common/MP_widths/ '+\ 5805 ','.join(widths)+'\n\n') 5806 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5807 self.mp_prefix+w for w in widths])+'\n') 5808 mp_fsock.writelines('common/MP_widths/ '+\ 5809 ','.join([self.mp_prefix+w for w in widths])+'\n\n') 5810 5811 # Write the Couplings 5812 coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep] 5813 fsock.writelines('double complex '+', '.join(coupling_list)+'\n') 5814 fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n') 5815 if self.opt['mp']: 5816 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5817 ','.join(coupling_list)+'\n') 5818 mp_fsock_same_name.writelines('common/MP_couplings/ '+\ 5819 ','.join(coupling_list)+'\n\n') 5820 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5821 self.mp_prefix+c for c in coupling_list])+'\n') 5822 mp_fsock.writelines('common/MP_couplings/ '+\ 
5823 ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n') 5824 5825 # Write complex mass for complex mass scheme (if activated) 5826 if self.opt['complex_mass'] and complex_mass: 5827 fsock.writelines('double complex '+', '.join(complex_mass)+'\n') 5828 fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n') 5829 if self.opt['mp']: 5830 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5831 ','.join(complex_mass)+'\n') 5832 mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\ 5833 ','.join(complex_mass)+'\n\n') 5834 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5835 self.mp_prefix+cm for cm in complex_mass])+'\n') 5836 mp_fsock.writelines('common/MP_complex_mass/ '+\ 5837 ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5838
5839 - def create_write_couplings(self):
5840 """ write the file coupl_write.inc """ 5841 5842 fsock = self.open('coupl_write.inc', format='fortran') 5843 5844 fsock.writelines("""write(*,*) ' Couplings of %s' 5845 write(*,*) ' ---------------------------------' 5846 write(*,*) ' '""" % self.model_name) 5847 def format(coupl): 5848 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5849 5850 # Write the Couplings 5851 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5852 fsock.writelines('\n'.join(lines)) 5853 5854
5855 - def create_input(self):
5856 """create input.inc containing the definition of the parameters""" 5857 5858 fsock = self.open('input.inc', format='fortran') 5859 if self.opt['mp']: 5860 mp_fsock = self.open('mp_input.inc', format='fortran') 5861 5862 #find mass/ width since they are already define 5863 already_def = set() 5864 for particle in self.model.get('particles'): 5865 already_def.add(particle.get('mass').lower()) 5866 already_def.add(particle.get('width').lower()) 5867 if self.opt['complex_mass']: 5868 already_def.add('cmass_%s' % particle.get('mass').lower()) 5869 5870 is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \ 5871 name.lower() not in already_def 5872 5873 real_parameters = [param.name for param in self.params_dep + 5874 self.params_indep if param.type == 'real' 5875 and is_valid(param.name)] 5876 5877 real_parameters += [param.name for param in self.params_ext 5878 if param.type == 'real'and 5879 is_valid(param.name)] 5880 5881 # check the parameter is a CT parameter or not 5882 # if yes, just use the needed ones 5883 real_parameters = [param for param in real_parameters \ 5884 if self.check_needed_param(param)] 5885 5886 fsock.writelines('double precision '+','.join(real_parameters)+'\n') 5887 fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n') 5888 if self.opt['mp']: 5889 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5890 self.mp_prefix+p for p in real_parameters])+'\n') 5891 mp_fsock.writelines('common/MP_params_R/ '+','.join([\ 5892 self.mp_prefix+p for p in real_parameters])+'\n\n') 5893 5894 complex_parameters = [param.name for param in self.params_dep + 5895 self.params_indep if param.type == 'complex' and 5896 is_valid(param.name)] 5897 5898 # check the parameter is a CT parameter or not 5899 # if yes, just use the needed ones 5900 complex_parameters = [param for param in complex_parameters \ 5901 if self.check_needed_param(param)] 5902 5903 if complex_parameters: 5904 fsock.writelines('double complex '+','.join(complex_parameters)+'\n') 5905 fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n') 5906 if self.opt['mp']: 5907 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5908 self.mp_prefix+p for p in complex_parameters])+'\n') 5909 mp_fsock.writelines('common/MP_params_C/ '+','.join([\ 5910 self.mp_prefix+p for p in complex_parameters])+'\n\n')
5911
5912 - def check_needed_param(self, param):
5913 """ Returns whether the parameter in argument is needed for this 5914 specific computation or not.""" 5915 5916 # If this is a leading order model or if there was no CT parameter 5917 # employed in this NLO model, one can directly return that the 5918 # parameter is needed since only CTParameters are filtered. 5919 if not hasattr(self, 'allCTparameters') or \ 5920 self.allCTparameters is None or self.usedCTparameters is None or \ 5921 len(self.allCTparameters)==0: 5922 return True 5923 5924 # We must allow the conjugate shorthand for the complex parameter as 5925 # well so we check wether either the parameter name or its name with 5926 # 'conjg__' substituted with '' is present in the list. 5927 # This is acceptable even if some parameter had an original name 5928 # including 'conjg__' in it, because at worst we export a parameter 5929 # was not needed. 5930 param = param.lower() 5931 cjg_param = param.replace('conjg__','',1) 5932 5933 # First make sure it is a CTparameter 5934 if param not in self.allCTparameters and \ 5935 cjg_param not in self.allCTparameters: 5936 return True 5937 5938 # Now check if it is in the list of CTparameters actually used 5939 return (param in self.usedCTparameters or \ 5940 cjg_param in self.usedCTparameters)
5941
5942 - def extract_needed_CTparam(self,wanted_couplings=[]):
5943 """ Extract what are the needed CT parameters given the wanted_couplings""" 5944 5945 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5946 # Setting these lists to none wil disable the filtering in 5947 # check_needed_param 5948 self.allCTparameters = None 5949 self.usedCTparameters = None 5950 return 5951 5952 # All CTparameters appearin in all CT couplings 5953 allCTparameters=self.model.map_CTcoup_CTparam.values() 5954 # Define in this class the list of all CT parameters 5955 self.allCTparameters=list(\ 5956 set(itertools.chain.from_iterable(allCTparameters))) 5957 5958 # All used CT couplings 5959 w_coupls = [coupl.lower() for coupl in wanted_couplings] 5960 allUsedCTCouplings = [coupl for coupl in 5961 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 5962 5963 # Now define the list of all CT parameters that are actually used 5964 self.usedCTparameters=list(\ 5965 set(itertools.chain.from_iterable([ 5966 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 5967 ]))) 5968 5969 # Now at last, make these list case insensitive 5970 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 5971 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
5972
5973 - def create_intparam_def(self, dp=True, mp=False):
5974 """ create intparam_definition.inc setting the internal parameters. 5975 Output the double precision and/or the multiple precision parameters 5976 depending on the parameters dp and mp. If mp only, then the file names 5977 get the 'mp_' prefix. 5978 """ 5979 5980 fsock = self.open('%sintparam_definition.inc'% 5981 ('mp_' if mp and not dp else ''), format='fortran') 5982 5983 fsock.write_comments(\ 5984 "Parameters that should not be recomputed event by event.\n") 5985 fsock.writelines("if(readlha) then\n") 5986 if dp: 5987 fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n") 5988 if mp: 5989 fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n") 5990 5991 for param in self.params_indep: 5992 if param.name == 'ZERO': 5993 continue 5994 # check whether the parameter is a CT parameter 5995 # if yes,just used the needed ones 5996 if not self.check_needed_param(param.name): 5997 continue 5998 if dp: 5999 fsock.writelines("%s = %s\n" % (param.name, 6000 self.p_to_f.parse(param.expr))) 6001 if mp: 6002 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 6003 self.mp_p_to_f.parse(param.expr))) 6004 6005 fsock.writelines('endif') 6006 6007 fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n') 6008 if dp: 6009 fsock.writelines("aS = G**2/4/pi\n") 6010 if mp: 6011 fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n") 6012 for param in self.params_dep: 6013 # check whether the parameter is a CT parameter 6014 # if yes,just used the needed ones 6015 if not self.check_needed_param(param.name): 6016 continue 6017 if dp: 6018 fsock.writelines("%s = %s\n" % (param.name, 6019 self.p_to_f.parse(param.expr))) 6020 elif mp: 6021 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 6022 self.mp_p_to_f.parse(param.expr))) 6023 6024 fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n") 6025 if ('aEWM1',) in self.model['parameters']: 6026 if dp: 6027 fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1) 6028 gal(2) = 1d0 6029 """) 6030 elif mp: 6031 fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1) 6032 %(mp_prefix)sgal(2) = 1d0 6033 """ %{'mp_prefix':self.mp_prefix}) 6034 pass 6035 # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable 6036 elif ('Gf',) in self.model['parameters']: 6037 if dp: 6038 fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf) 6039 gal(2) = 1d0 6040 """) 6041 elif mp: 6042 fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf) 6043 %(mp_prefix)sgal(2) = 1d0 6044 """ %{'mp_prefix':self.mp_prefix}) 6045 pass 6046 else: 6047 if dp: 6048 logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE') 6049 fsock.writelines(""" gal(1) = 1d0 6050 gal(2) = 1d0 6051 """) 6052 elif mp: 6053 fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16 6054 %(mp_prefix)sgal(2) = 1e0_16 6055 """%{'mp_prefix':self.mp_prefix})
6056 6057
6058 - def create_couplings(self):
6059 """ create couplings.f and all couplingsX.f """ 6060 6061 nb_def_by_file = 25 6062 6063 self.create_couplings_main(nb_def_by_file) 6064 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6065 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6066 6067 for i in range(nb_coup_indep): 6068 # For the independent couplings, we compute the double and multiple 6069 # precision ones together 6070 data = self.coups_indep[nb_def_by_file * i: 6071 min(len(self.coups_indep), nb_def_by_file * (i+1))] 6072 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 6073 6074 for i in range(nb_coup_dep): 6075 # For the dependent couplings, we compute the double and multiple 6076 # precision ones in separate subroutines. 6077 data = self.coups_dep[nb_def_by_file * i: 6078 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6079 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6080 dp=True,mp=False) 6081 if self.opt['mp']: 6082 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6083 dp=False,mp=True)
6084 6085
6086 - def create_couplings_main(self, nb_def_by_file=25):
6087 """ create couplings.f """ 6088 6089 fsock = self.open('couplings.f', format='fortran') 6090 6091 fsock.writelines("""subroutine coup() 6092 6093 implicit none 6094 double precision PI, ZERO 6095 logical READLHA 6096 parameter (PI=3.141592653589793d0) 6097 parameter (ZERO=0d0) 6098 include \'model_functions.inc\'""") 6099 if self.opt['mp']: 6100 fsock.writelines("""%s MP__PI, MP__ZERO 6101 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6102 parameter (MP__ZERO=0e0_16) 6103 include \'mp_input.inc\' 6104 include \'mp_coupl.inc\' 6105 """%self.mp_real_format) 6106 fsock.writelines("""include \'input.inc\' 6107 include \'coupl.inc\' 6108 READLHA = .true. 6109 include \'intparam_definition.inc\'""") 6110 if self.opt['mp']: 6111 fsock.writelines("""include \'mp_intparam_definition.inc\'\n""") 6112 6113 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6114 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6115 6116 fsock.writelines('\n'.join(\ 6117 ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)])) 6118 6119 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6120 6121 fsock.writelines('\n'.join(\ 6122 ['call coup%s()' % (nb_coup_indep + i + 1) \ 6123 for i in range(nb_coup_dep)])) 6124 if self.opt['mp']: 6125 fsock.writelines('\n'.join(\ 6126 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 6127 for i in range(nb_coup_dep)])) 6128 fsock.writelines('''\n return \n end\n''') 6129 6130 fsock.writelines("""subroutine update_as_param() 6131 6132 implicit none 6133 double precision PI, ZERO 6134 logical READLHA 6135 parameter (PI=3.141592653589793d0) 6136 parameter (ZERO=0d0) 6137 include \'model_functions.inc\'""") 6138 fsock.writelines("""include \'input.inc\' 6139 include \'coupl.inc\' 6140 READLHA = .false.""") 6141 fsock.writelines(""" 6142 include \'intparam_definition.inc\'\n 6143 """) 6144 6145 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6146 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6147 6148 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6149 6150 fsock.writelines('\n'.join(\ 6151 ['call coup%s()' % (nb_coup_indep + i + 1) \ 6152 for i in range(nb_coup_dep)])) 6153 fsock.writelines('''\n return \n end\n''') 6154 6155 fsock.writelines("""subroutine update_as_param2(mu_r2,as2) 6156 6157 implicit none 6158 double precision PI 6159 parameter (PI=3.141592653589793d0) 6160 double precision mu_r2, as2 6161 include \'model_functions.inc\'""") 6162 fsock.writelines("""include \'input.inc\' 6163 include \'coupl.inc\'""") 6164 fsock.writelines(""" 6165 if (mu_r2.gt.0d0) MU_R = mu_r2 6166 G = SQRT(4.0d0*PI*AS2) 6167 AS = as2 6168 6169 CALL UPDATE_AS_PARAM() 6170 """) 6171 fsock.writelines('''\n return \n end\n''') 6172 6173 if self.opt['mp']: 6174 fsock.writelines("""subroutine mp_update_as_param() 6175 6176 implicit none 6177 logical READLHA 6178 include \'model_functions.inc\'""") 6179 fsock.writelines("""%s MP__PI, MP__ZERO 6180 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6181 parameter (MP__ZERO=0e0_16) 6182 include \'mp_input.inc\' 6183 include \'mp_coupl.inc\' 6184 """%self.mp_real_format) 6185 fsock.writelines("""include \'input.inc\' 6186 include \'coupl.inc\' 6187 include \'actualize_mp_ext_params.inc\' 6188 READLHA = .false. 
6189 include \'mp_intparam_definition.inc\'\n 6190 """) 6191 6192 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6193 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6194 6195 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6196 6197 fsock.writelines('\n'.join(\ 6198 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 6199 for i in range(nb_coup_dep)])) 6200 fsock.writelines('''\n return \n end\n''')
6201
6202 - def create_couplings_part(self, nb_file, data, dp=True, mp=False):
6203 """ create couplings[nb_file].f containing information coming from data. 6204 Outputs the computation of the double precision and/or the multiple 6205 precision couplings depending on the parameters dp and mp. 6206 If mp is True and dp is False, then the prefix 'MP_' is appended to the 6207 filename and subroutine name. 6208 """ 6209 6210 fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '', 6211 nb_file), format='fortran') 6212 fsock.writelines("""subroutine %scoup%s() 6213 6214 implicit none 6215 include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file)) 6216 if dp: 6217 fsock.writelines(""" 6218 double precision PI, ZERO 6219 parameter (PI=3.141592653589793d0) 6220 parameter (ZERO=0d0) 6221 include 'input.inc' 6222 include 'coupl.inc'""") 6223 if mp: 6224 fsock.writelines("""%s MP__PI, MP__ZERO 6225 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6226 parameter (MP__ZERO=0e0_16) 6227 include \'mp_input.inc\' 6228 include \'mp_coupl.inc\' 6229 """%self.mp_real_format) 6230 6231 for coupling in data: 6232 if dp: 6233 fsock.writelines('%s = %s' % (coupling.name, 6234 self.p_to_f.parse(coupling.expr))) 6235 if mp: 6236 fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name, 6237 self.mp_p_to_f.parse(coupling.expr))) 6238 fsock.writelines('end')
6239
6240 - def create_model_functions_inc(self):
6241 """ Create model_functions.inc which contains the various declarations 6242 of auxiliary functions which might be used in the couplings expressions 6243 """ 6244 6245 additional_fct = [] 6246 # check for functions define in the UFO model 6247 ufo_fct = self.model.get('functions') 6248 if ufo_fct: 6249 for fct in ufo_fct: 6250 # already handle by default 6251 if fct.name not in ["complexconjugate", "re", "im", "sec", 6252 "csc", "asec", "acsc", "theta_function", "cond", 6253 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot", 6254 "grreglog","regsqrt"]: 6255 additional_fct.append(fct.name) 6256 6257 6258 fsock = self.open('model_functions.inc', format='fortran') 6259 fsock.writelines("""double complex cond 6260 double complex condif 6261 double complex reglog 6262 double complex reglogp 6263 double complex reglogm 6264 double complex recms 6265 double complex arg 6266 double complex grreglog 6267 double complex regsqrt 6268 %s 6269 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 6270 6271 6272 if self.opt['mp']: 6273 fsock.writelines("""%(complex_mp_format)s mp_cond 6274 %(complex_mp_format)s mp_condif 6275 %(complex_mp_format)s mp_reglog 6276 %(complex_mp_format)s mp_reglogp 6277 %(complex_mp_format)s mp_reglogm 6278 %(complex_mp_format)s mp_recms 6279 %(complex_mp_format)s mp_arg 6280 %(complex_mp_format)s mp_grreglog 6281 %(complex_mp_format)s mp_regsqrt 6282 %(additional)s 6283 """ %\ 6284 {"additional": "\n".join([" %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]), 6285 'complex_mp_format':self.mp_complex_format 6286 })
6287
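# --------------------------------------------------------------------------
# Illustrative sketch: which UFO functions receive an extra declaration in
# model_functions.inc.  The built-in list is copied from
# create_model_functions_inc above; 'myformfactor' is an invented example.
_BUILTIN_MODEL_FCTS = set(["complexconjugate", "re", "im", "sec", "csc",
                           "asec", "acsc", "theta_function", "cond", "condif",
                           "reglogp", "reglogm", "reglog", "recms", "arg",
                           "cot", "grreglog", "regsqrt"])
def _extra_declarations(ufo_fct_names):
    """Return the 'double complex <name>' lines added for user UFO functions."""
    return ["double complex %s" % name
            for name in ufo_fct_names if name not in _BUILTIN_MODEL_FCTS]
# _extra_declarations(['myformfactor', 'reglog']) -> ['double complex myformfactor']
# --------------------------------------------------------------------------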
6288 - def create_model_functions_def(self):
6289 """ Create model_functions.f which contains the various definitions 6290 of auxiliary functions which might be used in the couplings expressions 6291 Add the functions.f functions for formfactors support 6292 """ 6293 6294 fsock = self.open('model_functions.f', format='fortran') 6295 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 6296 implicit none 6297 double complex condition,truecase,falsecase 6298 if(condition.eq.(0.0d0,0.0d0)) then 6299 cond=truecase 6300 else 6301 cond=falsecase 6302 endif 6303 end 6304 6305 double complex function condif(condition,truecase,falsecase) 6306 implicit none 6307 logical condition 6308 double complex truecase,falsecase 6309 if(condition) then 6310 condif=truecase 6311 else 6312 condif=falsecase 6313 endif 6314 end 6315 6316 double complex function recms(condition,expr) 6317 implicit none 6318 logical condition 6319 double complex expr 6320 if(condition)then 6321 recms=expr 6322 else 6323 recms=dcmplx(dble(expr)) 6324 endif 6325 end 6326 6327 double complex function reglog(arg) 6328 implicit none 6329 double complex TWOPII 6330 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6331 double complex arg 6332 if(arg.eq.(0.0d0,0.0d0)) then 6333 reglog=(0.0d0,0.0d0) 6334 else 6335 reglog=log(arg) 6336 endif 6337 end 6338 6339 double complex function reglogp(arg) 6340 implicit none 6341 double complex TWOPII 6342 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6343 double complex arg 6344 if(arg.eq.(0.0d0,0.0d0))then 6345 reglogp=(0.0d0,0.0d0) 6346 else 6347 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 6348 reglogp=log(arg) + TWOPII 6349 else 6350 reglogp=log(arg) 6351 endif 6352 endif 6353 end 6354 6355 double complex function reglogm(arg) 6356 implicit none 6357 double complex TWOPII 6358 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6359 double complex arg 6360 if(arg.eq.(0.0d0,0.0d0))then 6361 reglogm=(0.0d0,0.0d0) 6362 else 6363 if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 6364 reglogm=log(arg) - TWOPII 6365 else 6366 reglogm=log(arg) 6367 endif 6368 endif 6369 end 6370 6371 double complex function regsqrt(arg_in) 6372 implicit none 6373 double complex arg_in 6374 double complex arg 6375 arg=arg_in 6376 if(dabs(dimag(arg)).eq.0.0d0)then 6377 arg=dcmplx(dble(arg),0.0d0) 6378 endif 6379 if(dabs(dble(arg)).eq.0.0d0)then 6380 arg=dcmplx(0.0d0,dimag(arg)) 6381 endif 6382 regsqrt=sqrt(arg) 6383 end 6384 6385 double complex function grreglog(logsw,expr1_in,expr2_in) 6386 implicit none 6387 double complex TWOPII 6388 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6389 double complex expr1_in,expr2_in 6390 double complex expr1,expr2 6391 double precision logsw 6392 double precision imagexpr 6393 logical firstsheet 6394 expr1=expr1_in 6395 expr2=expr2_in 6396 if(dabs(dimag(expr1)).eq.0.0d0)then 6397 expr1=dcmplx(dble(expr1),0.0d0) 6398 endif 6399 if(dabs(dble(expr1)).eq.0.0d0)then 6400 expr1=dcmplx(0.0d0,dimag(expr1)) 6401 endif 6402 if(dabs(dimag(expr2)).eq.0.0d0)then 6403 expr2=dcmplx(dble(expr2),0.0d0) 6404 endif 6405 if(dabs(dble(expr2)).eq.0.0d0)then 6406 expr2=dcmplx(0.0d0,dimag(expr2)) 6407 endif 6408 if(expr1.eq.(0.0d0,0.0d0))then 6409 grreglog=(0.0d0,0.0d0) 6410 else 6411 imagexpr=dimag(expr1)*dimag(expr2) 6412 firstsheet=imagexpr.ge.0.0d0 6413 firstsheet=firstsheet.or.dble(expr1).ge.0.0d0 6414 firstsheet=firstsheet.or.dble(expr2).ge.0.0d0 6415 if(firstsheet)then 6416 grreglog=log(expr1) 6417 else 6418 if(dimag(expr1).gt.0.0d0)then 6419 grreglog=log(expr1) - 
logsw*TWOPII 6420 else 6421 grreglog=log(expr1) + logsw*TWOPII 6422 endif 6423 endif 6424 endif 6425 end 6426 6427 double complex function arg(comnum) 6428 implicit none 6429 double complex comnum 6430 double complex iim 6431 iim = (0.0d0,1.0d0) 6432 if(comnum.eq.(0.0d0,0.0d0)) then 6433 arg=(0.0d0,0.0d0) 6434 else 6435 arg=log(comnum/abs(comnum))/iim 6436 endif 6437 end""") 6438 if self.opt['mp']: 6439 fsock.writelines(""" 6440 6441 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 6442 implicit none 6443 %(complex_mp_format)s condition,truecase,falsecase 6444 if(condition.eq.(0.0e0_16,0.0e0_16)) then 6445 mp_cond=truecase 6446 else 6447 mp_cond=falsecase 6448 endif 6449 end 6450 6451 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 6452 implicit none 6453 logical condition 6454 %(complex_mp_format)s truecase,falsecase 6455 if(condition) then 6456 mp_condif=truecase 6457 else 6458 mp_condif=falsecase 6459 endif 6460 end 6461 6462 %(complex_mp_format)s function mp_recms(condition,expr) 6463 implicit none 6464 logical condition 6465 %(complex_mp_format)s expr 6466 if(condition)then 6467 mp_recms=expr 6468 else 6469 mp_recms=cmplx(real(expr),kind=16) 6470 endif 6471 end 6472 6473 %(complex_mp_format)s function mp_reglog(arg) 6474 implicit none 6475 %(complex_mp_format)s TWOPII 6476 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6477 %(complex_mp_format)s arg 6478 if(arg.eq.(0.0e0_16,0.0e0_16)) then 6479 mp_reglog=(0.0e0_16,0.0e0_16) 6480 else 6481 mp_reglog=log(arg) 6482 endif 6483 end 6484 6485 %(complex_mp_format)s function mp_reglogp(arg) 6486 implicit none 6487 %(complex_mp_format)s TWOPII 6488 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6489 %(complex_mp_format)s arg 6490 if(arg.eq.(0.0e0_16,0.0e0_16))then 6491 mp_reglogp=(0.0e0_16,0.0e0_16) 6492 else 6493 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 6494 mp_reglogp=log(arg) + TWOPII 6495 else 6496 mp_reglogp=log(arg) 6497 endif 6498 endif 6499 end 6500 6501 %(complex_mp_format)s function mp_reglogm(arg) 6502 implicit none 6503 %(complex_mp_format)s TWOPII 6504 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6505 %(complex_mp_format)s arg 6506 if(arg.eq.(0.0e0_16,0.0e0_16))then 6507 mp_reglogm=(0.0e0_16,0.0e0_16) 6508 else 6509 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 6510 mp_reglogm=log(arg) - TWOPII 6511 else 6512 mp_reglogm=log(arg) 6513 endif 6514 endif 6515 end 6516 6517 %(complex_mp_format)s function mp_regsqrt(arg_in) 6518 implicit none 6519 %(complex_mp_format)s arg_in 6520 %(complex_mp_format)s arg 6521 arg=arg_in 6522 if(abs(imagpart(arg)).eq.0.0e0_16)then 6523 arg=cmplx(real(arg,kind=16),0.0e0_16) 6524 endif 6525 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 6526 arg=cmplx(0.0e0_16,imagpart(arg)) 6527 endif 6528 mp_regsqrt=sqrt(arg) 6529 end 6530 6531 6532 %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in) 6533 implicit none 6534 %(complex_mp_format)s TWOPII 6535 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6536 %(complex_mp_format)s expr1_in,expr2_in 6537 %(complex_mp_format)s expr1,expr2 6538 %(real_mp_format)s logsw 6539 %(real_mp_format)s imagexpr 6540 logical firstsheet 6541 expr1=expr1_in 6542 expr2=expr2_in 6543 if(abs(imagpart(expr1)).eq.0.0e0_16)then 6544 expr1=cmplx(real(expr1,kind=16),0.0e0_16) 6545 endif 6546 
if(abs(real(expr1,kind=16)).eq.0.0e0_16)then 6547 expr1=cmplx(0.0e0_16,imagpart(expr1)) 6548 endif 6549 if(abs(imagpart(expr2)).eq.0.0e0_16)then 6550 expr2=cmplx(real(expr2,kind=16),0.0e0_16) 6551 endif 6552 if(abs(real(expr2,kind=16)).eq.0.0e0_16)then 6553 expr2=cmplx(0.0e0_16,imagpart(expr2)) 6554 endif 6555 if(expr1.eq.(0.0e0_16,0.0e0_16))then 6556 mp_grreglog=(0.0e0_16,0.0e0_16) 6557 else 6558 imagexpr=imagpart(expr1)*imagpart(expr2) 6559 firstsheet=imagexpr.ge.0.0e0_16 6560 firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16 6561 firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16 6562 if(firstsheet)then 6563 mp_grreglog=log(expr1) 6564 else 6565 if(imagpart(expr1).gt.0.0e0_16)then 6566 mp_grreglog=log(expr1) - logsw*TWOPII 6567 else 6568 mp_grreglog=log(expr1) + logsw*TWOPII 6569 endif 6570 endif 6571 endif 6572 end 6573 6574 %(complex_mp_format)s function mp_arg(comnum) 6575 implicit none 6576 %(complex_mp_format)s comnum 6577 %(complex_mp_format)s imm 6578 imm = (0.0e0_16,1.0e0_16) 6579 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 6580 mp_arg=(0.0e0_16,0.0e0_16) 6581 else 6582 mp_arg=log(comnum/abs(comnum))/imm 6583 endif 6584 end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format}) 6585 6586 6587 #check for the file functions.f 6588 model_path = self.model.get('modelpath') 6589 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6590 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6591 input = pjoin(model_path,'Fortran','functions.f') 6592 file.writelines(fsock, open(input).read()) 6593 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6594 6595 # check for functions define in the UFO model 6596 ufo_fct = self.model.get('functions') 6597 if ufo_fct: 6598 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6599 for fct in ufo_fct: 6600 # already handle by default 6601 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6602 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg", 6603 "grreglog","regsqrt"]: 6604 ufo_fct_template = """ 6605 double complex function %(name)s(%(args)s) 6606 implicit none 6607 double complex %(args)s 6608 %(definitions)s 6609 %(name)s = %(fct)s 6610 6611 return 6612 end 6613 """ 6614 str_fct = self.p_to_f.parse(fct.expr) 6615 if not self.p_to_f.to_define: 6616 definitions = [] 6617 else: 6618 definitions=[] 6619 for d in self.p_to_f.to_define: 6620 if d == 'pi': 6621 definitions.append(' double precision pi') 6622 definitions.append(' data pi /3.1415926535897932d0/') 6623 else: 6624 definitions.append(' double complex %s' % d) 6625 6626 text = ufo_fct_template % { 6627 'name': fct.name, 6628 'args': ", ".join(fct.arguments), 6629 'fct': str_fct, 6630 'definitions': '\n'.join(definitions) 6631 } 6632 6633 fsock.writelines(text) 6634 if self.opt['mp']: 6635 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6636 for fct in ufo_fct: 6637 # already handle by default 6638 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6639 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg", 6640 "grreglog","regsqrt"]: 6641 ufo_fct_template = """ 6642 %(complex_mp_format)s function mp_%(name)s(mp__%(args)s) 6643 implicit none 6644 %(complex_mp_format)s mp__%(args)s 6645 %(definitions)s 6646 mp_%(name)s = %(fct)s 6647 6648 return 6649 end 6650 """ 6651 str_fct = self.mp_p_to_f.parse(fct.expr) 6652 if not self.mp_p_to_f.to_define: 6653 definitions = [] 6654 else: 6655 definitions=[] 6656 
for d in self.mp_p_to_f.to_define: 6657 if d == 'pi': 6658 definitions.append(' %s mp__pi' % self.mp_real_format) 6659 definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/') 6660 else: 6661 definitions.append(' %s mp_%s' % (self.mp_complex_format,d)) 6662 text = ufo_fct_template % { 6663 'name': fct.name, 6664 'args': ", mp__".join(fct.arguments), 6665 'fct': str_fct, 6666 'definitions': '\n'.join(definitions), 6667 'complex_mp_format': self.mp_complex_format 6668 } 6669 fsock.writelines(text) 6670 6671 6672 6673 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6674 6675 6676
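# --------------------------------------------------------------------------
# Illustrative sketch: how create_model_functions_def above expands a user UFO
# function through ufo_fct_template.  The name, arguments and expression are
# invented; only the template shape comes from the code above.
_example_substitution = {'name': 'myff', 'args': 'q2, m2',
                         'fct': 'm2/(q2-m2)', 'definitions': ''}
# Substituted into ufo_fct_template this yields, roughly:
#
#           double complex function myff(q2, m2)
#           implicit none
#           double complex q2, m2
#
#           myff = m2/(q2-m2)
#
#           return
#           end
# --------------------------------------------------------------------------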
6677 - def create_makeinc(self):
6678 """create makeinc.inc containing the file to compile """ 6679 6680 fsock = self.open('makeinc.inc', comment='#') 6681 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6682 text += ' model_functions.o ' 6683 6684 nb_coup_indep = 1 + len(self.coups_dep) // 25 6685 nb_coup_dep = 1 + len(self.coups_indep) // 25 6686 couplings_files=['couplings%s.o' % (i+1) \ 6687 for i in range(nb_coup_dep + nb_coup_indep) ] 6688 if self.opt['mp']: 6689 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6690 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6691 text += ' '.join(couplings_files) 6692 fsock.writelines(text)
6693
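# --------------------------------------------------------------------------
# Illustrative sketch of the MODEL line written by create_makeinc above.  The
# two counters are computed from the opposite coupling lists compared with
# create_couplings, but since only their sum enters the couplingsN.o list and
# the mp_couplingsN.o range still starts right after the parameter-independent
# files, the generated makeinc.inc comes out the same (assuming nb_def_by_file
# is 25).  The example assumes 3 independent and 1 dependent coupling file:
#
#   MODEL = couplings.o lha_read.o printout.o rw_para.o model_functions.o couplings1.o couplings2.o couplings3.o couplings4.o mp_couplings4.o
# --------------------------------------------------------------------------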
6694 - def create_param_write(self):
6695 """ create param_write """ 6696 6697 fsock = self.open('param_write.inc', format='fortran') 6698 6699 fsock.writelines("""write(*,*) ' External Params' 6700 write(*,*) ' ---------------------------------' 6701 write(*,*) ' '""") 6702 def format(name): 6703 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6704 6705 # Write the external parameter 6706 lines = [format(param.name) for param in self.params_ext] 6707 fsock.writelines('\n'.join(lines)) 6708 6709 fsock.writelines("""write(*,*) ' Internal Params' 6710 write(*,*) ' ---------------------------------' 6711 write(*,*) ' '""") 6712 lines = [format(data.name) for data in self.params_indep 6713 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6714 fsock.writelines('\n'.join(lines)) 6715 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6716 write(*,*) ' ----------------------------------------' 6717 write(*,*) ' '""") 6718 lines = [format(data.name) for data in self.params_dep \ 6719 if self.check_needed_param(data.name)] 6720 6721 fsock.writelines('\n'.join(lines)) 6722 6723 6724
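# --------------------------------------------------------------------------
# Illustrative sketch mirroring the nested format() helper in
# create_param_write above ('aEWM1' is just an example parameter name).
def _param_write_line(name):
    return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
# _param_write_line('aEWM1') -> "write(*,*) 'aEWM1 = ', aEWM1"
# --------------------------------------------------------------------------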
6725 - def create_ident_card(self):
6726 """ create the ident_card.dat """ 6727 6728 def format(parameter): 6729 """return the line for the ident_card corresponding to this parameter""" 6730 colum = [parameter.lhablock.lower()] + \ 6731 [str(value) for value in parameter.lhacode] + \ 6732 [parameter.name] 6733 if not parameter.name: 6734 return '' 6735 return ' '.join(colum)+'\n'
6736 6737 fsock = self.open('ident_card.dat') 6738 6739 external_param = [format(param) for param in self.params_ext] 6740 fsock.writelines('\n'.join(external_param)) 6741
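# --------------------------------------------------------------------------
# Illustrative sketch mirroring the nested format() helper in create_ident_card
# above; the block 'MASS', LHA code [6] and name 'MT' are example values.
def _ident_card_line(lhablock, lhacode, name):
    column = [lhablock.lower()] + [str(value) for value in lhacode] + [name]
    return ' '.join(column) + '\n'
# _ident_card_line('MASS', [6], 'MT') -> 'mass 6 MT\n'
# --------------------------------------------------------------------------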
6742 - def create_actualize_mp_ext_param_inc(self):
6743 """ create the actualize_mp_ext_params.inc code """ 6744 6745 # In principle one should actualize all external, but for now, it is 6746 # hardcoded that only AS and MU_R can by dynamically changed by the user 6747 # so that we only update those ones. 6748 # Of course, to be on the safe side, one could decide to update all 6749 # external parameters. 6750 update_params_list=[p for p in self.params_ext if p.name in 6751 self.PS_dependent_key] 6752 6753 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6754 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6755 for param in update_params_list] 6756 # When read_lha is false, it is G which is taken in input and not AS, so 6757 # this is what should be reset here too. 6758 if 'aS' in [param.name for param in update_params_list]: 6759 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6760 6761 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6762 fsock.writelines('\n'.join(res_strings))
6763
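# --------------------------------------------------------------------------
# Illustrative sketch of a generated actualize_mp_ext_params.inc, assuming
# mp_prefix is 'MP__' and that aS and MU_R are the only external parameters
# appearing in PS_dependent_key:
#
#   MP__aS=aS
#   MP__MU_R=MU_R
#   MP__G=G
# --------------------------------------------------------------------------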
6764 - def create_param_read(self):
6765 """create param_read""" 6766 6767 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6768 or self.opt['loop_induced']: 6769 fsock = self.open('param_read.inc', format='fortran') 6770 fsock.writelines(' include \'../param_card.inc\'') 6771 return 6772 6773 def format_line(parameter): 6774 """return the line for the ident_card corresponding to this 6775 parameter""" 6776 template = \ 6777 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 6778 % {'name': parameter.name, 6779 'value': self.p_to_f.parse(str(parameter.value.real))} 6780 if self.opt['mp']: 6781 template = template+ \ 6782 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 6783 "%(mp_prefix)s%(name)s,%(value)s)") \ 6784 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 6785 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 6786 return template 6787 6788 fsock = self.open('param_read.inc', format='fortran') 6789 res_strings = [format_line(param) \ 6790 for param in self.params_ext] 6791 6792 # Correct width sign for Majorana particles (where the width 6793 # and mass need to have the same sign) 6794 for particle in self.model.get('particles'): 6795 if particle.is_fermion() and particle.get('self_antipart') and \ 6796 particle.get('width').lower() != 'zero': 6797 6798 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 6799 {'width': particle.get('width'), 'mass': particle.get('mass')}) 6800 if self.opt['mp']: 6801 res_strings.append(\ 6802 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 6803 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 6804 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 6805 6806 fsock.writelines('\n'.join(res_strings)) 6807 6808 6809 @staticmethod
6810 - def create_param_card_static(model, output_path, rule_card_path=False, 6811 mssm_convert=True):
6812 """ create the param_card.dat for a givent model --static method-- """ 6813 #1. Check if a default param_card is present: 6814 done = False 6815 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6816 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6817 model_path = model.get('modelpath') 6818 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6819 done = True 6820 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6821 output_path) 6822 if not done: 6823 param_writer.ParamCardWriter(model, output_path) 6824 6825 if rule_card_path: 6826 if hasattr(model, 'rule_card'): 6827 model.rule_card.write_file(rule_card_path) 6828 6829 if mssm_convert: 6830 model_name = model.get('name') 6831 # IF MSSM convert the card to SLAH1 6832 if model_name == 'mssm' or model_name.startswith('mssm-'): 6833 import models.check_param_card as translator 6834 # Check the format of the param_card for Pythia and make it correct 6835 if rule_card_path: 6836 translator.make_valid_param_card(output_path, rule_card_path) 6837 translator.convert_to_slha1(output_path)
6838
6839 - def create_param_card(self):
6840 """ create the param_card.dat """ 6841 6842 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6843 if not hasattr(self.model, 'rule_card'): 6844 rule_card=False 6845 self.create_param_card_static(self.model, 6846 output_path=pjoin(self.dir_path, 'param_card.dat'), 6847 rule_card_path=rule_card, 6848 mssm_convert=True)
6849
6850 -def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options={}):
6851 """ Determine which Export_v4 class is required. cmd is the command 6852 interface containing all potential usefull information. 6853 The output_type argument specifies from which context the output 6854 is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output 6855 and 'default' for tree-level outputs.""" 6856 6857 opt = dict(cmd.options) 6858 opt['output_options'] = cmd_options 6859 6860 # ========================================================================== 6861 # First check whether Ninja must be installed. 6862 # Ninja would only be required if: 6863 # a) Loop optimized output is selected 6864 # b) the process gathered from the amplitude generated use loops 6865 6866 if len(cmd._curr_amps)>0: 6867 try: 6868 curr_proc = cmd._curr_amps[0].get('process') 6869 except base_objects.PhysicsObject.PhysicsObjectError: 6870 curr_proc = None 6871 elif hasattr(cmd,'_fks_multi_proc') and \ 6872 len(cmd._fks_multi_proc.get('process_definitions'))>0: 6873 curr_proc = cmd._fks_multi_proc.get('process_definitions')[0] 6874 else: 6875 curr_proc = None 6876 6877 requires_reduction_tool = opt['loop_optimized_output'] and \ 6878 (not curr_proc is None) and \ 6879 (curr_proc.get('perturbation_couplings') != [] and \ 6880 not curr_proc.get('NLO_mode') in [None,'real','tree','LO','LOonly']) 6881 6882 # An installation is required then, but only if the specified path is the 6883 # default local one and that the Ninja library appears missing. 6884 if requires_reduction_tool: 6885 cmd.install_reduction_library() 6886 6887 # ========================================================================== 6888 # First treat the MadLoop5 standalone case 6889 MadLoop_SA_options = {'clean': not noclean, 6890 'complex_mass':cmd.options['complex_mass_scheme'], 6891 'export_format':'madloop', 6892 'mp':True, 6893 'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'), 6894 'cuttools_dir': cmd._cuttools_dir, 6895 'iregi_dir':cmd._iregi_dir, 6896 'pjfry_dir':cmd.options['pjfry'], 6897 'golem_dir':cmd.options['golem'], 6898 'samurai_dir':cmd.options['samurai'], 6899 'ninja_dir':cmd.options['ninja'], 6900 'collier_dir':cmd.options['collier'], 6901 'fortran_compiler':cmd.options['fortran_compiler'], 6902 'f2py_compiler':cmd.options['f2py_compiler'], 6903 'output_dependencies':cmd.options['output_dependencies'], 6904 'SubProc_prefix':'P', 6905 'compute_color_flows':cmd.options['loop_color_flows'], 6906 'mode': 'reweight' if cmd._export_format == "standalone_rw" else '', 6907 'cluster_local_path': cmd.options['cluster_local_path'], 6908 'output_options': cmd_options 6909 } 6910 6911 if output_type.startswith('madloop'): 6912 import madgraph.loop.loop_exporters as loop_exporters 6913 if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')): 6914 ExporterClass=None 6915 if not cmd.options['loop_optimized_output']: 6916 ExporterClass=loop_exporters.LoopProcessExporterFortranSA 6917 else: 6918 if output_type == "madloop": 6919 ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA 6920 MadLoop_SA_options['export_format'] = 'madloop_optimized' 6921 elif output_type == "madloop_matchbox": 6922 ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox 6923 MadLoop_SA_options['export_format'] = 'madloop_matchbox' 6924 else: 6925 raise Exception, "output_type not recognize %s" % output_type 6926 return ExporterClass(cmd._export_dir, MadLoop_SA_options) 6927 else: 6928 raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\ 6929 ' in %s'%str(cmd._mgme_dir)) 
6930 6931 # Then treat the aMC@NLO output 6932 elif output_type=='amcatnlo': 6933 import madgraph.iolibs.export_fks as export_fks 6934 ExporterClass=None 6935 amcatnlo_options = dict(opt) 6936 amcatnlo_options.update(MadLoop_SA_options) 6937 amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0 6938 if not cmd.options['loop_optimized_output']: 6939 logger.info("Writing out the aMC@NLO code") 6940 ExporterClass = export_fks.ProcessExporterFortranFKS 6941 amcatnlo_options['export_format']='FKS5_default' 6942 else: 6943 logger.info("Writing out the aMC@NLO code, using optimized Loops") 6944 ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS 6945 amcatnlo_options['export_format']='FKS5_optimized' 6946 return ExporterClass(cmd._export_dir, amcatnlo_options) 6947 6948 6949 # Then the default tree-level output 6950 elif output_type=='default': 6951 assert group_subprocesses in [True, False] 6952 6953 opt = dict(opt) 6954 opt.update({'clean': not noclean, 6955 'complex_mass': cmd.options['complex_mass_scheme'], 6956 'export_format':cmd._export_format, 6957 'mp': False, 6958 'sa_symmetry':False, 6959 'model': cmd._curr_model.get('name'), 6960 'v5_model': False if cmd._model_v4_path else True }) 6961 6962 format = cmd._export_format #shortcut 6963 6964 if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']: 6965 opt['sa_symmetry'] = True 6966 elif format == 'plugin': 6967 opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry 6968 6969 loop_induced_opt = dict(opt) 6970 loop_induced_opt.update(MadLoop_SA_options) 6971 loop_induced_opt['export_format'] = 'madloop_optimized' 6972 loop_induced_opt['SubProc_prefix'] = 'PV' 6973 # For loop_induced output with MadEvent, we must have access to the 6974 # color flows. 6975 loop_induced_opt['compute_color_flows'] = True 6976 for key in opt: 6977 if key not in loop_induced_opt: 6978 loop_induced_opt[key] = opt[key] 6979 6980 # Madevent output supports MadAnalysis5 6981 if format in ['madevent']: 6982 opt['madanalysis5'] = cmd.options['madanalysis5_path'] 6983 6984 if format == 'matrix' or format.startswith('standalone'): 6985 return ProcessExporterFortranSA(cmd._export_dir, opt, format=format) 6986 6987 elif format in ['madevent'] and group_subprocesses: 6988 if isinstance(cmd._curr_amps[0], 6989 loop_diagram_generation.LoopAmplitude): 6990 import madgraph.loop.loop_exporters as loop_exporters 6991 return loop_exporters.LoopInducedExporterMEGroup( 6992 cmd._export_dir,loop_induced_opt) 6993 else: 6994 return ProcessExporterFortranMEGroup(cmd._export_dir,opt) 6995 elif format in ['madevent']: 6996 if isinstance(cmd._curr_amps[0], 6997 loop_diagram_generation.LoopAmplitude): 6998 import madgraph.loop.loop_exporters as loop_exporters 6999 return loop_exporters.LoopInducedExporterMENoGroup( 7000 cmd._export_dir,loop_induced_opt) 7001 else: 7002 return ProcessExporterFortranME(cmd._export_dir,opt) 7003 elif format in ['matchbox']: 7004 return ProcessExporterFortranMatchBox(cmd._export_dir,opt) 7005 elif cmd._export_format in ['madweight'] and group_subprocesses: 7006 7007 return ProcessExporterFortranMWGroup(cmd._export_dir, opt) 7008 elif cmd._export_format in ['madweight']: 7009 return ProcessExporterFortranMW(cmd._export_dir, opt) 7010 elif format == 'plugin': 7011 if isinstance(cmd._curr_amps[0], 7012 loop_diagram_generation.LoopAmplitude): 7013 return cmd._export_plugin(cmd._export_dir, loop_induced_opt) 7014 else: 7015 return cmd._export_plugin(cmd._export_dir, opt) 7016 7017 else: 7018 raise Exception, 'Wrong 
export_v4 format' 7019 else: 7020 raise MadGraph5Error, 'Output type %s not recognized in ExportV4Factory.' % output_type
7021
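# --------------------------------------------------------------------------
# Illustrative summary of the dispatch performed by ExportV4Factory for the
# tree-level ('default') case; the call is only a sketch of a typical
# invocation from the MadGraph command interface.
#
#   exporter = ExportV4Factory(cmd, noclean=False, output_type='default',
#                              group_subprocesses=True)
#
#   'matrix' / 'standalone*'  -> ProcessExporterFortranSA
#   'madevent' (grouped)      -> ProcessExporterFortranMEGroup, or
#                                LoopInducedExporterMEGroup for loop-induced
#   'madevent' (ungrouped)    -> ProcessExporterFortranME, or
#                                LoopInducedExporterMENoGroup for loop-induced
#   'matchbox'                -> ProcessExporterFortranMatchBox
#   'madweight'               -> ProcessExporterFortranMWGroup / ...MW
#   'plugin'                  -> cmd._export_plugin(...)
# --------------------------------------------------------------------------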
7022 7023 7024 7025 #=============================================================================== 7026 # ProcessExporterFortranMWGroup 7027 #=============================================================================== 7028 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
7029 """Class to take care of exporting a set of matrix elements to 7030 MadEvent subprocess group format.""" 7031 7032 matrix_file = "matrix_madweight_group_v4.inc" 7033 grouped_mode = 'madweight' 7034 #=========================================================================== 7035 # generate_subprocess_directory 7036 #===========================================================================
7037 - def generate_subprocess_directory(self, subproc_group, 7038 fortran_model, 7039 group_number):
7040 """Generate the Pn directory for a subprocess group in MadEvent, 7041 including the necessary matrix_N.f files, configs.inc and various 7042 other helper files.""" 7043 7044 if not isinstance(subproc_group, group_subprocs.SubProcessGroup): 7045 raise base_objects.PhysicsObject.PhysicsObjectError,\ 7046 "subproc_group object not SubProcessGroup" 7047 7048 if not self.model: 7049 self.model = subproc_group.get('matrix_elements')[0].\ 7050 get('processes')[0].get('model') 7051 7052 pathdir = os.path.join(self.dir_path, 'SubProcesses') 7053 7054 # Create the directory PN in the specified path 7055 subprocdir = "P%d_%s" % (subproc_group.get('number'), 7056 subproc_group.get('name')) 7057 try: 7058 os.mkdir(pjoin(pathdir, subprocdir)) 7059 except os.error as error: 7060 logger.warning(error.strerror + " " + subprocdir) 7061 7062 7063 logger.info('Creating files in directory %s' % subprocdir) 7064 Ppath = pjoin(pathdir, subprocdir) 7065 7066 # Create the matrix.f files, auto_dsig.f files and all inc files 7067 # for all subprocesses in the group 7068 7069 maxamps = 0 7070 maxflows = 0 7071 tot_calls = 0 7072 7073 matrix_elements = subproc_group.get('matrix_elements') 7074 7075 for ime, matrix_element in \ 7076 enumerate(matrix_elements): 7077 filename = pjoin(Ppath, 'matrix%d.f' % (ime+1)) 7078 calls, ncolor = \ 7079 self.write_matrix_element_v4(writers.FortranWriter(filename), 7080 matrix_element, 7081 fortran_model, 7082 str(ime+1), 7083 subproc_group.get('diagram_maps')[\ 7084 ime]) 7085 7086 filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1)) 7087 self.write_auto_dsig_file(writers.FortranWriter(filename), 7088 matrix_element, 7089 str(ime+1)) 7090 7091 # Keep track of needed quantities 7092 tot_calls += int(calls) 7093 maxflows = max(maxflows, ncolor) 7094 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 7095 7096 # Draw diagrams 7097 filename = pjoin(Ppath, "matrix%d.ps" % (ime+1)) 7098 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 7099 get('diagrams'), 7100 filename, 7101 model = \ 7102 matrix_element.get('processes')[0].\ 7103 get('model'), 7104 amplitude=True) 7105 logger.info("Generating Feynman diagrams for " + \ 7106 matrix_element.get('processes')[0].nice_string()) 7107 plot.draw() 7108 7109 # Extract number of external particles 7110 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 7111 7112 # Generate a list of diagrams corresponding to each configuration 7113 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 7114 # If a subprocess has no diagrams for this config, the number is 0 7115 7116 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 7117 7118 filename = pjoin(Ppath, 'auto_dsig.f') 7119 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 7120 subproc_group) 7121 7122 filename = pjoin(Ppath,'configs.inc') 7123 nconfigs, s_and_t_channels = self.write_configs_file(\ 7124 writers.FortranWriter(filename), 7125 subproc_group, 7126 subproc_diagrams_for_config) 7127 7128 filename = pjoin(Ppath, 'leshouche.inc') 7129 self.write_leshouche_file(writers.FortranWriter(filename), 7130 subproc_group) 7131 7132 filename = pjoin(Ppath, 'phasespace.inc') 7133 self.write_phasespace_file(writers.FortranWriter(filename), 7134 nconfigs) 7135 7136 7137 filename = pjoin(Ppath, 'maxamps.inc') 7138 self.write_maxamps_file(writers.FortranWriter(filename), 7139 maxamps, 7140 maxflows, 7141 max([len(me.get('processes')) for me in \ 7142 matrix_elements]), 7143 len(matrix_elements)) 7144 7145 filename = pjoin(Ppath, 'mirrorprocs.inc') 7146 self.write_mirrorprocs(writers.FortranWriter(filename), 7147 subproc_group) 7148 7149 filename = pjoin(Ppath, 'nexternal.inc') 7150 self.write_nexternal_file(writers.FortranWriter(filename), 7151 nexternal, ninitial) 7152 7153 filename = pjoin(Ppath, 'pmass.inc') 7154 self.write_pmass_file(writers.FortranWriter(filename), 7155 matrix_element) 7156 7157 filename = pjoin(Ppath, 'props.inc') 7158 self.write_props_file(writers.FortranWriter(filename), 7159 matrix_element, 7160 s_and_t_channels) 7161 7162 # filename = pjoin(Ppath, 'processes.dat') 7163 # files.write_to_file(filename, 7164 # self.write_processes_file, 7165 # subproc_group) 7166 7167 # Generate jpgs -> pass in make_html 7168 #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl')) 7169 7170 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f'] 7171 7172 for file in linkfiles: 7173 ln('../%s' % file, cwd=Ppath) 7174 7175 ln('nexternal.inc', '../../Source', cwd=Ppath, log=False) 7176 ln('leshouche.inc', '../../Source', cwd=Ppath, log=False) 7177 ln('maxamps.inc', '../../Source', cwd=Ppath, log=False) 7178 ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath) 7179 ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath) 7180 ln('phasespace.inc', '../', log=True, cwd=Ppath) 7181 if not tot_calls: 7182 tot_calls = 0 7183 return tot_calls
7184 7185 7186 #=========================================================================== 7187 # Helper functions 7188 #===========================================================================
7189 - def modify_grouping(self, matrix_element):
7190 """allow to modify the grouping (if grouping is in place) 7191 return two value: 7192 - True/False if the matrix_element was modified 7193 - the new(or old) matrix element""" 7194 7195 return True, matrix_element.split_lepton_grouping()
7196 7197 #=========================================================================== 7198 # write_super_auto_dsig_file 7199 #===========================================================================
7200 - def write_super_auto_dsig_file(self, writer, subproc_group):
7201 """Write the auto_dsig.f file selecting between the subprocesses 7202 in subprocess group mode""" 7203 7204 replace_dict = {} 7205 7206 # Extract version number and date from VERSION file 7207 info_lines = self.get_mg5_info_lines() 7208 replace_dict['info_lines'] = info_lines 7209 7210 matrix_elements = subproc_group.get('matrix_elements') 7211 7212 # Extract process info lines 7213 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7214 matrix_elements]) 7215 replace_dict['process_lines'] = process_lines 7216 7217 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7218 replace_dict['nexternal'] = nexternal 7219 7220 replace_dict['nsprocs'] = 2*len(matrix_elements) 7221 7222 # Generate dsig definition line 7223 dsig_def_line = "DOUBLE PRECISION " + \ 7224 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7225 range(len(matrix_elements))]) 7226 replace_dict["dsig_def_line"] = dsig_def_line 7227 7228 # Generate dsig process lines 7229 call_dsig_proc_lines = [] 7230 for iproc in range(len(matrix_elements)): 7231 call_dsig_proc_lines.append(\ 7232 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7233 {"num": iproc + 1, 7234 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7235 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7236 7237 if writer: 7238 file = open(os.path.join(_file_path, \ 7239 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7240 file = file % replace_dict 7241 # Write the file 7242 writer.writelines(file) 7243 else: 7244 return replace_dict
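# --------------------------------------------------------------------------
# Illustrative sketch of one generated selection line built in
# call_dsig_proc_lines above (the process string 'u u~ > t t~' is an example):
#
#   IF(IPROC.EQ.1) DSIGPROC=DSIG1(P1,WGT,IMODE) ! u u~ > t t~
# --------------------------------------------------------------------------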
7245 7246 #=========================================================================== 7247 # write_mirrorprocs 7248 #===========================================================================
7249 - def write_mirrorprocs(self, writer, subproc_group):
7250 """Write the mirrorprocs.inc file determining which processes have 7251 IS mirror process in subprocess group mode.""" 7252 7253 lines = [] 7254 bool_dict = {True: '.true.', False: '.false.'} 7255 matrix_elements = subproc_group.get('matrix_elements') 7256 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7257 (len(matrix_elements), 7258 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7259 me in matrix_elements]))) 7260 # Write the file 7261 writer.writelines(lines)
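# --------------------------------------------------------------------------
# Illustrative sketch mirroring write_mirrorprocs above: the DATA statement
# produced for a group whose two matrix elements have mirror flags
# [True, False] (the flags are example values).
def _mirrorprocs_line(mirror_flags):
    bool_dict = {True: '.true.', False: '.false.'}
    return "DATA (MIRRORPROCS(I),I=1,%d)/%s/" % (
        len(mirror_flags), ",".join(bool_dict[f] for f in mirror_flags))
# _mirrorprocs_line([True, False]) -> 'DATA (MIRRORPROCS(I),I=1,2)/.true.,.false./'
# --------------------------------------------------------------------------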
7262 7263 #=========================================================================== 7264 # write_configs_file 7265 #===========================================================================
7266 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7267 """Write the configs.inc file with topology information for a 7268 subprocess group. Use the first subprocess with a diagram for each 7269 configuration.""" 7270 7271 matrix_elements = subproc_group.get('matrix_elements') 7272 model = matrix_elements[0].get('processes')[0].get('model') 7273 7274 diagrams = [] 7275 config_numbers = [] 7276 for iconfig, config in enumerate(diagrams_for_config): 7277 # Check if any diagrams correspond to this config 7278 if set(config) == set([0]): 7279 continue 7280 subproc_diags = [] 7281 for s,d in enumerate(config): 7282 if d: 7283 subproc_diags.append(matrix_elements[s].\ 7284 get('diagrams')[d-1]) 7285 else: 7286 subproc_diags.append(None) 7287 diagrams.append(subproc_diags) 7288 config_numbers.append(iconfig + 1) 7289 7290 # Extract number of external particles 7291 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 7292 7293 return len(diagrams), \ 7294 self.write_configs_file_from_diagrams(writer, diagrams, 7295 config_numbers, 7296 nexternal, ninitial, 7297 matrix_elements[0],model)
7298 7299 #=========================================================================== 7300 # write_run_configs_file 7301 #===========================================================================
7302 - def write_run_config_file(self, writer):
7303 """Write the run_configs.inc file for MadEvent""" 7304 7305 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 7306 text = open(path).read() % {'chanperjob':'2'} 7307 writer.write(text) 7308 return True
7309 7310 7311 #=========================================================================== 7312 # write_leshouche_file 7313 #===========================================================================
7314 - def write_leshouche_file(self, writer, subproc_group):
7315 """Write the leshouche.inc file for MG4""" 7316 7317 all_lines = [] 7318 7319 for iproc, matrix_element in \ 7320 enumerate(subproc_group.get('matrix_elements')): 7321 all_lines.extend(self.get_leshouche_lines(matrix_element, 7322 iproc)) 7323 7324 # Write the file 7325 writer.writelines(all_lines) 7326 7327 return True
7328