Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30  import time 
  31  import traceback 
  32   
  33  import aloha 
  34   
  35  import madgraph.core.base_objects as base_objects 
  36  import madgraph.core.color_algebra as color 
  37  import madgraph.core.helas_objects as helas_objects 
  38  import madgraph.iolibs.drawing_eps as draw 
  39  import madgraph.iolibs.files as files 
  40  import madgraph.iolibs.group_subprocs as group_subprocs 
  41  import madgraph.iolibs.file_writers as writers 
  42  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  43  import madgraph.iolibs.template_files as template_files 
  44  import madgraph.iolibs.ufo_expression_parsers as parsers 
  45  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  46  import madgraph.interface.common_run_interface as common_run_interface 
  47  import madgraph.various.diagram_symmetry as diagram_symmetry 
  48  import madgraph.various.misc as misc 
  49  import madgraph.various.banner as banner_mod 
  50  import madgraph.various.process_checks as process_checks 
  51  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  52  import aloha.create_aloha as create_aloha 
  53  import models.import_ufo as import_ufo 
  54  import models.write_param_card as param_writer 
  55  import models.check_param_card as check_param_card 
  56   
  57   
  58  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  59  from madgraph.iolibs.files import cp, ln, mv 
  60   
  61  from madgraph import InvalidCmd 
  62   
  63  pjoin = os.path.join 
  64   
  65  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  66  logger = logging.getLogger('madgraph.export_v4') 
  67   
  68  default_compiler= {'fortran': 'gfortran', 
  69                         'f2py': 'f2py', 
  70                         'cpp':'g++'} 
  71   
  72   
  73  -class VirtualExporter(object): 
  74   
  75      # exporter variables that modify the way madgraph interacts with this class 
  76   
  77      grouped_mode = 'madevent' 
  78      # This variable changes the type of object called within 'generate_subprocess_directory' 
  79      # functions. 
  80      #   False to avoid grouping (only identical matrix elements are merged) 
  81      #   'madevent' groups the massless quarks and massless leptons 
  82      #   'madweight' groups the gluon with the massless quarks 
  83      sa_symmetry = False 
  84      # If grouped_mode=False, uu~ and u~u will be called independently. 
  85      # Setting sa_symmetry generates only one of the two matrix elements. 
  86      check = True 
  87      # Ask madgraph to check if the directory already exists and propose to the user to 
  88      # remove it first if this is the case 
  89      output = 'Template' 
  90      # [Template, None, dir] 
  91      #   - Template: madgraph will call copy_template 
  92      #   - dir: madgraph will just create an empty directory for initialisation 
  93      #   - None: madgraph does nothing for initialisation 
  94      exporter = 'v4' 
  95      # language of the output: 'v4' for Fortran output, 
  96      #                         'cpp' for C++ output 
  97   
  98   
99 - def __init__(self, dir_path = "", opt=None):
 100          # cmd_options is a dictionary with all the optional arguments passed at output time 
 101          return 
102
103 - def copy_template(self, model):
104 return
105
106 - def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
 107          # generate_subprocess_directory(self, matrix_element, helicity_model, me_number) [for ungrouped] 
 108          return 0  # return an integer giving the number of calls to helicity routines 
109
110 - def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
111 return
112
113 - def finalize(self,matrix_element, cmdhistory, MG5options, outputflag):
114 return
115 116
117 - def pass_information_from_cmd(self, cmd):
 118          """Pass information from the command interface to the exporter. 
 119          Please do not modify any object of the interface from the exporter. 
 120          """ 
 121          return 
122
123 - def modify_grouping(self, matrix_element):
124 return False, matrix_element
125
126 - def export_model_files(self, model_v4_path):
 127          raise Exception, "V4 model not supported by this type of exporter. Please use UFO model" 
 128          return 
129
130 - def export_helas(self, HELAS_PATH):
 131          raise Exception, "V4 model not supported by this type of exporter. Please use UFO model" 
 132          return 
133
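
The attributes and hooks above form the complete contract a custom exporter has to satisfy. As a rough illustration (not part of this module), a minimal subclass might look like the following sketch; the class name MyTextExporter and the placeholder file it writes are hypothetical:

    import os

    class MyTextExporter(VirtualExporter):
        # keep processes ungrouped and let madgraph create a bare directory
        grouped_mode = False
        output = 'dir'

        def __init__(self, dir_path="", opt=None):
            self.dir_path = dir_path
            self.opt = dict(opt) if opt else {}

        def generate_subprocess_directory(self, matrix_element, helicity_model, me_number):
            # a real exporter would write matrix.f, configs.inc, ... here;
            # this sketch only dumps the process name to a placeholder file
            with open(os.path.join(self.dir_path, 'proc_%d.txt' % me_number), 'w') as out:
                out.write(matrix_element.get('processes')[0].nice_string())
            return 0  # number of calls to helicity routines

        def finalize(self, matrix_elements, cmdhistory, MG5options, outputflag):
            return
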
 134  #=============================================================================== 
 135  # ProcessExporterFortran 
 136  #=============================================================================== 
 137  -class ProcessExporterFortran(VirtualExporter): 
 138      """Class to take care of exporting a set of matrix elements to 
 139      Fortran (v4) format.""" 
 140   
 141      default_opt = {'clean': False, 'complex_mass': False, 
 142                     'export_format': 'madevent', 'mp': False, 
 143                     'v5_model': True, 
 144                     'output_options': {} 
 145                     } 
 146      grouped_mode = False 
 147   
148 - def __init__(self, dir_path = "", opt=None):
149 """Initiate the ProcessExporterFortran with directory information""" 150 self.mgme_dir = MG5DIR 151 self.dir_path = dir_path 152 self.model = None 153 154 self.opt = dict(self.default_opt) 155 if opt: 156 self.opt.update(opt) 157 158 self.cmd_options = self.opt['output_options'] 159 160 #place holder to pass information to the run_interface 161 self.proc_characteristic = banner_mod.ProcCharacteristic()
 162   
 163   
 164      #=========================================================================== 
 165      # process exporter fortran: switch between grouped and ungrouped output 
 166      #=========================================================================== 
167 - def export_processes(self, matrix_elements, fortran_model):
168 """Make the switch between grouped and not grouped output""" 169 170 calls = 0 171 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 172 for (group_number, me_group) in enumerate(matrix_elements): 173 calls = calls + self.generate_subprocess_directory(\ 174 me_group, fortran_model, group_number) 175 else: 176 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 177 calls = calls + self.generate_subprocess_directory(\ 178 me, fortran_model, me_number) 179 180 return calls
181 182 183 #=========================================================================== 184 # create the run_card 185 #===========================================================================
186 - def create_run_card(self, matrix_elements, history):
187 """ """ 188 189 190 # bypass this for the loop-check 191 import madgraph.loop.loop_helas_objects as loop_helas_objects 192 if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement): 193 matrix_elements = None 194 195 run_card = banner_mod.RunCard() 196 197 198 default=True 199 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 200 processes = [me.get('processes') for megroup in matrix_elements 201 for me in megroup['matrix_elements']] 202 elif matrix_elements: 203 processes = [me.get('processes') 204 for me in matrix_elements['matrix_elements']] 205 else: 206 default =False 207 208 if default: 209 run_card.create_default_for_process(self.proc_characteristic, 210 history, 211 processes) 212 213 214 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 215 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
216 217 218 #=========================================================================== 219 # copy the Template in a new directory. 220 #===========================================================================
221 - def copy_template(self, model):
222 """create the directory run_name as a copy of the MadEvent 223 Template, and clean the directory 224 """ 225 226 #First copy the full template tree if dir_path doesn't exit 227 if not os.path.isdir(self.dir_path): 228 assert self.mgme_dir, \ 229 "No valid MG_ME path given for MG4 run directory creation." 230 logger.info('initialize a new directory: %s' % \ 231 os.path.basename(self.dir_path)) 232 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 233 self.dir_path, True) 234 # distutils.dir_util.copy_tree since dir_path already exists 235 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 236 self.dir_path) 237 # copy plot_card 238 for card in ['plot_card']: 239 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 240 try: 241 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 242 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 243 except IOError: 244 logger.warning("Failed to copy " + card + ".dat to default") 245 elif os.getcwd() == os.path.realpath(self.dir_path): 246 logger.info('working in local directory: %s' % \ 247 os.path.realpath(self.dir_path)) 248 # distutils.dir_util.copy_tree since dir_path already exists 249 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 250 self.dir_path) 251 # for name in misc.glob('Template/LO/*', self.mgme_dir): 252 # name = os.path.basename(name) 253 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 254 # if os.path.isfile(filename): 255 # files.cp(filename, pjoin(self.dir_path,name)) 256 # elif os.path.isdir(filename): 257 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 258 # distutils.dir_util.copy_tree since dir_path already exists 259 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 260 self.dir_path) 261 # Copy plot_card 262 for card in ['plot_card']: 263 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 264 try: 265 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 266 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 267 except IOError: 268 logger.warning("Failed to copy " + card + ".dat to default") 269 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 270 assert self.mgme_dir, \ 271 "No valid MG_ME path given for MG4 run directory creation." 
272 try: 273 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 274 except IOError: 275 MG5_version = misc.get_pkg_info() 276 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 277 278 #Ensure that the Template is clean 279 if self.opt['clean']: 280 logger.info('remove old information in %s' % \ 281 os.path.basename(self.dir_path)) 282 if os.environ.has_key('MADGRAPH_BASE'): 283 misc.call([pjoin('bin', 'internal', 'clean_template'), 284 '--web'], cwd=self.dir_path) 285 else: 286 try: 287 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 288 cwd=self.dir_path) 289 except Exception, why: 290 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 291 % (os.path.basename(self.dir_path),why)) 292 293 #Write version info 294 MG_version = misc.get_pkg_info() 295 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 296 MG_version['version']) 297 298 # add the makefile in Source directory 299 filename = pjoin(self.dir_path,'Source','makefile') 300 self.write_source_makefile(writers.FileWriter(filename)) 301 302 # add the DiscreteSampler information 303 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 304 pjoin(self.dir_path, 'Source')) 305 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 306 pjoin(self.dir_path, 'Source')) 307 308 # We need to create the correct open_data for the pdf 309 self.write_pdf_opendata()
310 311 312 #=========================================================================== 313 # Call MadAnalysis5 to generate the default cards for this process 314 #===========================================================================
315 - def create_default_madanalysis5_cards(self, history, proc_defs, processes, 316 ma5_path, output_dir, levels = ['parton','hadron']):
317 """ Call MA5 so that it writes default cards for both parton and 318 post-shower levels, tailored for this particular process.""" 319 320 if len(levels)==0: 321 return 322 start = time.time() 323 logger.info('Generating MadAnalysis5 default cards tailored to this process') 324 try: 325 MA5_interpreter = common_run_interface.CommonRunCmd.\ 326 get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100) 327 except (Exception, SystemExit) as e: 328 logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty.') 329 return 330 if MA5_interpreter is None: 331 return 332 333 MA5_main = MA5_interpreter.main 334 for lvl in ['parton','hadron']: 335 if lvl in levels: 336 card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl) 337 try: 338 text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl) 339 except (Exception, SystemExit) as e: 340 # keep the default card (skip only) 341 logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+ 342 ' default analysis card for this process.') 343 logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl) 344 error=StringIO() 345 traceback.print_exc(file=error) 346 logger.debug('MadAnalysis5 error was:') 347 logger.debug('-'*60) 348 logger.debug(error.getvalue()[:-1]) 349 logger.debug('-'*60) 350 else: 351 open(card_to_generate,'w').write(text) 352 stop = time.time() 353 if stop-start >1: 354 logger.info('Cards created in %.2fs' % (stop-start))
355 356 #=========================================================================== 357 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 358 #===========================================================================
359 - def write_procdef_mg5(self, file_pos, modelname, process_str):
360 """ write an equivalent of the MG4 proc_card in order that all the Madevent 361 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 362 363 proc_card_template = template_files.mg4_proc_card.mg4_template 364 process_template = template_files.mg4_proc_card.process_template 365 process_text = '' 366 coupling = '' 367 new_process_content = [] 368 369 370 # First find the coupling and suppress the coupling from process_str 371 #But first ensure that coupling are define whithout spaces: 372 process_str = process_str.replace(' =', '=') 373 process_str = process_str.replace('= ', '=') 374 process_str = process_str.replace(',',' , ') 375 #now loop on the element and treat all the coupling 376 for info in process_str.split(): 377 if '=' in info: 378 coupling += info + '\n' 379 else: 380 new_process_content.append(info) 381 # Recombine the process_str (which is the input process_str without coupling 382 #info) 383 process_str = ' '.join(new_process_content) 384 385 #format the SubProcess 386 replace_dict = {'process': process_str, 387 'coupling': coupling} 388 process_text += process_template.substitute(replace_dict) 389 390 replace_dict = {'process': process_text, 391 'model': modelname, 392 'multiparticle':''} 393 text = proc_card_template.substitute(replace_dict) 394 395 if file_pos: 396 ff = open(file_pos, 'w') 397 ff.write(text) 398 ff.close() 399 else: 400 return replace_dict
401 402
403 - def pass_information_from_cmd(self, cmd):
404 """Pass information for MA5""" 405 406 self.proc_defs = cmd._curr_proc_defs
407 408 #=========================================================================== 409 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 410 #===========================================================================
411 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
412 """Function to finalize v4 directory, for inheritance.""" 413 414 self.create_run_card(matrix_elements, history) 415 self.create_MA5_cards(matrix_elements, history)
416
417 - def create_MA5_cards(self,matrix_elements,history):
418 """ A wrapper around the creation of the MA5 cards so that it can be 419 bypassed by daughter classes (i.e. in standalone).""" 420 if 'madanalysis5_path' in self.opt and not \ 421 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 422 processes = None 423 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 424 processes = [me.get('processes') for megroup in matrix_elements 425 for me in megroup['matrix_elements']] 426 elif matrix_elements: 427 processes = [me.get('processes') 428 for me in matrix_elements['matrix_elements']] 429 430 self.create_default_madanalysis5_cards( 431 history, self.proc_defs, processes, 432 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 433 levels = ['hadron','parton']) 434 435 for level in ['hadron','parton']: 436 # Copying these cards turn on the use of MadAnalysis5 by default. 437 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 438 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 439 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
440 441 #=========================================================================== 442 # Create the proc_characteristic file passing information to the run_interface 443 #===========================================================================
444 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
445 446 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
447 448 #=========================================================================== 449 # write_matrix_element_v4 450 #===========================================================================
451 - def write_matrix_element_v4(self):
452 """Function to write a matrix.f file, for inheritance. 453 """ 454 pass
455 456 #=========================================================================== 457 # write_pdf_opendata 458 #===========================================================================
459 - def write_pdf_opendata(self):
460 """ modify the pdf opendata file, to allow direct access to cluster node 461 repository if configure""" 462 463 if not self.opt["cluster_local_path"]: 464 changer = {"pdf_systemwide": ""} 465 else: 466 to_add = """ 467 tempname='%(path)s'//Tablefile 468 open(IU,file=tempname,status='old',ERR=1) 469 return 470 1 tempname='%(path)s/Pdfdata/'//Tablefile 471 open(IU,file=tempname,status='old',ERR=2) 472 return 473 2 tempname='%(path)s/lhapdf'//Tablefile 474 open(IU,file=tempname,status='old',ERR=3) 475 return 476 3 tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile 477 open(IU,file=tempname,status='old',ERR=4) 478 return 479 4 tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile 480 open(IU,file=tempname,status='old',ERR=5) 481 return 482 """ % {"path" : self.opt["cluster_local_path"]} 483 484 changer = {"pdf_systemwide": to_add} 485 486 487 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f")) 488 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read() 489 ff.writelines(template % changer) 490 491 # Do the same for lhapdf set 492 if not self.opt["cluster_local_path"]: 493 changer = {"cluster_specific_path": ""} 494 else: 495 to_add=""" 496 LHAPath='%(path)s/PDFsets' 497 Inquire(File=LHAPath, exist=exists) 498 if(exists)return 499 LHAPath='%(path)s/../lhapdf/pdfsets/6.1/' 500 Inquire(File=LHAPath, exist=exists) 501 if(exists)return 502 LHAPath='%(path)s/../lhapdf/pdfsets/' 503 Inquire(File=LHAPath, exist=exists) 504 if(exists)return 505 LHAPath='./PDFsets' 506 """ % {"path" : self.opt["cluster_local_path"]} 507 changer = {"cluster_specific_path": to_add} 508 509 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f")) 510 #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w") 511 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read() 512 ff.writelines(template % changer) 513 514 515 return
516 517 518 519 #=========================================================================== 520 # write_maxparticles_file 521 #===========================================================================
522 - def write_maxparticles_file(self, writer, matrix_elements):
523 """Write the maxparticles.inc file for MadEvent""" 524 525 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 526 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 527 matrix_elements.get('matrix_elements')]) 528 else: 529 maxparticles = max([me.get_nexternal_ninitial()[0] \ 530 for me in matrix_elements]) 531 532 lines = "integer max_particles\n" 533 lines += "parameter(max_particles=%d)" % maxparticles 534 535 # Write the file 536 writer.writelines(lines) 537 538 return True
539 540 541 #=========================================================================== 542 # export the model 543 #===========================================================================
544 - def export_model_files(self, model_path):
545 """Configure the files/link of the process according to the model""" 546 547 # Import the model 548 for file in os.listdir(model_path): 549 if os.path.isfile(pjoin(model_path, file)): 550 shutil.copy2(pjoin(model_path, file), \ 551 pjoin(self.dir_path, 'Source', 'MODEL'))
 552   
 553   
 577      #=========================================================================== 
 578      # export the helas routine 
 579      #=========================================================================== 
580 - def export_helas(self, helas_path):
581 """Configure the files/link of the process according to the model""" 582 583 # Import helas routine 584 for filename in os.listdir(helas_path): 585 filepos = pjoin(helas_path, filename) 586 if os.path.isfile(filepos): 587 if filepos.endswith('Makefile.template'): 588 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 589 elif filepos.endswith('Makefile'): 590 pass 591 else: 592 cp(filepos, self.dir_path + '/Source/DHELAS')
593 # following lines do the same but whithout symbolic link 594 # 595 #def export_helas(mgme_dir, dir_path): 596 # 597 # # Copy the HELAS directory 598 # helas_dir = pjoin(mgme_dir, 'HELAS') 599 # for filename in os.listdir(helas_dir): 600 # if os.path.isfile(pjoin(helas_dir, filename)): 601 # shutil.copy2(pjoin(helas_dir, filename), 602 # pjoin(dir_path, 'Source', 'DHELAS')) 603 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 604 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 605 # 606 607 #=========================================================================== 608 # generate_subprocess_directory 609 #===========================================================================
610 - def generate_subprocess_directory(self, matrix_element, 611 fortran_model, 612 me_number):
613 """Routine to generate a subprocess directory (for inheritance)""" 614 615 pass
616 617 #=========================================================================== 618 # get_source_libraries_list 619 #===========================================================================
620 - def get_source_libraries_list(self):
 621          """ Returns the list of libraries to be built when compiling the 
 622          SOURCE directory. It is different for loop_induced processes and 
 623          also depends on the value of the 'output_dependencies' option""" 
 624   
 625          return ['$(LIBDIR)libdhelas.$(libext)', 
 626                  '$(LIBDIR)libpdf.$(libext)', 
 627                  '$(LIBDIR)libmodel.$(libext)', 
 628                  '$(LIBDIR)libcernlib.$(libext)', 
 629                  '$(LIBDIR)libbias.$(libext)'] 
630 631 #=========================================================================== 632 # write_source_makefile 633 #===========================================================================
634 - def write_source_makefile(self, writer):
 635          """Write the Source directory makefile for MG4/MadEvent""" 
 636   
 637          path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 
 638          set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 
 639          if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 
 640              model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 
 641  MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 
 642  param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 
 643          else: 
 644              model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 
 645  param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 
 646   
 647          replace_dict= {'libraries': set_of_lib, 
 648                         'model': model_line, 
 649                         'additional_dsample': '', 
 650                         'additional_dependencies': ''} 
 651   
 652          if writer: 
 653              text = open(path).read() % replace_dict 
 654              writer.write(text) 
 655   
 656          return replace_dict 
657 658 #=========================================================================== 659 # write_nexternal_madspin 660 #===========================================================================
661 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
662 """Write the nexternal_prod.inc file for madspin""" 663 664 replace_dict = {} 665 666 replace_dict['nexternal'] = nexternal 667 replace_dict['ninitial'] = ninitial 668 669 file = """ \ 670 integer nexternal_prod 671 parameter (nexternal_prod=%(nexternal)d) 672 integer nincoming_prod 673 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 674 675 # Write the file 676 if writer: 677 writer.writelines(file) 678 return True 679 else: 680 return replace_dict
681 682 #=========================================================================== 683 # write_helamp_madspin 684 #===========================================================================
685 - def write_helamp_madspin(self, writer, ncomb):
686 """Write the helamp.inc file for madspin""" 687 688 replace_dict = {} 689 690 replace_dict['ncomb'] = ncomb 691 692 file = """ \ 693 integer ncomb1 694 parameter (ncomb1=%(ncomb)d) 695 double precision helamp(ncomb1) 696 common /to_helamp/helamp """ % replace_dict 697 698 # Write the file 699 if writer: 700 writer.writelines(file) 701 return True 702 else: 703 return replace_dict
704 705 706 707 #=========================================================================== 708 # write_nexternal_file 709 #===========================================================================
710 - def write_nexternal_file(self, writer, nexternal, ninitial):
711 """Write the nexternal.inc file for MG4""" 712 713 replace_dict = {} 714 715 replace_dict['nexternal'] = nexternal 716 replace_dict['ninitial'] = ninitial 717 718 file = """ \ 719 integer nexternal 720 parameter (nexternal=%(nexternal)d) 721 integer nincoming 722 parameter (nincoming=%(ninitial)d)""" % replace_dict 723 724 # Write the file 725 if writer: 726 writer.writelines(file) 727 return True 728 else: 729 return replace_dict
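
For orientation (illustration only, not code from this module): called with writer=None the routine above just returns the substitution dictionary, while with a writer it writes the small include file and returns True. For a hypothetical 2 -> 3 process the generated text is simply:

    replace_dict = {'nexternal': 5, 'ninitial': 2}
    lines = ["      integer nexternal",
             "      parameter (nexternal=%(nexternal)d)" % replace_dict,
             "      integer nincoming",
             "      parameter (nincoming=%(ninitial)d)" % replace_dict]
    print("\n".join(lines))
    # output:
    #       integer nexternal
    #       parameter (nexternal=5)
    #       integer nincoming
    #       parameter (nincoming=2)
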
730 #=========================================================================== 731 # write_pmass_file 732 #===========================================================================
733 - def write_pmass_file(self, writer, matrix_element):
734 """Write the pmass.inc file for MG4""" 735 736 model = matrix_element.get('processes')[0].get('model') 737 738 lines = [] 739 for wf in matrix_element.get_external_wavefunctions(): 740 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 741 if mass.lower() != "zero": 742 mass = "abs(%s)" % mass 743 744 lines.append("pmass(%d)=%s" % \ 745 (wf.get('number_external'), mass)) 746 747 # Write the file 748 writer.writelines(lines) 749 750 return True
751 752 #=========================================================================== 753 # write_ngraphs_file 754 #===========================================================================
755 - def write_ngraphs_file(self, writer, nconfigs):
756 """Write the ngraphs.inc file for MG4. Needs input from 757 write_configs_file.""" 758 759 file = " integer n_max_cg\n" 760 file = file + "parameter (n_max_cg=%d)" % nconfigs 761 762 # Write the file 763 writer.writelines(file) 764 765 return True
766 767 #=========================================================================== 768 # write_leshouche_file 769 #===========================================================================
770 - def write_leshouche_file(self, writer, matrix_element):
771 """Write the leshouche.inc file for MG4""" 772 773 # Write the file 774 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 775 776 return True
777 778 #=========================================================================== 779 # get_leshouche_lines 780 #===========================================================================
781 - def get_leshouche_lines(self, matrix_element, numproc):
782 """Write the leshouche.inc file for MG4""" 783 784 # Extract number of external particles 785 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 786 787 lines = [] 788 for iproc, proc in enumerate(matrix_element.get('processes')): 789 legs = proc.get_legs_with_decays() 790 lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \ 791 (iproc + 1, numproc+1, nexternal, 792 ",".join([str(l.get('id')) for l in legs]))) 793 if iproc == 0 and numproc == 0: 794 for i in [1, 2]: 795 lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \ 796 (i, nexternal, 797 ",".join([ "%3r" % 0 ] * ninitial + \ 798 [ "%3r" % i ] * (nexternal - ninitial)))) 799 800 # Here goes the color connections corresponding to the JAMPs 801 # Only one output, for the first subproc! 802 if iproc == 0: 803 # If no color basis, just output trivial color flow 804 if not matrix_element.get('color_basis'): 805 for i in [1, 2]: 806 lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \ 807 (i, numproc+1,nexternal, 808 ",".join([ "%3r" % 0 ] * nexternal))) 809 810 else: 811 # First build a color representation dictionnary 812 repr_dict = {} 813 for l in legs: 814 repr_dict[l.get('number')] = \ 815 proc.get('model').get_particle(l.get('id')).get_color()\ 816 * (-1)**(1+l.get('state')) 817 # Get the list of color flows 818 color_flow_list = \ 819 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 820 ninitial) 821 # And output them properly 822 for cf_i, color_flow_dict in enumerate(color_flow_list): 823 for i in [0, 1]: 824 lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \ 825 (i + 1, cf_i + 1, numproc+1, nexternal, 826 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 827 for l in legs]))) 828 829 return lines
830 831 832 833 834 #=========================================================================== 835 # write_maxamps_file 836 #===========================================================================
837 - def write_maxamps_file(self, writer, maxamps, maxflows, 838 maxproc,maxsproc):
839 """Write the maxamps.inc file for MG4.""" 840 841 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 842 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 843 (maxamps, maxflows) 844 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 845 (maxproc, maxsproc) 846 847 # Write the file 848 writer.writelines(file) 849 850 return True
851 852 853 #=========================================================================== 854 # Routines to output UFO models in MG4 format 855 #=========================================================================== 856
857 - def convert_model(self, model, wanted_lorentz = [], 858 wanted_couplings = []):
859 """ Create a full valid MG4 model from a MG5 model (coming from UFO)""" 860 861 # Make sure aloha is in quadruple precision if needed 862 old_aloha_mp=aloha.mp_precision 863 aloha.mp_precision=self.opt['mp'] 864 865 # create the MODEL 866 write_dir=pjoin(self.dir_path, 'Source', 'MODEL') 867 model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic) 868 model_builder.build(wanted_couplings) 869 870 # Backup the loop mode, because it can be changed in what follows. 871 old_loop_mode = aloha.loop_mode 872 873 # Create the aloha model or use the existing one (for loop exporters 874 # this is useful as the aloha model will be used again in the 875 # LoopHelasMatrixElements generated). We do not save the model generated 876 # here if it didn't exist already because it would be a waste of 877 # memory for tree level applications since aloha is only needed at the 878 # time of creating the aloha fortran subroutines. 879 if hasattr(self, 'aloha_model'): 880 aloha_model = self.aloha_model 881 else: 882 aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath'))) 883 aloha_model.add_Lorentz_object(model.get('lorentz')) 884 885 # Compute the subroutines 886 if wanted_lorentz: 887 aloha_model.compute_subset(wanted_lorentz) 888 else: 889 aloha_model.compute_all(save=False) 890 891 # Write them out 892 write_dir=pjoin(self.dir_path, 'Source', 'DHELAS') 893 aloha_model.write(write_dir, 'Fortran') 894 895 # Revert the original aloha loop mode 896 aloha.loop_mode = old_loop_mode 897 898 #copy Helas Template 899 cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile') 900 if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]): 901 cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f', 902 write_dir+'/aloha_functions.f') 903 aloha_model.loop_mode = False 904 else: 905 cp(MG5DIR + '/aloha/template_files/aloha_functions.f', 906 write_dir+'/aloha_functions.f') 907 create_aloha.write_aloha_file_inc(write_dir, '.f', '.o') 908 909 # Make final link in the Process 910 self.make_model_symbolic_link() 911 912 # Re-establish original aloha mode 913 aloha.mp_precision=old_aloha_mp
914 915 916 #=========================================================================== 917 # Helper functions 918 #===========================================================================
919 - def modify_grouping(self, matrix_element):
 920          """Allow the grouping to be modified (if grouping is in place). 
 921          Returns two values: 
 922           - True/False whether the matrix_element was modified 
 923           - the new (or old) matrix element""" 
 924   
 925          return False, matrix_element 
926 927 #=========================================================================== 928 # Helper functions 929 #===========================================================================
930 - def get_mg5_info_lines(self):
931 """Return info lines for MG5, suitable to place at beginning of 932 Fortran files""" 933 934 info = misc.get_pkg_info() 935 info_lines = "" 936 if info and info.has_key('version') and info.has_key('date'): 937 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 938 (info['version'], info['date']) 939 info_lines = info_lines + \ 940 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 941 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 942 else: 943 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 944 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 945 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 946 947 return info_lines
948
949 - def get_process_info_lines(self, matrix_element):
 950          """Return info lines describing the processes for this matrix element""" 
 951   
 952          return "\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 
 953                            for process in matrix_element.get('processes')]) 
954 955
956 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
957 """Return the Helicity matrix definition lines for this matrix element""" 958 959 helicity_line_list = [] 960 i = 0 961 for helicities in matrix_element.get_helicity_matrix(): 962 i = i + 1 963 int_list = [i, len(helicities)] 964 int_list.extend(helicities) 965 helicity_line_list.append(\ 966 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 967 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 968 969 return "\n".join(helicity_line_list)
970
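
As a concrete illustration (not from the module) of the DATA lines produced above, assuming a 2 -> 2 process whose first helicity combination is (-1, -1, -1, 1):

    helicities = [-1, -1, -1, 1]
    line = ("DATA (NHEL(I,%4r),I=1,%d) /" % (1, len(helicities)) +
            ",".join(['%2r'] * len(helicities)) % tuple(helicities) + "/")
    print(line)
    # DATA (NHEL(I,   1),I=1,4) /-1,-1,-1, 1/
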
971 - def get_ic_line(self, matrix_element):
972 """Return the IC definition line coming after helicities, required by 973 switchmom in madevent""" 974 975 nexternal = matrix_element.get_nexternal_ninitial()[0] 976 int_list = range(1, nexternal + 1) 977 978 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 979 ",".join([str(i) for \ 980 i in int_list]))
981
982 - def set_chosen_SO_index(self, process, squared_orders):
983 """ From the squared order constraints set by the user, this function 984 finds what indices of the squared_orders list the user intends to pick. 985 It returns this as a string of comma-separated successive '.true.' or 986 '.false.' for each index.""" 987 988 user_squared_orders = process.get('squared_orders') 989 split_orders = process.get('split_orders') 990 991 if len(user_squared_orders)==0: 992 return ','.join(['.true.']*len(squared_orders)) 993 994 res = [] 995 for sqsos in squared_orders: 996 is_a_match = True 997 for user_sqso, value in user_squared_orders.items(): 998 if user_sqso == 'WEIGHTED' : 999 logger.debug('WEIGHTED^2%s%s encoutered. Please check behavior for' + \ 1000 'https://bazaar.launchpad.net/~maddevelopers/mg5amcnlo/3.0.1/revision/613', \ 1001 (process.get_squared_order_type(user_sqso), sqsos[split_orders.index(user_sqso)])) 1002 1003 if (process.get_squared_order_type(user_sqso) =='==' and \ 1004 value!=sqsos[split_orders.index(user_sqso)]) or \ 1005 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 1006 value<sqsos[split_orders.index(user_sqso)]) or \ 1007 (process.get_squared_order_type(user_sqso) == '>' and \ 1008 value>=sqsos[split_orders.index(user_sqso)]): 1009 is_a_match = False 1010 break 1011 res.append('.true.' if is_a_match else '.false.') 1012 1013 return ','.join(res)
1014
1015 - def get_split_orders_lines(self, orders, array_name, n=5):
1016 """ Return the split orders definition as defined in the list orders and 1017 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1018 1019 ret_list = [] 1020 for index, order in enumerate(orders): 1021 for k in xrange(0, len(order), n): 1022 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1023 (array_name,index + 1, k + 1, min(k + n, len(order)), 1024 ','.join(["%5r" % i for i in order[k:k + n]]))) 1025 return ret_list
1026
1027 - def format_integer_list(self, list, name, n=5):
1028 """ Return an initialization of the python list in argument following 1029 the fortran syntax using the data keyword assignment, filling an array 1030 of name 'name'. It splits rows in chunks of size n.""" 1031 1032 ret_list = [] 1033 for k in xrange(0, len(list), n): 1034 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1035 (name, k + 1, min(k + n, len(list)), 1036 ','.join(["%5r" % i for i in list[k:k + n]]))) 1037 return ret_list
1038
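
The chunking logic shared by get_split_orders_lines and format_integer_list can be reproduced standalone; a minimal sketch (not part of the module), assuming chunks of n=5:

    def chunked_data_lines(values, name, n=5):
        # emit Fortran DATA statements filling array `name` in chunks of n,
        # mirroring the layout produced by format_integer_list above
        lines = []
        for k in range(0, len(values), n):
            lines.append("DATA (%s(i),i=%3r,%3r) /%s/" %
                         (name, k + 1, min(k + n, len(values)),
                          ','.join(["%5r" % v for v in values[k:k + n]])))
        return lines

    for line in chunked_data_lines([1, -1, 2, -2, 3, -3, 4], 'IDS'):
        print(line)
    # DATA (IDS(i),i=  1,  5) /    1,   -1,    2,   -2,    3/
    # DATA (IDS(i),i=  6,  7) /   -3,    4/
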
1039 - def get_color_data_lines(self, matrix_element, n=6):
1040 """Return the color matrix definition lines for this matrix element. Split 1041 rows in chunks of size n.""" 1042 1043 if not matrix_element.get('color_matrix'): 1044 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 1045 else: 1046 ret_list = [] 1047 my_cs = color.ColorString() 1048 for index, denominator in \ 1049 enumerate(matrix_element.get('color_matrix').\ 1050 get_line_denominators()): 1051 # First write the common denominator for this color matrix line 1052 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 1053 # Then write the numerators for the matrix elements 1054 num_list = matrix_element.get('color_matrix').\ 1055 get_line_numerators(index, denominator) 1056 1057 for k in xrange(0, len(num_list), n): 1058 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 1059 (index + 1, k + 1, min(k + n, len(num_list)), 1060 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 1061 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 1062 ret_list.append("C %s" % repr(my_cs)) 1063 return ret_list
1064 1065
1066 - def get_den_factor_line(self, matrix_element):
1067 """Return the denominator factor line for this matrix element""" 1068 1069 return "DATA IDEN/%2r/" % \ 1070 matrix_element.get_denominator_factor()
1071
1072 - def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
1073 """Return the ICOLAMP matrix, showing which JAMPs contribute to 1074 which configs (diagrams).""" 1075 1076 ret_list = [] 1077 1078 booldict = {False: ".false.", True: ".true."} 1079 1080 if not matrix_element.get('color_basis'): 1081 # No color, so only one color factor. Simply write a ".true." 1082 # for each config (i.e., each diagram with only 3 particle 1083 # vertices 1084 configs = len(mapconfigs) 1085 ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \ 1086 (num_matrix_element, configs, 1087 ','.join([".true." for i in range(configs)]))) 1088 return ret_list 1089 1090 # There is a color basis - create a list showing which JAMPs have 1091 # contributions to which configs 1092 1093 # Only want to include leading color flows, so find max_Nc 1094 color_basis = matrix_element.get('color_basis') 1095 1096 # We don't want to include the power of Nc's which come from the potential 1097 # loop color trace (i.e. in the case of a closed fermion loop for example) 1098 # so we subtract it here when computing max_Nc 1099 max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in 1100 color_basis.values()],[])) 1101 1102 # Crate dictionary between diagram number and JAMP number 1103 diag_jamp = {} 1104 for ijamp, col_basis_elem in \ 1105 enumerate(sorted(matrix_element.get('color_basis').keys())): 1106 for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]: 1107 # Only use color flows with Nc == max_Nc. However, notice that 1108 # we don't want to include the Nc power coming from the loop 1109 # in this counting. 1110 if (diag_tuple[4]-diag_tuple[5]) == max_Nc: 1111 diag_num = diag_tuple[0] + 1 1112 # Add this JAMP number to this diag_num 1113 diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \ 1114 [ijamp+1] 1115 1116 colamps = ijamp + 1 1117 for iconfig, num_diag in enumerate(mapconfigs): 1118 if num_diag == 0: 1119 continue 1120 1121 # List of True or False 1122 bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)] 1123 # Add line 1124 ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \ 1125 (iconfig+1, num_matrix_element, colamps, 1126 ','.join(["%s" % booldict[b] for b in \ 1127 bool_list]))) 1128 1129 return ret_list
1130
1131 - def get_amp2_lines(self, matrix_element, config_map = []):
1132 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 1133 1134 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 1135 # Get minimum legs in a vertex 1136 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 1137 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1138 minvert = min(vert_list) if vert_list!=[] else 0 1139 1140 ret_lines = [] 1141 if config_map: 1142 # In this case, we need to sum up all amplitudes that have 1143 # identical topologies, as given by the config_map (which 1144 # gives the topology/config for each of the diagrams 1145 diagrams = matrix_element.get('diagrams') 1146 # Combine the diagrams with identical topologies 1147 config_to_diag_dict = {} 1148 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1149 if config_map[idiag] == 0: 1150 continue 1151 try: 1152 config_to_diag_dict[config_map[idiag]].append(idiag) 1153 except KeyError: 1154 config_to_diag_dict[config_map[idiag]] = [idiag] 1155 # Write out the AMP2s summing squares of amplitudes belonging 1156 # to eiher the same diagram or different diagrams with 1157 # identical propagator properties. Note that we need to use 1158 # AMP2 number corresponding to the first diagram number used 1159 # for that AMP2. 1160 for config in sorted(config_to_diag_dict.keys()): 1161 1162 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1163 {"num": (config_to_diag_dict[config][0] + 1)} 1164 1165 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1166 sum([diagrams[idiag].get('amplitudes') for \ 1167 idiag in config_to_diag_dict[config]], [])]) 1168 1169 # Not using \sum |M|^2 anymore since this creates troubles 1170 # when ckm is not diagonal due to the JIM mechanism. 1171 if '+' in amp: 1172 line += "(%s)*dconjg(%s)" % (amp, amp) 1173 else: 1174 line += "%s*dconjg(%s)" % (amp, amp) 1175 ret_lines.append(line) 1176 else: 1177 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1178 # Ignore any diagrams with 4-particle vertices. 1179 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1180 continue 1181 # Now write out the expression for AMP2, meaning the sum of 1182 # squared amplitudes belonging to the same diagram 1183 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1184 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1185 {"num": a.get('number')} for a in \ 1186 diag.get('amplitudes')]) 1187 ret_lines.append(line) 1188 1189 return ret_lines
1190   
1191      #=========================================================================== 
1192      # Returns the data statements initializing the coefficients for the JAMP 
1193      # decomposition. It is used when the JAMP initialization is done through 
1194      # big arrays containing the projection coefficients. 
1195      #=========================================================================== 
1196 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1197 n=50, Nc_value=3):
1198 """This functions return the lines defining the DATA statement setting 1199 the coefficients building the JAMPS out of the AMPS. Split rows in 1200 bunches of size n. 1201 One can specify the color_basis from which the color amplitudes originates 1202 so that there are commentaries telling what color structure each JAMP 1203 corresponds to.""" 1204 1205 if(not isinstance(color_amplitudes,list) or 1206 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1207 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs" 1208 1209 res_list = [] 1210 my_cs = color.ColorString() 1211 for index, coeff_list in enumerate(color_amplitudes): 1212 # Create the list of the complete numerical coefficient. 1213 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1214 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1215 coefficient in coeff_list] 1216 # Create the list of the numbers of the contributing amplitudes. 1217 # Mutliply by -1 for those which have an imaginary coefficient. 1218 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1219 for coefficient in coeff_list] 1220 # Find the common denominator. 1221 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1222 num_list=[(coefficient*commondenom).numerator \ 1223 for coefficient in coefs_list] 1224 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1225 index+1,len(num_list))) 1226 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1227 index+1,commondenom)) 1228 if color_basis: 1229 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1230 res_list.append("C %s" % repr(my_cs)) 1231 for k in xrange(0, len(num_list), n): 1232 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1233 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1234 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1235 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1236 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1237 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1238 pass 1239 return res_list
1240 1241
1242 - def get_JAMP_lines_split_order(self, col_amps, split_order_amps, 1243 split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
1244 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1245 defined as a matrix element or directly as a color_amplitudes dictionary. 1246 The split_order_amps specifies the group of amplitudes sharing the same 1247 amplitude orders which should be put in together in a given set of JAMPS. 1248 The split_order_amps is supposed to have the format of the second output 1249 of the function get_split_orders_mapping function in helas_objects.py. 1250 The split_order_names is optional (it should correspond to the process 1251 'split_orders' attribute) and only present to provide comments in the 1252 JAMP definitions in the code.""" 1253 1254 # Let the user call get_JAMP_lines_split_order directly from a 1255 error_msg="Malformed '%s' argument passed to the "+\ 1256 "get_JAMP_lines_split_order function: %s"%str(split_order_amps) 1257 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1258 color_amplitudes=col_amps.get_color_amplitudes() 1259 elif(isinstance(col_amps,list)): 1260 if(col_amps and isinstance(col_amps[0],list)): 1261 color_amplitudes=col_amps 1262 else: 1263 raise MadGraph5Error, error_msg%'col_amps' 1264 else: 1265 raise MadGraph5Error, error_msg%'col_amps' 1266 1267 # Verify the sanity of the split_order_amps and split_order_names args 1268 if isinstance(split_order_amps,list): 1269 for elem in split_order_amps: 1270 if len(elem)!=2: 1271 raise MadGraph5Error, error_msg%'split_order_amps' 1272 # Check the first element of the two lists to make sure they are 1273 # integers, although in principle they should all be integers. 1274 if not isinstance(elem[0],tuple) or \ 1275 not isinstance(elem[1],tuple) or \ 1276 not isinstance(elem[0][0],int) or \ 1277 not isinstance(elem[1][0],int): 1278 raise MadGraph5Error, error_msg%'split_order_amps' 1279 else: 1280 raise MadGraph5Error, error_msg%'split_order_amps' 1281 1282 if not split_order_names is None: 1283 if isinstance(split_order_names,list): 1284 # Should specify the same number of names as there are elements 1285 # in the key of the split_order_amps. 1286 if len(split_order_names)!=len(split_order_amps[0][0]): 1287 raise MadGraph5Error, error_msg%'split_order_names' 1288 # Check the first element of the list to be a string 1289 if not isinstance(split_order_names[0],str): 1290 raise MadGraph5Error, error_msg%'split_order_names' 1291 else: 1292 raise MadGraph5Error, error_msg%'split_order_names' 1293 1294 # Now scan all contributing orders to be individually computed and 1295 # construct the list of color_amplitudes for JAMP to be constructed 1296 # accordingly. 1297 res_list=[] 1298 for i, amp_order in enumerate(split_order_amps): 1299 col_amps_order = [] 1300 for jamp in color_amplitudes: 1301 col_amps_order.append(filter(lambda col_amp: 1302 col_amp[1] in amp_order[1],jamp)) 1303 if split_order_names: 1304 res_list.append('C JAMPs contributing to orders '+' '.join( 1305 ['%s=%i'%order for order in zip(split_order_names, 1306 amp_order[0])])) 1307 if self.opt['export_format'] in ['madloop_matchbox']: 1308 res_list.extend(self.get_JAMP_lines(col_amps_order, 1309 JAMP_format="JAMP(%s,{0})".format(str(i+1)), 1310 JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))) 1311 else: 1312 res_list.extend(self.get_JAMP_lines(col_amps_order, 1313 JAMP_format="JAMP(%s,{0})".format(str(i+1)))) 1314 1315 return res_list
1316 1317
1318 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", 1319 split=-1):
1320 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1321 defined as a matrix element or directly as a color_amplitudes dictionary, 1322 Jamp_formatLC should be define to allow to add LeadingColor computation 1323 (usefull for MatchBox) 1324 The split argument defines how the JAMP lines should be split in order 1325 not to be too long.""" 1326 1327 # Let the user call get_JAMP_lines directly from a MatrixElement or from 1328 # the color amplitudes lists. 1329 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1330 color_amplitudes=col_amps.get_color_amplitudes() 1331 elif(isinstance(col_amps,list)): 1332 if(col_amps and isinstance(col_amps[0],list)): 1333 color_amplitudes=col_amps 1334 else: 1335 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1336 else: 1337 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1338 1339 1340 res_list = [] 1341 for i, coeff_list in enumerate(color_amplitudes): 1342 # It might happen that coeff_list is empty if this function was 1343 # called from get_JAMP_lines_split_order (i.e. if some color flow 1344 # does not contribute at all for a given order). 1345 # In this case we simply set it to 0. 1346 if coeff_list==[]: 1347 res_list.append(((JAMP_format+"=0D0") % str(i + 1))) 1348 continue 1349 # Break the JAMP definition into 'n=split' pieces to avoid having 1350 # arbitrarly long lines. 1351 first=True 1352 n = (len(coeff_list)+1 if split<=0 else split) 1353 while coeff_list!=[]: 1354 coefs=coeff_list[:n] 1355 coeff_list=coeff_list[n:] 1356 res = ((JAMP_format+"=") % str(i + 1)) + \ 1357 ((JAMP_format % str(i + 1)) if not first and split>0 else '') 1358 1359 first=False 1360 # Optimization: if all contributions to that color basis element have 1361 # the same coefficient (up to a sign), put it in front 1362 list_fracs = [abs(coefficient[0][1]) for coefficient in coefs] 1363 common_factor = False 1364 diff_fracs = list(set(list_fracs)) 1365 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1: 1366 common_factor = True 1367 global_factor = diff_fracs[0] 1368 res = res + '%s(' % self.coeff(1, global_factor, False, 0) 1369 1370 # loop for JAMP 1371 for (coefficient, amp_number) in coefs: 1372 if not coefficient: 1373 continue 1374 if common_factor: 1375 res = (res + "%s" + AMP_format) % \ 1376 (self.coeff(coefficient[0], 1377 coefficient[1] / abs(coefficient[1]), 1378 coefficient[2], 1379 coefficient[3]), 1380 str(amp_number)) 1381 else: 1382 res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0], 1383 coefficient[1], 1384 coefficient[2], 1385 coefficient[3]), 1386 str(amp_number)) 1387 1388 if common_factor: 1389 res = res + ')' 1390 1391 res_list.append(res) 1392 1393 return res_list
1394
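
To make the expected input concrete (an illustration, not code from the module): each entry of a color_amplitudes row pairs a coefficient tuple (fermion_factor, fraction, is_imaginary, Nc_power) - the layout also used by get_JAMP_coefs above - with an amplitude number, and get_JAMP_lines turns each row into one JAMP assignment:

    import fractions

    # hypothetical color decomposition: two color flows built from three amplitudes
    color_amplitudes = [
        [((1, fractions.Fraction(1, 1), False, 0), 1),     # +1   * AMP(1)
         ((1, fractions.Fraction(1, 3), False, 0), 2)],    # +1/3 * AMP(2)
        [((1, fractions.Fraction(1, 1), False, 0), 3)],    # +1   * AMP(3)
    ]

    # assuming `exporter` is an initialised ProcessExporterFortran instance,
    # exporter.get_JAMP_lines(color_amplitudes) returns one Fortran assignment
    # per color flow, i.e. lines of the form JAMP(1)=...AMP(1)...AMP(2)... and
    # JAMP(2)=...AMP(3)..., with numeric prefactors rendered by self.coeff().
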
1395 - def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
1396 """Generate the PDF lines for the auto_dsig.f file""" 1397 1398 processes = matrix_element.get('processes') 1399 model = processes[0].get('model') 1400 1401 pdf_definition_lines = "" 1402 pdf_data_lines = "" 1403 pdf_lines = "" 1404 1405 if ninitial == 1: 1406 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 1407 for i, proc in enumerate(processes): 1408 process_line = proc.base_string() 1409 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 1410 pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n" 1411 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 1412 else: 1413 # Pick out all initial state particles for the two beams 1414 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 1415 p in processes]))), 1416 sorted(list(set([p.get_initial_pdg(2) for \ 1417 p in processes])))] 1418 1419 # Prepare all variable names 1420 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 1421 sum(initial_states,[])]) 1422 for key,val in pdf_codes.items(): 1423 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 1424 1425 # Set conversion from PDG code to number used in PDF calls 1426 pdgtopdf = {21: 0, 22: 7} 1427 1428 # Fill in missing entries of pdgtopdf 1429 for pdg in sum(initial_states,[]): 1430 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 1431 pdgtopdf[pdg] = pdg 1432 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 1433 # If any particle has pdg code 7, we need to use something else 1434 pdgtopdf[pdg] = 6000000 + pdg 1435 1436 # Get PDF variable declarations for all initial states 1437 for i in [0,1]: 1438 pdf_definition_lines += "DOUBLE PRECISION " + \ 1439 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1440 for pdg in \ 1441 initial_states[i]]) + \ 1442 "\n" 1443 1444 # Get PDF data lines for all initial states 1445 for i in [0,1]: 1446 pdf_data_lines += "DATA " + \ 1447 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1448 for pdg in initial_states[i]]) + \ 1449 "/%d*1D0/" % len(initial_states[i]) + \ 1450 "\n" 1451 1452 # Get PDF lines for all different initial states 1453 for i, init_states in enumerate(initial_states): 1454 if subproc_group: 1455 pdf_lines = pdf_lines + \ 1456 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 1457 % (i + 1, i + 1) 1458 else: 1459 pdf_lines = pdf_lines + \ 1460 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 1461 % (i + 1, i + 1) 1462 1463 for nbi,initial_state in enumerate(init_states): 1464 if initial_state in pdf_codes.keys(): 1465 if subproc_group: 1466 pdf_lines = pdf_lines + \ 1467 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \ 1468 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 1469 (pdf_codes[initial_state], 1470 i + 1, i + 1, pdgtopdf[initial_state], 1471 i + 1, i + 1) 1472 else: 1473 pdf_lines = pdf_lines + \ 1474 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \ 1475 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 1476 (pdf_codes[initial_state], 1477 i + 1, i + 1, pdgtopdf[initial_state], 1478 i + 1, 1479 i + 1, i + 1) 1480 pdf_lines = pdf_lines + "ENDIF\n" 1481 1482 # Add up PDFs for the different initial state particles 1483 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 1484 for proc in processes: 1485 process_line = proc.base_string() 1486 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! 
" + process_line 1487 pdf_lines = pdf_lines + "\nPD(IPROC)=" 1488 for ibeam in [1, 2]: 1489 initial_state = proc.get_initial_pdg(ibeam) 1490 if initial_state in pdf_codes.keys(): 1491 pdf_lines = pdf_lines + "%s%d*" % \ 1492 (pdf_codes[initial_state], ibeam) 1493 else: 1494 pdf_lines = pdf_lines + "1d0*" 1495 # Remove last "*" from pdf_lines 1496 pdf_lines = pdf_lines[:-1] + "\n" 1497 pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n" 1498 1499 # Remove last line break from the return variables 1500 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1501 1502 #=========================================================================== 1503 # write_props_file 1504 #===========================================================================
1505 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1506 """Write the props.inc file for MadEvent. Needs input from 1507 write_configs_file.""" 1508 1509 lines = [] 1510 1511 particle_dict = matrix_element.get('processes')[0].get('model').\ 1512 get('particle_dict') 1513 1514 for iconf, configs in enumerate(s_and_t_channels): 1515 for vertex in configs[0] + configs[1][:-1]: 1516 leg = vertex.get('legs')[-1] 1517 if leg.get('id') not in particle_dict: 1518 # Fake propagator used in multiparticle vertices 1519 mass = 'zero' 1520 width = 'zero' 1521 pow_part = 0 1522 else: 1523 particle = particle_dict[leg.get('id')] 1524 # Get mass 1525 if particle.get('mass').lower() == 'zero': 1526 mass = particle.get('mass') 1527 else: 1528 mass = "abs(%s)" % particle.get('mass') 1529 # Get width 1530 if particle.get('width').lower() == 'zero': 1531 width = particle.get('width') 1532 else: 1533 width = "abs(%s)" % particle.get('width') 1534 1535 pow_part = 1 + int(particle.is_boson()) 1536 1537 lines.append("prmass(%d,%d) = %s" % \ 1538 (leg.get('number'), iconf + 1, mass)) 1539 lines.append("prwidth(%d,%d) = %s" % \ 1540 (leg.get('number'), iconf + 1, width)) 1541 lines.append("pow(%d,%d) = %d" % \ 1542 (leg.get('number'), iconf + 1, pow_part)) 1543 1544 # Write the file 1545 writer.writelines(lines) 1546 1547 return True
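# A brief sketch of the propagator power written above: pow is 1 for fermions
# and 2 for bosons (1 + int(is_boson)), while prmass/prwidth hold either 'zero'
# or abs(<mass/width parameter>), with the parameter names taken from the model.
# Hypothetical helper, for illustration only.
def _toy_propagator_power(is_boson):
    return 1 + int(is_boson)
# _toy_propagator_power(True) -> 2 (bosonic propagator), _toy_propagator_power(False) -> 1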
1548 1549 #=========================================================================== 1550 # write_configs_file 1551 #===========================================================================
1552 - def write_configs_file(self, writer, matrix_element):
1553 """Write the configs.inc file for MadEvent""" 1554 1555 # Extract number of external particles 1556 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1557 1558 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1559 mapconfigs = [c[0] for c in configs] 1560 model = matrix_element.get('processes')[0].get('model') 1561 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1562 [[c[1]] for c in configs], 1563 mapconfigs, 1564 nexternal, ninitial, 1565 model)
1566 1567 #=========================================================================== 1568 # write_configs_file_from_diagrams 1569 #===========================================================================
1570 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 1571 nexternal, ninitial, model):
1572 """Write the actual configs.inc file. 1573 1574 configs is the diagrams corresponding to configs (each 1575 diagrams is a list of corresponding diagrams for all 1576 subprocesses, with None if there is no corresponding diagrams 1577 for a given process). 1578 mapconfigs gives the diagram number for each config. 1579 1580 For s-channels, we need to output one PDG for each subprocess in 1581 the subprocess group, in order to be able to pick the right 1582 one for multiprocesses.""" 1583 1584 lines = [] 1585 1586 s_and_t_channels = [] 1587 1588 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 1589 for config in configs if [d for d in config if d][0].\ 1590 get_vertex_leg_numbers()!=[]] 1591 minvert = min(vert_list) if vert_list!=[] else 0 1592 1593 # Number of subprocesses 1594 nsubprocs = len(configs[0]) 1595 1596 nconfigs = 0 1597 1598 new_pdg = model.get_first_non_pdg() 1599 1600 for iconfig, helas_diags in enumerate(configs): 1601 if any(vert > minvert for vert in [d for d in helas_diags if d]\ 1602 [0].get_vertex_leg_numbers()) : 1603 # Only 3-vertices allowed in configs.inc except for vertices 1604 # which originate from a shrunk loop. 1605 continue 1606 nconfigs += 1 1607 1608 # Need s- and t-channels for all subprocesses, including 1609 # those that don't contribute to this config 1610 empty_verts = [] 1611 stchannels = [] 1612 for h in helas_diags: 1613 if h: 1614 # get_s_and_t_channels gives vertices starting from 1615 # final state external particles and working inwards 1616 stchannels.append(h.get('amplitudes')[0].\ 1617 get_s_and_t_channels(ninitial, model, new_pdg)) 1618 else: 1619 stchannels.append((empty_verts, None)) 1620 1621 # For t-channels, just need the first non-empty one 1622 tchannels = [t for s,t in stchannels if t != None][0] 1623 1624 # For s_and_t_channels (to be used later) use only first config 1625 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 1626 tchannels]) 1627 1628 # Make sure empty_verts is same length as real vertices 1629 if any([s for s,t in stchannels]): 1630 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 1631 1632 # Reorganize s-channel vertices to get a list of all 1633 # subprocesses for each vertex 1634 schannels = zip(*[s for s,t in stchannels]) 1635 else: 1636 schannels = [] 1637 1638 allchannels = schannels 1639 if len(tchannels) > 1: 1640 # Write out tchannels only if there are any non-trivial ones 1641 allchannels = schannels + tchannels 1642 1643 # Write out propagators for s-channel and t-channel vertices 1644 1645 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 1646 # Correspondance between the config and the diagram = amp2 1647 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 1648 mapconfigs[iconfig])) 1649 1650 for verts in allchannels: 1651 if verts in schannels: 1652 vert = [v for v in verts if v][0] 1653 else: 1654 vert = verts 1655 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1656 last_leg = vert.get('legs')[-1] 1657 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 1658 (last_leg.get('number'), nconfigs, len(daughters), 1659 ",".join([str(d) for d in daughters]))) 1660 if verts in schannels: 1661 pdgs = [] 1662 for v in verts: 1663 if v: 1664 pdgs.append(v.get('legs')[-1].get('id')) 1665 else: 1666 pdgs.append(0) 1667 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1668 (last_leg.get('number'), nconfigs, nsubprocs, 1669 ",".join([str(d) for d in pdgs]))) 1670 lines.append("data tprid(%d,%d)/0/" % \ 1671 (last_leg.get('number'), 
nconfigs)) 1672 elif verts in tchannels[:-1]: 1673 lines.append("data tprid(%d,%d)/%d/" % \ 1674 (last_leg.get('number'), nconfigs, 1675 abs(last_leg.get('id')))) 1676 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1677 (last_leg.get('number'), nconfigs, nsubprocs, 1678 ",".join(['0'] * nsubprocs))) 1679 1680 # Write out number of configs 1681 lines.append("# Number of configs") 1682 lines.append("data mapconfig(0)/%d/" % nconfigs) 1683 1684 # Write the file 1685 writer.writelines(lines) 1686 1687 return s_and_t_channels
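# A compact sketch of the data statements assembled above: for every kept
# configuration, iforest lists the daughters of each internal leg, sprop holds
# the s-channel PDG code for each subprocess, and tprid the t-channel one.  The
# helper below simply mirrors the string pattern used in the loop; the leg and
# configuration numbers are toy values.
def _toy_iforest_line(last_leg_number, iconfig, daughters):
    return "data (iforest(i,%d,%d),i=1,%d)/%s/" % \
           (last_leg_number, iconfig, len(daughters),
            ",".join(str(d) for d in daughters))
# _toy_iforest_line(-1, 1, [3, 4]) -> 'data (iforest(i,-1,1),i=1,2)/3,4/'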
1688 1689 #=========================================================================== 1690 # Global helper methods 1691 #=========================================================================== 1692
1693 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1694 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1695 1696 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1697 1698 if total_coeff == 1: 1699 if is_imaginary: 1700 return '+imag1*' 1701 else: 1702 return '+' 1703 elif total_coeff == -1: 1704 if is_imaginary: 1705 return '-imag1*' 1706 else: 1707 return '-' 1708 1709 res_str = '%+iD0' % total_coeff.numerator 1710 1711 if total_coeff.denominator != 1: 1712 # total_coeff is not an integer: append its denominator 1713 res_str = res_str + '/%iD0' % total_coeff.denominator 1714 1715 if is_imaginary: 1716 res_str = res_str + '*imag1' 1717 1718 return res_str + '*'
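# A few worked examples of coeff() above (Nc_value defaults to 3); the wrapper
# below is a hypothetical convenience for illustration, not part of the module.
def _toy_coeff_examples(exporter):
    assert exporter.coeff(1, fractions.Fraction(1, 3), False, 1) == '+'
    assert exporter.coeff(-1, fractions.Fraction(1, 2), True, 0) == '-1D0/2D0*imag1*'
    assert exporter.coeff(1, fractions.Fraction(2, 1), False, 0) == '+2D0*'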
1719 1720
1721 - def set_fortran_compiler(self, default_compiler, force=False):
1722 """Set compiler based on what's available on the system""" 1723 1724 # Check for compiler 1725 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1726 f77_compiler = default_compiler['fortran'] 1727 elif misc.which('gfortran'): 1728 f77_compiler = 'gfortran' 1729 elif misc.which('g77'): 1730 f77_compiler = 'g77' 1731 elif misc.which('f77'): 1732 f77_compiler = 'f77' 1733 elif default_compiler['fortran']: 1734 logger.warning('No Fortran Compiler detected! Please install one') 1735 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1736 else: 1737 raise MadGraph5Error, 'No Fortran Compiler detected! Please install one' 1738 logger.info('Use Fortran compiler ' + f77_compiler) 1739 1740 1741 # Check for compiler. 1. set default. 1742 if default_compiler['f2py']: 1743 f2py_compiler = default_compiler['f2py'] 1744 else: 1745 f2py_compiler = '' 1746 # Try to find the correct one. 1747 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1748 f2py_compiler = default_compiler['f2py'] 1749 elif misc.which('f2py'): 1750 f2py_compiler = 'f2py' 1751 elif sys.version_info[1] == 6: 1752 if misc.which('f2py-2.6'): 1753 f2py_compiler = 'f2py-2.6' 1754 elif misc.which('f2py2.6'): 1755 f2py_compiler = 'f2py2.6' 1756 elif sys.version_info[1] == 7: 1757 if misc.which('f2py-2.7'): 1758 f2py_compiler = 'f2py-2.7' 1759 elif misc.which('f2py2.7'): 1760 f2py_compiler = 'f2py2.7' 1761 1762 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1763 1764 1765 self.replace_make_opt_f_compiler(to_replace) 1766 # Replace also for Template but not for cluster 1767 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1768 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1769 1770 return f77_compiler
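# A condensed sketch of the fallback chain applied above: use the configured
# compiler if it is found on the PATH, otherwise try gfortran, g77 and f77 in
# that order.  This simplification skips the logging and error handling of the
# real method and is shown for illustration only.
def _toy_pick_fortran_compiler(preferred, which=misc.which):
    for candidate in [preferred, 'gfortran', 'g77', 'f77']:
        if candidate and which(candidate):
            return candidate
    return preferred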
1771 1772 # an alias for backward compatibility 1773 set_compiler = set_fortran_compiler 1774 1775
1776 - def set_cpp_compiler(self, default_compiler, force=False):
1777 """Set compiler based on what's available on the system""" 1778 1779 # Check for compiler 1780 if default_compiler and misc.which(default_compiler): 1781 compiler = default_compiler 1782 elif misc.which('g++'): 1783 #check if clang version 1784 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1785 stderr=subprocess.PIPE) 1786 out, _ = p.communicate() 1787 if 'clang' in out and misc.which('clang'): 1788 compiler = 'clang' 1789 else: 1790 compiler = 'g++' 1791 elif misc.which('c++'): 1792 compiler = 'c++' 1793 elif misc.which('clang'): 1794 compiler = 'clang' 1795 elif default_compiler: 1796 logger.warning('No c++ Compiler detected! Please install one') 1797 compiler = default_compiler # maybe misc fail so try with it 1798 else: 1799 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1800 logger.info('Use c++ compiler ' + compiler) 1801 self.replace_make_opt_c_compiler(compiler) 1802 # Replace also for Template but not for cluster 1803 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite and \ 1804 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 1805 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1806 1807 return compiler
1808 1809
1810 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1811 """Set FC=compiler in Source/make_opts""" 1812 1813 assert isinstance(compilers, dict) 1814 1815 mod = False #avoid to rewrite the file if not needed 1816 if not root_dir: 1817 root_dir = self.dir_path 1818 1819 compiler= compilers['fortran'] 1820 f2py_compiler = compilers['f2py'] 1821 if not f2py_compiler: 1822 f2py_compiler = 'f2py' 1823 for_update= {'DEFAULT_F_COMPILER':compiler, 1824 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1825 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1826 1827 try: 1828 common_run_interface.CommonRunCmd.update_make_opts_full( 1829 make_opts, for_update) 1830 except IOError: 1831 if root_dir == self.dir_path: 1832 logger.info('Fail to set compiler. Trying to continue anyway.')
1833
1834 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1835 """Set CXX=compiler in Source/make_opts. 1836 The version is also checked, in order to set some extra flags 1837 if the compiler is clang (on MACOS)""" 1838 1839 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1840 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1841 1842 1843 # list of the variable to set in the make_opts file 1844 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1845 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1846 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1847 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1848 } 1849 1850 # for MOJAVE remove the MACFLAG: 1851 if is_clang: 1852 import platform 1853 version, _, _ = platform.mac_ver() 1854 if not version:# not linux 1855 version = 14 # set version to remove MACFLAG 1856 else: 1857 version = int(version.split('.')[1]) 1858 if version >= 14: 1859 for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else '' 1860 1861 if not root_dir: 1862 root_dir = self.dir_path 1863 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1864 1865 try: 1866 common_run_interface.CommonRunCmd.update_make_opts_full( 1867 make_opts, for_update) 1868 except IOError: 1869 if root_dir == self.dir_path: 1870 logger.info('Fail to set compiler. Trying to continue anyway.') 1871 1872 return
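# For reference, the make_opts values set above for a clang toolchain linked
# against libc++ (the pre-Mojave macOS case) would be, schematically:
_toy_clang_make_opts = {'DEFAULT_CPP_COMPILER': 'clang',
                        'MACFLAG': '-mmacosx-version-min=10.7',
                        'STDLIB': '-lc++',
                        'STDLIB_FLAG': '-stdlib=libc++'}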
1873
1874 #=============================================================================== 1875 # ProcessExporterFortranSA 1876 #=============================================================================== 1877 -class ProcessExporterFortranSA(ProcessExporterFortran):
1878 """Class to take care of exporting a set of matrix elements to 1879 MadGraph v4 StandAlone format.""" 1880 1881 matrix_template = "matrix_standalone_v4.inc" 1882
1883 - def __init__(self, *args,**opts):
1884 """add the format information compare to standard init""" 1885 1886 if 'format' in opts: 1887 self.format = opts['format'] 1888 del opts['format'] 1889 else: 1890 self.format = 'standalone' 1891 1892 self.prefix_info = {} 1893 ProcessExporterFortran.__init__(self, *args, **opts)
1894
1895 - def copy_template(self, model):
1896 """Additional actions needed for setup of Template 1897 """ 1898 1899 #First copy the full template tree if dir_path doesn't exit 1900 if os.path.isdir(self.dir_path): 1901 return 1902 1903 logger.info('initialize a new standalone directory: %s' % \ 1904 os.path.basename(self.dir_path)) 1905 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 1906 1907 # Create the directory structure 1908 os.mkdir(self.dir_path) 1909 os.mkdir(pjoin(self.dir_path, 'Source')) 1910 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 1911 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 1912 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 1913 os.mkdir(pjoin(self.dir_path, 'bin')) 1914 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 1915 os.mkdir(pjoin(self.dir_path, 'lib')) 1916 os.mkdir(pjoin(self.dir_path, 'Cards')) 1917 1918 # Information at top-level 1919 #Write version info 1920 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 1921 try: 1922 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 1923 except IOError: 1924 MG5_version = misc.get_pkg_info() 1925 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 1926 "5." + MG5_version['version']) 1927 1928 1929 # Add file in SubProcesses 1930 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 1931 pjoin(self.dir_path, 'SubProcesses', 'makefileP')) 1932 1933 if self.format == 'standalone': 1934 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 1935 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1936 1937 # Add file in Source 1938 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 1939 pjoin(self.dir_path, 'Source')) 1940 # add the makefile 1941 filename = pjoin(self.dir_path,'Source','makefile') 1942 self.write_source_makefile(writers.FileWriter(filename))
1943 1944 #=========================================================================== 1945 # export model files 1946 #===========================================================================
1947 - def export_model_files(self, model_path):
1948 """export the model dependent files for V4 model""" 1949 1950 super(ProcessExporterFortranSA,self).export_model_files(model_path) 1951 # Add the routine update_as_param in v4 model 1952 # This is a function created in the UFO 1953 text=""" 1954 subroutine update_as_param() 1955 call setpara('param_card.dat',.false.) 1956 return 1957 end 1958 """ 1959 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 1960 ff.write(text) 1961 ff.close() 1962 1963 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 1964 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 1965 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 1966 fsock.write(text) 1967 fsock.close() 1968 1969 self.make_model_symbolic_link()
1970 1971 #=========================================================================== 1972 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 1973 #===========================================================================
1974 - def write_procdef_mg5(self, file_pos, modelname, process_str):
1975 """ write an equivalent of the MG4 proc_card in order that all the Madevent 1976 Perl script of MadEvent4 are still working properly for pure MG5 run. 1977 Not needed for StandAlone so just return 1978 """ 1979 1980 return
1981 1982 1983 #=========================================================================== 1984 # Make the Helas and Model directories for Standalone directory 1985 #===========================================================================
1986 - def make(self):
1987 """Run make in the DHELAS and MODEL directories, to set up 1988 everything for running standalone 1989 """ 1990 1991 source_dir = pjoin(self.dir_path, "Source") 1992 logger.info("Running make for Helas") 1993 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 1994 logger.info("Running make for Model") 1995 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
1996 1997 #=========================================================================== 1998 # Create proc_card_mg5.dat for Standalone directory 1999 #===========================================================================
2000 - def finalize(self, matrix_elements, history, mg5options, flaglist):
2001 """Finalize Standalone MG4 directory by 2002 generation proc_card_mg5.dat 2003 generate a global makefile 2004 """ 2005 2006 compiler = {'fortran': mg5options['fortran_compiler'], 2007 'cpp': mg5options['cpp_compiler'], 2008 'f2py': mg5options['f2py_compiler']} 2009 2010 self.compiler_choice(compiler) 2011 self.make() 2012 2013 # Write command history as proc_card_mg5 2014 if history and os.path.isdir(pjoin(self.dir_path, 'Cards')): 2015 output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2016 history.write(output_file) 2017 2018 ProcessExporterFortran.finalize(self, matrix_elements, 2019 history, mg5options, flaglist) 2020 open(pjoin(self.dir_path,'__init__.py'),'w') 2021 open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w') 2022 2023 if 'mode' in self.opt and self.opt['mode'] == "reweight": 2024 #add the module to hande the NLO weight 2025 files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'), 2026 pjoin(self.dir_path, 'Source')) 2027 files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'), 2028 pjoin(self.dir_path, 'Source', 'PDF')) 2029 self.write_pdf_opendata() 2030 2031 if self.prefix_info: 2032 self.write_f2py_splitter() 2033 self.write_f2py_makefile() 2034 self.write_f2py_check_sa(matrix_elements, 2035 pjoin(self.dir_path,'SubProcesses','check_sa.py')) 2036 else: 2037 # create a single makefile to compile all the subprocesses 2038 text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n''' 2039 deppython = '' 2040 for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')): 2041 if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)): 2042 text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir} 2043 deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir} 2044 text+='all: %s\n\techo \'done\'' % deppython 2045 2046 ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a') 2047 ff.write(text) 2048 ff.close()
2049
2050 - def write_f2py_splitter(self):
2051 """write a function to call the correct matrix element""" 2052 2053 template = """ 2054 %(python_information)s 2055 subroutine smatrixhel(pdgs, npdg, p, ALPHAS, SCALE2, nhel, ANS) 2056 IMPLICIT NONE 2057 2058 CF2PY double precision, intent(in), dimension(0:3,npdg) :: p 2059 CF2PY integer, intent(in), dimension(npdg) :: pdgs 2060 CF2PY integer, intent(in) :: npdg 2061 CF2PY double precision, intent(out) :: ANS 2062 CF2PY double precision, intent(in) :: ALPHAS 2063 CF2PY double precision, intent(in) :: SCALE2 2064 integer pdgs(*) 2065 integer npdg, nhel 2066 double precision p(*) 2067 double precision ANS, ALPHAS, PI,SCALE2 2068 include 'coupl.inc' 2069 2070 PI = 3.141592653589793D0 2071 G = 2* DSQRT(ALPHAS*PI) 2072 CALL UPDATE_AS_PARAM() 2073 if (scale2.ne.0d0) stop 1 2074 2075 %(smatrixhel)s 2076 2077 return 2078 end 2079 2080 SUBROUTINE INITIALISE(PATH) 2081 C ROUTINE FOR F2PY to read the benchmark point. 2082 IMPLICIT NONE 2083 CHARACTER*512 PATH 2084 CF2PY INTENT(IN) :: PATH 2085 CALL SETPARA(PATH) !first call to setup the paramaters 2086 RETURN 2087 END 2088 2089 subroutine get_pdg_order(PDG) 2090 IMPLICIT NONE 2091 CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i) 2092 INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i) 2093 DATA PDGS/ %(pdgs)s / 2094 PDG = PDGS 2095 RETURN 2096 END 2097 2098 subroutine get_prefix(PREFIX) 2099 IMPLICIT NONE 2100 CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i) 2101 character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i) 2102 DATA PREF / '%(prefix)s'/ 2103 PREFIX = PREF 2104 RETURN 2105 END 2106 2107 2108 """ 2109 2110 allids = self.prefix_info.keys() 2111 allprefix = [self.prefix_info[key][0] for key in allids] 2112 min_nexternal = min([len(ids) for ids in allids]) 2113 max_nexternal = max([len(ids) for ids in allids]) 2114 2115 info = [] 2116 for key, (prefix, tag) in self.prefix_info.items(): 2117 info.append('#PY %s : %s # %s' % (tag, key, prefix)) 2118 2119 2120 text = [] 2121 for n_ext in range(min_nexternal, max_nexternal+1): 2122 current = [ids for ids in allids if len(ids)==n_ext] 2123 if not current: 2124 continue 2125 if min_nexternal != max_nexternal: 2126 if n_ext == min_nexternal: 2127 text.append(' if (npdg.eq.%i)then' % n_ext) 2128 else: 2129 text.append(' else if (npdg.eq.%i)then' % n_ext) 2130 for ii,pdgs in enumerate(current): 2131 condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)]) 2132 if ii==0: 2133 text.append( ' if(%s) then ! %i' % (condition, i)) 2134 else: 2135 text.append( ' else if(%s) then ! %i' % (condition,i)) 2136 text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[pdgs][0]) 2137 text.append(' endif') 2138 #close the function 2139 if min_nexternal != max_nexternal: 2140 text.append('endif') 2141 2142 formatting = {'python_information':'\n'.join(info), 2143 'smatrixhel': '\n'.join(text), 2144 'maxpart': max_nexternal, 2145 'nb_me': len(allids), 2146 'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0' 2147 for i in range(max_nexternal) for pdg in allids), 2148 'prefix':'\',\''.join(allprefix) 2149 } 2150 formatting['lenprefix'] = len(formatting['prefix']) 2151 text = template % formatting 2152 fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w') 2153 fsock.writelines(text) 2154 fsock.close()
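# A small sketch of the dispatch condition built above: smatrixhel matches the
# incoming PDG list against each generated process before delegating to the
# prefixed smatrixhel routine.  Hypothetical helper mirroring the string pattern
# used in the loop.
def _toy_dispatch_condition(pdgs):
    return '.and.'.join('%i.eq.pdgs(%i)' % (pdg, i + 1)
                        for i, pdg in enumerate(pdgs))
# _toy_dispatch_condition((21, 21, 6, -6))
# -> '21.eq.pdgs(1).and.21.eq.pdgs(2).and.6.eq.pdgs(3).and.-6.eq.pdgs(4)'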
2155
2156 - def write_f2py_check_sa(self, matrix_element, writer):
2157 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2158 # To be implemented. It is just an example file, i.e. not crucial. 2159 return
2160
2161 - def write_f2py_makefile(self):
2162 """Copy the f2py makefile template into SubProcesses/makefile.""" 2163 # Add file in SubProcesses 2164 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2165 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2166
2167 - def create_MA5_cards(self,*args,**opts):
2168 """Overload the parent-class method so as to bypass MA5 card creation in StandAlone.""" 2169 pass
2170
2171 - def compiler_choice(self, compiler):
2172 """ Different daughter classes might want different compilers. 2173 So this function is meant to be overloaded if desired.""" 2174 2175 self.set_compiler(compiler)
2176 2177 #=========================================================================== 2178 # generate_subprocess_directory 2179 #===========================================================================
2180 - def generate_subprocess_directory(self, matrix_element, 2181 fortran_model, number):
2182 """Generate the Pxxxxx directory for a subprocess in MG4 standalone, 2183 including the necessary matrix.f and nexternal.inc files""" 2184 2185 cwd = os.getcwd() 2186 # Create the directory PN_xx_xxxxx in the specified path 2187 dirpath = pjoin(self.dir_path, 'SubProcesses', \ 2188 "P%s" % matrix_element.get('processes')[0].shell_string()) 2189 2190 if self.opt['sa_symmetry']: 2191 # avoid symmetric output 2192 for i,proc in enumerate(matrix_element.get('processes')): 2193 2194 tag = proc.get_tag() 2195 legs = proc.get('legs')[:] 2196 leg0 = proc.get('legs')[0] 2197 leg1 = proc.get('legs')[1] 2198 if not leg1.get('state'): 2199 proc.get('legs')[0] = leg1 2200 proc.get('legs')[1] = leg0 2201 flegs = proc.get('legs')[2:] 2202 for perm in itertools.permutations(flegs): 2203 for i,p in enumerate(perm): 2204 proc.get('legs')[i+2] = p 2205 dirpath2 = pjoin(self.dir_path, 'SubProcesses', \ 2206 "P%s" % proc.shell_string()) 2207 #restore original order 2208 proc.get('legs')[2:] = legs[2:] 2209 if os.path.exists(dirpath2): 2210 proc.get('legs')[:] = legs 2211 return 0 2212 proc.get('legs')[:] = legs 2213 2214 try: 2215 os.mkdir(dirpath) 2216 except os.error as error: 2217 logger.warning(error.strerror + " " + dirpath) 2218 2219 #try: 2220 # os.chdir(dirpath) 2221 #except os.error: 2222 # logger.error('Could not cd to directory %s' % dirpath) 2223 # return 0 2224 2225 logger.info('Creating files in directory %s' % dirpath) 2226 2227 # Extract number of external particles 2228 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2229 2230 # Create the matrix.f file and the nexternal.inc file 2231 if self.opt['export_format']=='standalone_msP': 2232 filename = pjoin(dirpath, 'matrix_prod.f') 2233 else: 2234 filename = pjoin(dirpath, 'matrix.f') 2235 2236 proc_prefix = '' 2237 if 'prefix' in self.cmd_options: 2238 if self.cmd_options['prefix'] == 'int': 2239 proc_prefix = 'M%s_' % number 2240 elif self.cmd_options['prefix'] == 'proc': 2241 proc_prefix = matrix_element.get('processes')[0].shell_string().split('_',1)[1] 2242 else: 2243 raise Exception, '--prefix options supports only \'int\' and \'proc\'' 2244 for proc in matrix_element.get('processes'): 2245 ids = [l.get('id') for l in proc.get('legs_with_decays')] 2246 self.prefix_info[tuple(ids)] = [proc_prefix, proc.get_tag()] 2247 2248 calls = self.write_matrix_element_v4( 2249 writers.FortranWriter(filename), 2250 matrix_element, 2251 fortran_model, 2252 proc_prefix=proc_prefix) 2253 2254 if self.opt['export_format'] == 'standalone_msP': 2255 filename = pjoin(dirpath,'configs_production.inc') 2256 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2257 writers.FortranWriter(filename), 2258 matrix_element) 2259 2260 filename = pjoin(dirpath,'props_production.inc') 2261 self.write_props_file(writers.FortranWriter(filename), 2262 matrix_element, 2263 s_and_t_channels) 2264 2265 filename = pjoin(dirpath,'nexternal_prod.inc') 2266 self.write_nexternal_madspin(writers.FortranWriter(filename), 2267 nexternal, ninitial) 2268 2269 if self.opt['export_format']=='standalone_msF': 2270 filename = pjoin(dirpath, 'helamp.inc') 2271 ncomb=matrix_element.get_helicity_combinations() 2272 self.write_helamp_madspin(writers.FortranWriter(filename), 2273 ncomb) 2274 2275 filename = pjoin(dirpath, 'nexternal.inc') 2276 self.write_nexternal_file(writers.FortranWriter(filename), 2277 nexternal, ninitial) 2278 2279 filename = pjoin(dirpath, 'pmass.inc') 2280 self.write_pmass_file(writers.FortranWriter(filename), 2281 matrix_element) 2282 2283 filename 
= pjoin(dirpath, 'ngraphs.inc') 2284 self.write_ngraphs_file(writers.FortranWriter(filename), 2285 len(matrix_element.get_all_amplitudes())) 2286 2287 # Generate diagrams 2288 filename = pjoin(dirpath, "matrix.ps") 2289 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2290 get('diagrams'), 2291 filename, 2292 model=matrix_element.get('processes')[0].\ 2293 get('model'), 2294 amplitude=True) 2295 logger.info("Generating Feynman diagrams for " + \ 2296 matrix_element.get('processes')[0].nice_string()) 2297 plot.draw() 2298 2299 linkfiles = ['check_sa.f', 'coupl.inc'] 2300 2301 if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')): 2302 text = open(pjoin(dirpath, '..', 'check_sa.f')).read() 2303 pat = re.compile('smatrix', re.I) 2304 new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text) 2305 with open(pjoin(dirpath, 'check_sa.f'),'w') as f: 2306 f.write(new_text) 2307 linkfiles.pop(0) 2308 2309 for file in linkfiles: 2310 ln('../%s' % file, cwd=dirpath) 2311 ln('../makefileP', name='makefile', cwd=dirpath) 2312 # Return to original PWD 2313 #os.chdir(cwd) 2314 2315 if not calls: 2316 calls = 0 2317 return calls
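# A short sketch of the prefix handling above: with --prefix=int and subprocess
# number 3, the matrix routines become M3_smatrix / M3_matrix and the local copy
# of check_sa.f is patched with a case-insensitive substitution.  The call line
# below is a toy stand-in for the real check_sa.f contents.
_toy_call = '      call smatrix(p, me)'
_toy_patched, _toy_nsub = re.subn(re.compile('smatrix', re.I), 'M3_smatrix', _toy_call)
# _toy_patched == '      call M3_smatrix(p, me)'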
2318 2319 2320 #=========================================================================== 2321 # write_source_makefile 2322 #===========================================================================
2323 - def write_source_makefile(self, writer):
2324 """Write the Source/makefile for MG4 standalone""" 2325 2326 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2327 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2328 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2329 2330 replace_dict= {'libraries': set_of_lib, 2331 'model':model_line, 2332 'additional_dsample': '', 2333 'additional_dependencies':''} 2334 2335 text = open(path).read() % replace_dict 2336 2337 if writer: 2338 writer.write(text) 2339 2340 return replace_dict
2341 2342 #=========================================================================== 2343 # write_matrix_element_v4 2344 #===========================================================================
2345 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 2346 write=True, proc_prefix=''):
2347 """Export a matrix element to a matrix.f file in MG4 standalone format 2348 if write is on False, just return the replace_dict and not write anything.""" 2349 2350 2351 if not matrix_element.get('processes') or \ 2352 not matrix_element.get('diagrams'): 2353 return 0 2354 2355 if writer: 2356 if not isinstance(writer, writers.FortranWriter): 2357 raise writers.FortranWriter.FortranWriterError(\ 2358 "writer not FortranWriter but %s" % type(writer)) 2359 # Set lowercase/uppercase Fortran code 2360 writers.FortranWriter.downcase = False 2361 2362 2363 if not self.opt.has_key('sa_symmetry'): 2364 self.opt['sa_symmetry']=False 2365 2366 2367 # The proc_id is for MadEvent grouping which is never used in SA. 2368 replace_dict = {'global_variable':'', 'amp2_lines':'', 2369 'proc_prefix':proc_prefix, 'proc_id':''} 2370 2371 # Extract helas calls 2372 helas_calls = fortran_model.get_matrix_element_calls(\ 2373 matrix_element) 2374 2375 replace_dict['helas_calls'] = "\n".join(helas_calls) 2376 2377 # Extract version number and date from VERSION file 2378 info_lines = self.get_mg5_info_lines() 2379 replace_dict['info_lines'] = info_lines 2380 2381 # Extract process info lines 2382 process_lines = self.get_process_info_lines(matrix_element) 2383 replace_dict['process_lines'] = process_lines 2384 2385 # Extract number of external particles 2386 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2387 replace_dict['nexternal'] = nexternal 2388 replace_dict['nincoming'] = ninitial 2389 2390 # Extract ncomb 2391 ncomb = matrix_element.get_helicity_combinations() 2392 replace_dict['ncomb'] = ncomb 2393 2394 # Extract helicity lines 2395 helicity_lines = self.get_helicity_lines(matrix_element) 2396 replace_dict['helicity_lines'] = helicity_lines 2397 2398 # Extract overall denominator 2399 # Averaging initial state color, spin, and identical FS particles 2400 replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element) 2401 2402 # Extract ngraphs 2403 ngraphs = matrix_element.get_number_of_amplitudes() 2404 replace_dict['ngraphs'] = ngraphs 2405 2406 # Extract nwavefuncs 2407 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2408 replace_dict['nwavefuncs'] = nwavefuncs 2409 2410 # Extract ncolor 2411 ncolor = max(1, len(matrix_element.get('color_basis'))) 2412 replace_dict['ncolor'] = ncolor 2413 2414 replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor() 2415 replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\ 2416 matrix_element.get_beams_hel_avg_factor() 2417 2418 # Extract color data lines 2419 color_data_lines = self.get_color_data_lines(matrix_element) 2420 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2421 2422 if self.opt['export_format']=='standalone_msP': 2423 # For MadSpin need to return the AMP2 2424 amp2_lines = self.get_amp2_lines(matrix_element, [] ) 2425 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2426 replace_dict['global_variable'] = \ 2427 " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n" 2428 2429 # JAMP definition, depends on the number of independent split orders 2430 split_orders=matrix_element.get('processes')[0].get('split_orders') 2431 2432 if len(split_orders)==0: 2433 replace_dict['nSplitOrders']='' 2434 # Extract JAMP lines 2435 jamp_lines = self.get_JAMP_lines(matrix_element) 2436 # Consider the output of a dummy order 'ALL_ORDERS' for which we 2437 # set all amplitude order to weight 1 and only one squared order 2438 # contribution which is of course ALL_ORDERS=2. 
2439 squared_orders = [(2,),] 2440 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 2441 replace_dict['chosen_so_configs'] = '.TRUE.' 2442 replace_dict['nSqAmpSplitOrders']=1 2443 replace_dict['split_order_str_list']='' 2444 else: 2445 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 2446 replace_dict['nAmpSplitOrders']=len(amp_orders) 2447 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 2448 replace_dict['nSplitOrders']=len(split_orders) 2449 replace_dict['split_order_str_list']=str(split_orders) 2450 amp_so = self.get_split_orders_lines( 2451 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 2452 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 2453 replace_dict['ampsplitorders']='\n'.join(amp_so) 2454 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 2455 jamp_lines = self.get_JAMP_lines_split_order(\ 2456 matrix_element,amp_orders,split_order_names=split_orders) 2457 2458 # Now setup the array specifying what squared split order is chosen 2459 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 2460 matrix_element.get('processes')[0],squared_orders) 2461 2462 # For convenience we also write the driver check_sa_splitOrders.f 2463 # that explicitely writes out the contribution from each squared order. 2464 # The original driver still works and is compiled with 'make' while 2465 # the splitOrders one is compiled with 'make check_sa_born_splitOrders' 2466 check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f') 2467 self.write_check_sa_splitOrders(squared_orders,split_orders, 2468 nexternal,ninitial,proc_prefix,check_sa_writer) 2469 2470 if write: 2471 writers.FortranWriter('nsqso_born.inc').writelines( 2472 """INTEGER NSQSO_BORN 2473 PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders']) 2474 2475 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2476 2477 matrix_template = self.matrix_template 2478 if self.opt['export_format']=='standalone_msP' : 2479 matrix_template = 'matrix_standalone_msP_v4.inc' 2480 elif self.opt['export_format']=='standalone_msF': 2481 matrix_template = 'matrix_standalone_msF_v4.inc' 2482 elif self.opt['export_format']=='matchbox': 2483 replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id') 2484 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2485 2486 if len(split_orders)>0: 2487 if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']: 2488 logger.debug("Warning: The export format %s is not "+\ 2489 " available for individual ME evaluation of given coupl. orders."+\ 2490 " Only the total ME will be computed.", self.opt['export_format']) 2491 elif self.opt['export_format'] in ['madloop_matchbox']: 2492 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2493 matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc" 2494 else: 2495 matrix_template = "matrix_standalone_splitOrders_v4.inc" 2496 2497 replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template) 2498 replace_dict['template_file2'] = pjoin(_file_path, \ 2499 'iolibs/template_files/split_orders_helping_functions.inc') 2500 if write and writer: 2501 path = replace_dict['template_file'] 2502 content = open(path).read() 2503 content = content % replace_dict 2504 # Write the file 2505 writer.writelines(content) 2506 # Add the helper functions. 
2507 if len(split_orders)>0: 2508 content = '\n' + open(replace_dict['template_file2'])\ 2509 .read()%replace_dict 2510 writer.writelines(content) 2511 return len(filter(lambda call: call.find('#') != 0, helas_calls)) 2512 else: 2513 replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls)) 2514 return replace_dict # for subclass update
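# A minimal sketch of the template substitution used above: the matrix_*.inc
# templates are plain text with %(key)s placeholders that are filled from
# replace_dict.  The template string below is a toy stand-in, not the real
# matrix_standalone_v4.inc.
_toy_template = ("      INTEGER NEXTERNAL\n"
                 "      PARAMETER (NEXTERNAL=%(nexternal)d)\n"
                 "%(helas_calls)s\n")
_toy_matrix_f = _toy_template % {'nexternal': 4,
                                 'helas_calls': 'C     helas calls would go here'}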
2515
2516 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2517 nincoming, proc_prefix, writer):
2518 """ Write out a more advanced version of the check_sa drivers that 2519 individually returns the matrix element for each contributing squared 2520 order.""" 2521 2522 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2523 'template_files', 'check_sa_splitOrders.f')).read() 2524 printout_sq_orders=[] 2525 for i, squared_order in enumerate(squared_orders): 2526 sq_orders=[] 2527 for j, sqo in enumerate(squared_order): 2528 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2529 printout_sq_orders.append(\ 2530 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2531 %(i+1,' '.join(sq_orders),i+1)) 2532 printout_sq_orders='\n'.join(printout_sq_orders) 2533 replace_dict = {'printout_sqorders':printout_sq_orders, 2534 'nSplitOrders':len(squared_orders), 2535 'nexternal':nexternal, 2536 'nincoming':nincoming, 2537 'proc_prefix':proc_prefix} 2538 2539 if writer: 2540 writer.writelines(check_sa_content % replace_dict) 2541 else: 2542 return replace_dict
2543
2544 -class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
2545 """Class to take care of exporting a set of matrix elements for the MatchBox 2546 code, in the case of Born-only routines.""" 2547 2548 default_opt = {'clean': False, 'complex_mass':False, 2549 'export_format':'matchbox', 'mp': False, 2550 'sa_symmetry': True} 2551 2552 # specific template for the Born matrix element 2553 2554 2555 matrix_template = "matrix_standalone_matchbox.inc" 2556 2557 @staticmethod
2558 - def get_color_string_lines(matrix_element):
2559 """Return the color matrix definition lines for this matrix element. Split 2560 rows in chunks of size n.""" 2561 2562 if not matrix_element.get('color_matrix'): 2563 return "\n".join(["out = 1"]) 2564 2565 #start the real work 2566 color_denominators = matrix_element.get('color_matrix').\ 2567 get_line_denominators() 2568 matrix_strings = [] 2569 my_cs = color.ColorString() 2570 for i_color in xrange(len(color_denominators)): 2571 # Then write the numerators for the matrix elements 2572 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color]) 2573 t_str=repr(my_cs) 2574 t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)") 2575 # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall: 2576 # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')] 2577 all_matches = t_match.findall(t_str) 2578 output = {} 2579 arg=[] 2580 for match in all_matches: 2581 ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')] 2582 if ctype in ['ColorOne' ]: 2583 continue 2584 if ctype not in ['T', 'Tr' ]: 2585 raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype 2586 tmparg += ['0'] 2587 arg +=tmparg 2588 for j, v in enumerate(arg): 2589 output[(i_color,j)] = v 2590 2591 for key in output: 2592 if matrix_strings == []: 2593 #first entry 2594 matrix_strings.append(""" 2595 if (in1.eq.%s.and.in2.eq.%s)then 2596 out = %s 2597 """ % (key[0], key[1], output[key])) 2598 else: 2599 #not first entry 2600 matrix_strings.append(""" 2601 elseif (in1.eq.%s.and.in2.eq.%s)then 2602 out = %s 2603 """ % (key[0], key[1], output[key])) 2604 if len(matrix_strings): 2605 matrix_strings.append(" else \n out = - 1 \n endif") 2606 else: 2607 return "\n out = - 1 \n " 2608 return "\n".join(matrix_strings)
2609
2610 - def make(self,*args,**opts):
2611 pass
2612
2613 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1, 2614 JAMP_formatLC=None):
2615 2616 """Adding leading color part of the colorflow""" 2617 2618 if not JAMP_formatLC: 2619 JAMP_formatLC= "LN%s" % JAMP_format 2620 2621 error_msg="Malformed '%s' argument passed to the get_JAMP_lines" 2622 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 2623 col_amps=col_amps.get_color_amplitudes() 2624 elif(isinstance(col_amps,list)): 2625 if(col_amps and isinstance(col_amps[0],list)): 2626 col_amps=col_amps 2627 else: 2628 raise MadGraph5Error, error_msg % 'col_amps' 2629 else: 2630 raise MadGraph5Error, error_msg % 'col_amps' 2631 2632 text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps, 2633 JAMP_format=JAMP_format, 2634 AMP_format=AMP_format, 2635 split=-1) 2636 2637 2638 # Filter the col_ampls to generate only those without any 1/NC terms 2639 2640 LC_col_amps = [] 2641 for coeff_list in col_amps: 2642 to_add = [] 2643 for (coefficient, amp_number) in coeff_list: 2644 if coefficient[3]==0: 2645 to_add.append( (coefficient, amp_number) ) 2646 LC_col_amps.append(to_add) 2647 2648 text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps, 2649 JAMP_format=JAMP_formatLC, 2650 AMP_format=AMP_format, 2651 split=-1) 2652 2653 return text
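# A compact sketch of the leading-colour filtering applied above: only terms
# whose coefficient carries no 1/Nc suppression (coefficient[3] == 0) are kept
# for the LNJAMP definitions.  Hypothetical helper mirroring the loop.
def _toy_leading_color_terms(color_amplitudes):
    return [[(coeff, amp) for (coeff, amp) in coeff_list if coeff[3] == 0]
            for coeff_list in color_amplitudes]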
2654
2655 2656 2657 2658 #=============================================================================== 2659 # ProcessExporterFortranMW 2660 #=============================================================================== 2661 -class ProcessExporterFortranMW(ProcessExporterFortran):
2662 """Class to take care of exporting a set of matrix elements to 2663 MadGraph v4 - MadWeight format.""" 2664 2665 matrix_file="matrix_standalone_v4.inc" 2666
2667 - def copy_template(self, model):
2668 """Additional actions needed for setup of Template 2669 """ 2670 2671 super(ProcessExporterFortranMW, self).copy_template(model) 2672 2673 # Add the MW specific file 2674 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2675 pjoin(self.dir_path, 'Source','MadWeight'), True) 2676 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2677 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2678 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2679 pjoin(self.dir_path, 'Source','setrun.f')) 2680 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2681 pjoin(self.dir_path, 'Source','run.inc')) 2682 # File created from Template (Different in some child class) 2683 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2684 self.write_run_config_file(writers.FortranWriter(filename)) 2685 2686 try: 2687 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2688 stdout = os.open(os.devnull, os.O_RDWR), 2689 stderr = os.open(os.devnull, os.O_RDWR), 2690 cwd=self.dir_path) 2691 except OSError: 2692 # Probably madweight already called 2693 pass 2694 2695 # Copy the different python file in the Template 2696 self.copy_python_file() 2697 # create the appropriate cuts.f 2698 self.get_mw_cuts_version() 2699 2700 # add the makefile in Source directory 2701 filename = os.path.join(self.dir_path,'Source','makefile') 2702 self.write_source_makefile(writers.FortranWriter(filename))
2703 2704 2705 2706 2707 #=========================================================================== 2708 # convert_model 2709 #===========================================================================
2710 - def convert_model(self, model, wanted_lorentz = [], 2711 wanted_couplings = []):
2712 2713 super(ProcessExporterFortranMW,self).convert_model(model, 2714 wanted_lorentz, wanted_couplings) 2715 2716 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2717 try: 2718 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2719 except OSError as error: 2720 pass 2721 model_path = model.get('modelpath') 2722 # This is not safe if there is a '##' or '-' in the path. 2723 shutil.copytree(model_path, 2724 pjoin(self.dir_path,'bin','internal','ufomodel'), 2725 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2726 if hasattr(model, 'restrict_card'): 2727 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2728 'restrict_default.dat') 2729 if isinstance(model.restrict_card, check_param_card.ParamCard): 2730 model.restrict_card.write(out_path) 2731 else: 2732 files.cp(model.restrict_card, out_path)
2733 2734 #=========================================================================== 2735 # generate_subprocess_directory 2736 #===========================================================================
2737 - def copy_python_file(self):
2738 """copy the python file require for the Template""" 2739 2740 # madevent interface 2741 cp(_file_path+'/interface/madweight_interface.py', 2742 self.dir_path+'/bin/internal/madweight_interface.py') 2743 cp(_file_path+'/interface/extended_cmd.py', 2744 self.dir_path+'/bin/internal/extended_cmd.py') 2745 cp(_file_path+'/interface/common_run_interface.py', 2746 self.dir_path+'/bin/internal/common_run_interface.py') 2747 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2748 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2749 cp(_file_path+'/iolibs/save_load_object.py', 2750 self.dir_path+'/bin/internal/save_load_object.py') 2751 cp(_file_path+'/madevent/gen_crossxhtml.py', 2752 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2753 cp(_file_path+'/madevent/sum_html.py', 2754 self.dir_path+'/bin/internal/sum_html.py') 2755 cp(_file_path+'/various/FO_analyse_card.py', 2756 self.dir_path+'/bin/internal/FO_analyse_card.py') 2757 cp(_file_path+'/iolibs/file_writers.py', 2758 self.dir_path+'/bin/internal/file_writers.py') 2759 #model file 2760 cp(_file_path+'../models/check_param_card.py', 2761 self.dir_path+'/bin/internal/check_param_card.py') 2762 2763 #madevent file 2764 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2765 cp(_file_path+'/various/lhe_parser.py', 2766 self.dir_path+'/bin/internal/lhe_parser.py') 2767 2768 cp(_file_path+'/various/banner.py', 2769 self.dir_path+'/bin/internal/banner.py') 2770 cp(_file_path+'/various/shower_card.py', 2771 self.dir_path+'/bin/internal/shower_card.py') 2772 cp(_file_path+'/various/cluster.py', 2773 self.dir_path+'/bin/internal/cluster.py') 2774 2775 # logging configuration 2776 cp(_file_path+'/interface/.mg5_logging.conf', 2777 self.dir_path+'/bin/internal/me5_logging.conf') 2778 cp(_file_path+'/interface/coloring_logging.py', 2779 self.dir_path+'/bin/internal/coloring_logging.py')
2780 2781 2782 #=========================================================================== 2783 # Change the version of cuts.f to the one compatible with MW 2784 #===========================================================================
2785 - def get_mw_cuts_version(self, outpath=None):
2786 """create the appropriate cuts.f 2787 This is based on the one associated to ME output but: 2788 1) No clustering (=> remove initcluster/setclscales) 2789 2) Adding the definition of cut_bw at the file. 2790 """ 2791 2792 template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f')) 2793 2794 text = StringIO() 2795 #1) remove all dependencies in ickkw >1: 2796 nb_if = 0 2797 for line in template: 2798 if 'if(xqcut.gt.0d0' in line: 2799 nb_if = 1 2800 if nb_if == 0: 2801 text.write(line) 2802 continue 2803 if re.search(r'if\(.*\)\s*then', line): 2804 nb_if += 1 2805 elif 'endif' in line: 2806 nb_if -= 1 2807 2808 #2) add fake cut_bw (have to put the true one later) 2809 text.write(""" 2810 logical function cut_bw(p) 2811 include 'madweight_param.inc' 2812 double precision p(*) 2813 if (bw_cut) then 2814 cut_bw = .true. 2815 else 2816 stop 1 2817 endif 2818 return 2819 end 2820 """) 2821 2822 final = text.getvalue() 2823 #3) remove the call to initcluster: 2824 template = final.replace('call initcluster', '! Remove for MW!call initcluster') 2825 template = template.replace('genps.inc', 'maxparticles.inc') 2826 #Now we can write it 2827 if not outpath: 2828 fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w') 2829 elif isinstance(outpath, str): 2830 fsock = open(outpath, 'w') 2831 else: 2832 fsock = outpath 2833 fsock.write(template)
2834 2835 2836 2837 #=========================================================================== 2838 # Make the Helas and Model directories for Standalone directory 2839 #===========================================================================
2840 - def make(self):
2841 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2842 everything for running madweight 2843 """ 2844 2845 source_dir = os.path.join(self.dir_path, "Source") 2846 logger.info("Running make for Helas") 2847 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2848 logger.info("Running make for Model") 2849 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2850 logger.info("Running make for PDF") 2851 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2852 logger.info("Running make for CERNLIB") 2853 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2854 logger.info("Running make for GENERIC") 2855 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2856 logger.info("Running make for blocks") 2857 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2858 logger.info("Running make for tools") 2859 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2860 2861 #=========================================================================== 2862 # Create proc_card_mg5.dat for MadWeight directory 2863 #===========================================================================
2864 - def finalize(self, matrix_elements, history, mg5options, flaglist):
2865 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 2866 2867 compiler = {'fortran': mg5options['fortran_compiler'], 2868 'cpp': mg5options['cpp_compiler'], 2869 'f2py': mg5options['f2py_compiler']} 2870 2871 2872 2873 #proc_charac 2874 self.create_proc_charac() 2875 2876 # Write maxparticles.inc based on max of ME's/subprocess groups 2877 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 2878 self.write_maxparticles_file(writers.FortranWriter(filename), 2879 matrix_elements) 2880 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2881 pjoin(self.dir_path, 'Source','MadWeight','blocks')) 2882 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2883 pjoin(self.dir_path, 'Source','MadWeight','tools')) 2884 2885 self.set_compiler(compiler) 2886 self.make() 2887 2888 # Write command history as proc_card_mg5 2889 if os.path.isdir(os.path.join(self.dir_path, 'Cards')): 2890 output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2891 history.write(output_file) 2892 2893 ProcessExporterFortran.finalize(self, matrix_elements, 2894 history, mg5options, flaglist)
2895 2896 2897 2898 #=========================================================================== 2899 # create the run_card for MW 2900 #===========================================================================
2901 - def create_run_card(self, matrix_elements, history):
2902 """Create run_card.dat and run_card_default.dat with MadWeight defaults.""" 2903 2904 run_card = banner_mod.RunCard() 2905 2906 # switch to the MadWeight defaults 2907 run_card["run_tag"] = "\'not_use\'" 2908 run_card["fixed_ren_scale"] = "T" 2909 run_card["fixed_fac_scale"] = "T" 2910 run_card.remove_all_cut() 2911 2912 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2913 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2914 python_template=True) 2915 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2916 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2917 python_template=True)
2918 2919 #=========================================================================== 2920 # export model files 2921 #===========================================================================
2922 - def export_model_files(self, model_path):
2923 """export the model dependent files for V4 model""" 2924 2925 super(ProcessExporterFortranMW,self).export_model_files(model_path) 2926 # Add the routine update_as_param in v4 model 2927 # This is a function created in the UFO 2928 text=""" 2929 subroutine update_as_param() 2930 call setpara('param_card.dat',.false.) 2931 return 2932 end 2933 """ 2934 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2935 ff.write(text) 2936 ff.close() 2937 2938 # Modify setrun.f 2939 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 2940 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 2941 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 2942 fsock.write(text) 2943 fsock.close() 2944 2945 # Modify initialization.f 2946 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 2947 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 2948 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 2949 fsock.write(text) 2950 fsock.close() 2951 2952 2953 self.make_model_symbolic_link()
2954 2955 #=========================================================================== 2956 # generate_subprocess_directory 2957 #===========================================================================
2958 - def generate_subprocess_directory(self, matrix_element, 2959 fortran_model,number):
2960 """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format, 2961 including the necessary matrix.f and nexternal.inc files""" 2962 2963 cwd = os.getcwd() 2964 # Create the directory PN_xx_xxxxx in the specified path 2965 dirpath = os.path.join(self.dir_path, 'SubProcesses', \ 2966 "P%s" % matrix_element.get('processes')[0].shell_string()) 2967 2968 try: 2969 os.mkdir(dirpath) 2970 except os.error as error: 2971 logger.warning(error.strerror + " " + dirpath) 2972 2973 #try: 2974 # os.chdir(dirpath) 2975 #except os.error: 2976 # logger.error('Could not cd to directory %s' % dirpath) 2977 # return 0 2978 2979 logger.info('Creating files in directory %s' % dirpath) 2980 2981 # Extract number of external particles 2982 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2983 2984 # Create the matrix.f file and the nexternal.inc file 2985 filename = pjoin(dirpath,'matrix.f') 2986 calls,ncolor = self.write_matrix_element_v4( 2987 writers.FortranWriter(filename), 2988 matrix_element, 2989 fortran_model) 2990 2991 filename = pjoin(dirpath, 'auto_dsig.f') 2992 self.write_auto_dsig_file(writers.FortranWriter(filename), 2993 matrix_element) 2994 2995 filename = pjoin(dirpath, 'configs.inc') 2996 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2997 writers.FortranWriter(filename), 2998 matrix_element) 2999 3000 filename = pjoin(dirpath, 'nexternal.inc') 3001 self.write_nexternal_file(writers.FortranWriter(filename), 3002 nexternal, ninitial) 3003 3004 filename = pjoin(dirpath, 'leshouche.inc') 3005 self.write_leshouche_file(writers.FortranWriter(filename), 3006 matrix_element) 3007 3008 filename = pjoin(dirpath, 'props.inc') 3009 self.write_props_file(writers.FortranWriter(filename), 3010 matrix_element, 3011 s_and_t_channels) 3012 3013 filename = pjoin(dirpath, 'pmass.inc') 3014 self.write_pmass_file(writers.FortranWriter(filename), 3015 matrix_element) 3016 3017 filename = pjoin(dirpath, 'ngraphs.inc') 3018 self.write_ngraphs_file(writers.FortranWriter(filename), 3019 len(matrix_element.get_all_amplitudes())) 3020 3021 filename = pjoin(dirpath, 'maxamps.inc') 3022 self.write_maxamps_file(writers.FortranWriter(filename), 3023 len(matrix_element.get('diagrams')), 3024 ncolor, 3025 len(matrix_element.get('processes')), 3026 1) 3027 3028 filename = pjoin(dirpath, 'phasespace.inc') 3029 self.write_phasespace_file(writers.FortranWriter(filename), 3030 len(matrix_element.get('diagrams')), 3031 ) 3032 3033 # Generate diagrams 3034 filename = pjoin(dirpath, "matrix.ps") 3035 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3036 get('diagrams'), 3037 filename, 3038 model=matrix_element.get('processes')[0].\ 3039 get('model'), 3040 amplitude='') 3041 logger.info("Generating Feynman diagrams for " + \ 3042 matrix_element.get('processes')[0].nice_string()) 3043 plot.draw() 3044 3045 #import genps.inc and maxconfigs.inc into Subprocesses 3046 ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False) 3047 #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False) 3048 3049 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc'] 3050 3051 for file in linkfiles: 3052 ln('../%s' % file, starting_dir=cwd) 3053 3054 ln('nexternal.inc', '../../Source', log=False, cwd=dirpath) 3055 ln('leshouche.inc', '../../Source', log=False, cwd=dirpath) 3056 ln('maxamps.inc', '../../Source', log=False, cwd=dirpath) 3057 
ln('phasespace.inc', '../', log=True, cwd=dirpath) 3058 # Return to original PWD 3059 #os.chdir(cwd) 3060 3061 if not calls: 3062 calls = 0 3063 return calls
3064 3065 #=========================================================================== 3066 # write_matrix_element_v4 3067 #===========================================================================
3068 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3069 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3070 3071 if not matrix_element.get('processes') or \ 3072 not matrix_element.get('diagrams'): 3073 return 0 3074 3075 if writer: 3076 if not isinstance(writer, writers.FortranWriter): 3077 raise writers.FortranWriter.FortranWriterError(\ 3078 "writer not FortranWriter") 3079 3080 # Set lowercase/uppercase Fortran code 3081 writers.FortranWriter.downcase = False 3082 3083 replace_dict = {} 3084 3085 # Extract version number and date from VERSION file 3086 info_lines = self.get_mg5_info_lines() 3087 replace_dict['info_lines'] = info_lines 3088 3089 # Extract process info lines 3090 process_lines = self.get_process_info_lines(matrix_element) 3091 replace_dict['process_lines'] = process_lines 3092 3093 # Set proc_id 3094 replace_dict['proc_id'] = proc_id 3095 3096 # Extract number of external particles 3097 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3098 replace_dict['nexternal'] = nexternal 3099 3100 # Extract ncomb 3101 ncomb = matrix_element.get_helicity_combinations() 3102 replace_dict['ncomb'] = ncomb 3103 3104 # Extract helicity lines 3105 helicity_lines = self.get_helicity_lines(matrix_element) 3106 replace_dict['helicity_lines'] = helicity_lines 3107 3108 # Extract overall denominator 3109 # Averaging initial state color, spin, and identical FS particles 3110 den_factor_line = self.get_den_factor_line(matrix_element) 3111 replace_dict['den_factor_line'] = den_factor_line 3112 3113 # Extract ngraphs 3114 ngraphs = matrix_element.get_number_of_amplitudes() 3115 replace_dict['ngraphs'] = ngraphs 3116 3117 # Extract nwavefuncs 3118 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3119 replace_dict['nwavefuncs'] = nwavefuncs 3120 3121 # Extract ncolor 3122 ncolor = max(1, len(matrix_element.get('color_basis'))) 3123 replace_dict['ncolor'] = ncolor 3124 3125 # Extract color data lines 3126 color_data_lines = self.get_color_data_lines(matrix_element) 3127 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3128 3129 # Extract helas calls 3130 helas_calls = fortran_model.get_matrix_element_calls(\ 3131 matrix_element) 3132 3133 replace_dict['helas_calls'] = "\n".join(helas_calls) 3134 3135 # Extract JAMP lines 3136 jamp_lines = self.get_JAMP_lines(matrix_element) 3137 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3138 3139 replace_dict['template_file'] = os.path.join(_file_path, \ 3140 'iolibs/template_files/%s' % self.matrix_file) 3141 replace_dict['template_file2'] = '' 3142 3143 if writer: 3144 file = open(replace_dict['template_file']).read() 3145 file = file % replace_dict 3146 # Write the file 3147 writer.writelines(file) 3148 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor 3149 else: 3150 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor)
3151 3152 #=========================================================================== 3153 # write_source_makefile 3154 #===========================================================================
3155 - def write_source_makefile(self, writer):
3156 """Write the nexternal.inc file for madweight""" 3157 3158 3159 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3160 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3161 text = open(path).read() % {'libraries': set_of_lib} 3162 writer.write(text) 3163 3164 return True
3165
3166 - def write_phasespace_file(self, writer, nb_diag):
3167 """ """ 3168 3169 template = """ include 'maxparticles.inc' 3170 integer max_branches 3171 parameter (max_branches=max_particles-1) 3172 integer max_configs 3173 parameter (max_configs=%(nb_diag)s) 3174 3175 c channel position 3176 integer config_pos,perm_pos 3177 common /to_config/config_pos,perm_pos 3178 3179 """ 3180 3181 writer.write(template % {'nb_diag': nb_diag})
3182 3183 3184 #=========================================================================== 3185 # write_auto_dsig_file 3186 #===========================================================================
3187 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3188 """Write the auto_dsig.f file for the differential cross section 3189 calculation, includes pdf call information (MadWeight format)""" 3190 3191 if not matrix_element.get('processes') or \ 3192 not matrix_element.get('diagrams'): 3193 return 0 3194 3195 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3196 3197 if ninitial < 1 or ninitial > 2: 3198 raise writers.FortranWriter.FortranWriterError, \ 3199 """Need ninitial = 1 or 2 to write auto_dsig file""" 3200 3201 replace_dict = {} 3202 3203 # Extract version number and date from VERSION file 3204 info_lines = self.get_mg5_info_lines() 3205 replace_dict['info_lines'] = info_lines 3206 3207 # Extract process info lines 3208 process_lines = self.get_process_info_lines(matrix_element) 3209 replace_dict['process_lines'] = process_lines 3210 3211 # Set proc_id 3212 replace_dict['proc_id'] = proc_id 3213 replace_dict['numproc'] = 1 3214 3215 # Set dsig_line 3216 if ninitial == 1: 3217 # No conversion, since result of decay should be given in GeV 3218 dsig_line = "pd(0)*dsiguu" 3219 else: 3220 # Convert result (in GeV) to pb 3221 dsig_line = "pd(0)*conv*dsiguu" 3222 3223 replace_dict['dsig_line'] = dsig_line 3224 3225 # Extract pdf lines 3226 pdf_vars, pdf_data, pdf_lines = \ 3227 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3228 replace_dict['pdf_vars'] = pdf_vars 3229 replace_dict['pdf_data'] = pdf_data 3230 replace_dict['pdf_lines'] = pdf_lines 3231 3232 # Lines that differ between subprocess group and regular 3233 if proc_id: 3234 replace_dict['numproc'] = int(proc_id) 3235 replace_dict['passcuts_begin'] = "" 3236 replace_dict['passcuts_end'] = "" 3237 # Set lines for subprocess group version 3238 # Set define_iconfigs_lines 3239 replace_dict['define_subdiag_lines'] = \ 3240 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3241 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3242 else: 3243 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3244 replace_dict['passcuts_end'] = "ENDIF" 3245 replace_dict['define_subdiag_lines'] = "" 3246 3247 if writer: 3248 file = open(os.path.join(_file_path, \ 3249 'iolibs/template_files/auto_dsig_mw.inc')).read() 3250 3251 file = file % replace_dict 3252 # Write the file 3253 writer.writelines(file) 3254 else: 3255 return replace_dict
3256 #=========================================================================== 3257 # write_configs_file 3258 #===========================================================================
3259 - def write_configs_file(self, writer, matrix_element):
3260 """Write the configs.inc file for MadEvent""" 3261 3262 # Extract number of external particles 3263 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3264 3265 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3266 mapconfigs = [c[0] for c in configs] 3267 model = matrix_element.get('processes')[0].get('model') 3268 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3269 [[c[1]] for c in configs], 3270 mapconfigs, 3271 nexternal, ninitial,matrix_element, model)
3272 3273 #=========================================================================== 3274 # write_run_configs_file 3275 #===========================================================================
3276 - def write_run_config_file(self, writer):
3277 """Write the run_configs.inc file for MadWeight""" 3278 3279 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3280 text = open(path).read() % {'chanperjob':'5'} 3281 writer.write(text) 3282 return True
3283 3284 #=========================================================================== 3285 # write_configs_file_from_diagrams 3286 #===========================================================================
3287 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 3288 nexternal, ninitial, matrix_element, model):
3289 """Write the actual configs.inc file. 3290 3291 configs is the diagrams corresponding to configs (each 3292 diagrams is a list of corresponding diagrams for all 3293 subprocesses, with None if there is no corresponding diagrams 3294 for a given process). 3295 mapconfigs gives the diagram number for each config. 3296 3297 For s-channels, we need to output one PDG for each subprocess in 3298 the subprocess group, in order to be able to pick the right 3299 one for multiprocesses.""" 3300 3301 lines = [] 3302 3303 particle_dict = matrix_element.get('processes')[0].get('model').\ 3304 get('particle_dict') 3305 3306 s_and_t_channels = [] 3307 3308 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 3309 for config in configs if [d for d in config if d][0].\ 3310 get_vertex_leg_numbers()!=[]] 3311 3312 minvert = min(vert_list) if vert_list!=[] else 0 3313 # Number of subprocesses 3314 nsubprocs = len(configs[0]) 3315 3316 nconfigs = 0 3317 3318 new_pdg = model.get_first_non_pdg() 3319 3320 for iconfig, helas_diags in enumerate(configs): 3321 if any([vert > minvert for vert in 3322 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 3323 # Only 3-vertices allowed in configs.inc 3324 continue 3325 nconfigs += 1 3326 3327 # Need s- and t-channels for all subprocesses, including 3328 # those that don't contribute to this config 3329 empty_verts = [] 3330 stchannels = [] 3331 for h in helas_diags: 3332 if h: 3333 # get_s_and_t_channels gives vertices starting from 3334 # final state external particles and working inwards 3335 stchannels.append(h.get('amplitudes')[0].\ 3336 get_s_and_t_channels(ninitial,model,new_pdg)) 3337 else: 3338 stchannels.append((empty_verts, None)) 3339 3340 # For t-channels, just need the first non-empty one 3341 tchannels = [t for s,t in stchannels if t != None][0] 3342 3343 # For s_and_t_channels (to be used later) use only first config 3344 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 3345 tchannels]) 3346 3347 # Make sure empty_verts is same length as real vertices 3348 if any([s for s,t in stchannels]): 3349 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 3350 3351 # Reorganize s-channel vertices to get a list of all 3352 # subprocesses for each vertex 3353 schannels = zip(*[s for s,t in stchannels]) 3354 else: 3355 schannels = [] 3356 3357 allchannels = schannels 3358 if len(tchannels) > 1: 3359 # Write out tchannels only if there are any non-trivial ones 3360 allchannels = schannels + tchannels 3361 3362 # Write out propagators for s-channel and t-channel vertices 3363 3364 #lines.append("# Diagram %d" % (mapconfigs[iconfig])) 3365 # Correspondance between the config and the diagram = amp2 3366 lines.append("* %d %d " % (nconfigs, 3367 mapconfigs[iconfig])) 3368 3369 for verts in allchannels: 3370 if verts in schannels: 3371 vert = [v for v in verts if v][0] 3372 else: 3373 vert = verts 3374 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 3375 last_leg = vert.get('legs')[-1] 3376 line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1]) 3377 # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 3378 # (last_leg.get('number'), nconfigs, len(daughters), 3379 # ",".join([str(d) for d in daughters]))) 3380 3381 if last_leg.get('id') == 21 and 21 not in particle_dict: 3382 # Fake propagator used in multiparticle vertices 3383 mass = 'zero' 3384 width = 'zero' 3385 pow_part = 0 3386 else: 3387 if (last_leg.get('id')!=7): 3388 particle = particle_dict[last_leg.get('id')] 
3389 # Get mass 3390 mass = particle.get('mass') 3391 # Get width 3392 width = particle.get('width') 3393 else : # fake propagator used in multiparticle vertices 3394 mass= 'zero' 3395 width= 'zero' 3396 3397 line=line+" "+mass+" "+width+" " 3398 3399 if verts in schannels: 3400 pdgs = [] 3401 for v in verts: 3402 if v: 3403 pdgs.append(v.get('legs')[-1].get('id')) 3404 else: 3405 pdgs.append(0) 3406 lines.append(line+" S "+str(last_leg.get('id'))) 3407 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3408 # (last_leg.get('number'), nconfigs, nsubprocs, 3409 # ",".join([str(d) for d in pdgs]))) 3410 # lines.append("data tprid(%d,%d)/0/" % \ 3411 # (last_leg.get('number'), nconfigs)) 3412 elif verts in tchannels[:-1]: 3413 lines.append(line+" T "+str(last_leg.get('id'))) 3414 # lines.append("data tprid(%d,%d)/%d/" % \ 3415 # (last_leg.get('number'), nconfigs, 3416 # abs(last_leg.get('id')))) 3417 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3418 # (last_leg.get('number'), nconfigs, nsubprocs, 3419 # ",".join(['0'] * nsubprocs))) 3420 3421 # Write out number of configs 3422 # lines.append("# Number of configs") 3423 # lines.append("data mapconfig(0)/%d/" % nconfigs) 3424 lines.append(" * ") # a line with just a star indicates this is the end of file 3425 # Write the file 3426 writer.writelines(lines) 3427 3428 return s_and_t_channels
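# Editor's note (illustration, not in the original source): the MadWeight
# configs.inc written above is a plain text table rather than Fortran DATA
# statements.  Each kept configuration starts with a "* <config> <diagram>"
# header, followed by one line per propagator of the form
#     <propagator number> <daughter1> <daughter2> <mass> <width>  <S or T> <pdg>
# and the file ends with a line containing a single star.  A sketch for one
# hypothetical s-channel propagator (numbers and parameter names illustrative):
#
#     * 1 1
#     -1 3 4 MT WT  S 6
#      *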
3429
3430 3431 #=============================================================================== 3432 # ProcessExporterFortranME 3433 #=============================================================================== 3434 -class ProcessExporterFortranME(ProcessExporterFortran):
3435 """Class to take care of exporting a set of matrix elements to 3436 MadEvent format.""" 3437 3438 matrix_file = "matrix_madevent_v4.inc" 3439
3440 - def copy_template(self, model):
3441 """Additional actions needed for setup of Template 3442 """ 3443 3444 super(ProcessExporterFortranME, self).copy_template(model) 3445 3446 # File created from Template (Different in some child class) 3447 filename = pjoin(self.dir_path,'Source','run_config.inc') 3448 self.write_run_config_file(writers.FortranWriter(filename)) 3449 3450 # The next file are model dependant (due to SLAH convention) 3451 self.model_name = model.get('name') 3452 # Add the symmetry.f 3453 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3454 self.write_symmetry(writers.FortranWriter(filename)) 3455 # 3456 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3457 self.write_addmothers(writers.FortranWriter(filename)) 3458 # Copy the different python file in the Template 3459 self.copy_python_file()
3460 3461 3462 3463 3464 3465 #=========================================================================== 3466 # generate_subprocess_directory 3467 #===========================================================================
3468 - def copy_python_file(self):
3469 """copy the python file require for the Template""" 3470 3471 # madevent interface 3472 cp(_file_path+'/interface/madevent_interface.py', 3473 self.dir_path+'/bin/internal/madevent_interface.py') 3474 cp(_file_path+'/interface/extended_cmd.py', 3475 self.dir_path+'/bin/internal/extended_cmd.py') 3476 cp(_file_path+'/interface/common_run_interface.py', 3477 self.dir_path+'/bin/internal/common_run_interface.py') 3478 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3479 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3480 cp(_file_path+'/iolibs/save_load_object.py', 3481 self.dir_path+'/bin/internal/save_load_object.py') 3482 cp(_file_path+'/iolibs/file_writers.py', 3483 self.dir_path+'/bin/internal/file_writers.py') 3484 #model file 3485 cp(_file_path+'../models/check_param_card.py', 3486 self.dir_path+'/bin/internal/check_param_card.py') 3487 3488 #copy all the file present in madevent directory 3489 for name in os.listdir(pjoin(_file_path, 'madevent')): 3490 if name not in ['__init__.py'] and name.endswith('.py'): 3491 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3492 3493 #madevent file 3494 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3495 cp(_file_path+'/various/lhe_parser.py', 3496 self.dir_path+'/bin/internal/lhe_parser.py') 3497 cp(_file_path+'/various/banner.py', 3498 self.dir_path+'/bin/internal/banner.py') 3499 cp(_file_path+'/various/histograms.py', 3500 self.dir_path+'/bin/internal/histograms.py') 3501 cp(_file_path+'/various/plot_djrs.py', 3502 self.dir_path+'/bin/internal/plot_djrs.py') 3503 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3504 3505 cp(_file_path+'/various/cluster.py', 3506 self.dir_path+'/bin/internal/cluster.py') 3507 cp(_file_path+'/madevent/combine_runs.py', 3508 self.dir_path+'/bin/internal/combine_runs.py') 3509 # logging configuration 3510 cp(_file_path+'/interface/.mg5_logging.conf', 3511 self.dir_path+'/bin/internal/me5_logging.conf') 3512 cp(_file_path+'/interface/coloring_logging.py', 3513 self.dir_path+'/bin/internal/coloring_logging.py') 3514 # shower card and FO_analyse_card. 3515 # Although not needed, it is imported by banner.py 3516 cp(_file_path+'/various/shower_card.py', 3517 self.dir_path+'/bin/internal/shower_card.py') 3518 cp(_file_path+'/various/FO_analyse_card.py', 3519 self.dir_path+'/bin/internal/FO_analyse_card.py')
3520 3521
3522 - def convert_model(self, model, wanted_lorentz = [], 3523 wanted_couplings = []):
3524 3525 super(ProcessExporterFortranME,self).convert_model(model, 3526 wanted_lorentz, wanted_couplings) 3527 3528 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3529 try: 3530 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3531 except OSError as error: 3532 pass 3533 model_path = model.get('modelpath') 3534 # This is not safe if there is a '##' or '-' in the path. 3535 shutil.copytree(model_path, 3536 pjoin(self.dir_path,'bin','internal','ufomodel'), 3537 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3538 if hasattr(model, 'restrict_card'): 3539 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3540 'restrict_default.dat') 3541 if isinstance(model.restrict_card, check_param_card.ParamCard): 3542 model.restrict_card.write(out_path) 3543 else: 3544 files.cp(model.restrict_card, out_path)
3545 3546 #=========================================================================== 3547 # export model files 3548 #===========================================================================
3549 - def export_model_files(self, model_path):
3550 """export the model dependent files""" 3551 3552 super(ProcessExporterFortranME,self).export_model_files(model_path) 3553 3554 # Add the routine update_as_param in v4 model 3555 # This is a function created in the UFO 3556 text=""" 3557 subroutine update_as_param() 3558 call setpara('param_card.dat',.false.) 3559 return 3560 end 3561 """ 3562 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3563 ff.write(text) 3564 ff.close() 3565 3566 # Add the symmetry.f 3567 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3568 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3569 3570 # Modify setrun.f 3571 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3572 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3573 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3574 fsock.write(text) 3575 fsock.close() 3576 3577 self.make_model_symbolic_link()
3578 3579 #=========================================================================== 3580 # generate_subprocess_directory 3581 #===========================================================================
3582 - def generate_subprocess_directory(self, matrix_element, 3583 fortran_model, 3584 me_number):
3585 """Generate the Pxxxxx directory for a subprocess in MG4 madevent, 3586 including the necessary matrix.f and various helper files""" 3587 3588 cwd = os.getcwd() 3589 path = pjoin(self.dir_path, 'SubProcesses') 3590 3591 3592 if not self.model: 3593 self.model = matrix_element.get('processes')[0].get('model') 3594 3595 3596 3597 #os.chdir(path) 3598 # Create the directory PN_xx_xxxxx in the specified path 3599 subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string() 3600 try: 3601 os.mkdir(pjoin(path,subprocdir)) 3602 except os.error as error: 3603 logger.warning(error.strerror + " " + subprocdir) 3604 3605 #try: 3606 # os.chdir(subprocdir) 3607 #except os.error: 3608 # logger.error('Could not cd to directory %s' % subprocdir) 3609 # return 0 3610 3611 logger.info('Creating files in directory %s' % subprocdir) 3612 Ppath = pjoin(path, subprocdir) 3613 3614 # Extract number of external particles 3615 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3616 3617 # Add the driver.f 3618 ncomb = matrix_element.get_helicity_combinations() 3619 filename = pjoin(Ppath,'driver.f') 3620 self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1, 3621 v5=self.opt['v5_model']) 3622 3623 # Create the matrix.f file, auto_dsig.f file and all inc files 3624 filename = pjoin(Ppath, 'matrix.f') 3625 calls, ncolor = \ 3626 self.write_matrix_element_v4(writers.FortranWriter(filename), 3627 matrix_element, fortran_model, subproc_number = me_number) 3628 3629 filename = pjoin(Ppath, 'auto_dsig.f') 3630 self.write_auto_dsig_file(writers.FortranWriter(filename), 3631 matrix_element) 3632 3633 filename = pjoin(Ppath, 'configs.inc') 3634 mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 3635 writers.FortranWriter(filename), 3636 matrix_element) 3637 3638 filename = pjoin(Ppath, 'config_nqcd.inc') 3639 self.write_config_nqcd_file(writers.FortranWriter(filename), 3640 nqcd_list) 3641 3642 filename = pjoin(Ppath, 'config_subproc_map.inc') 3643 self.write_config_subproc_map_file(writers.FortranWriter(filename), 3644 s_and_t_channels) 3645 3646 filename = pjoin(Ppath, 'coloramps.inc') 3647 self.write_coloramps_file(writers.FortranWriter(filename), 3648 mapconfigs, 3649 matrix_element) 3650 3651 filename = pjoin(Ppath, 'get_color.f') 3652 self.write_colors_file(writers.FortranWriter(filename), 3653 matrix_element) 3654 3655 filename = pjoin(Ppath, 'decayBW.inc') 3656 self.write_decayBW_file(writers.FortranWriter(filename), 3657 s_and_t_channels) 3658 3659 filename = pjoin(Ppath, 'dname.mg') 3660 self.write_dname_file(writers.FileWriter(filename), 3661 "P"+matrix_element.get('processes')[0].shell_string()) 3662 3663 filename = pjoin(Ppath, 'iproc.dat') 3664 self.write_iproc_file(writers.FortranWriter(filename), 3665 me_number) 3666 3667 filename = pjoin(Ppath, 'leshouche.inc') 3668 self.write_leshouche_file(writers.FortranWriter(filename), 3669 matrix_element) 3670 3671 filename = pjoin(Ppath, 'maxamps.inc') 3672 self.write_maxamps_file(writers.FortranWriter(filename), 3673 len(matrix_element.get('diagrams')), 3674 ncolor, 3675 len(matrix_element.get('processes')), 3676 1) 3677 3678 filename = pjoin(Ppath, 'mg.sym') 3679 self.write_mg_sym_file(writers.FortranWriter(filename), 3680 matrix_element) 3681 3682 filename = pjoin(Ppath, 'ncombs.inc') 3683 self.write_ncombs_file(writers.FortranWriter(filename), 3684 nexternal) 3685 3686 filename = pjoin(Ppath, 'nexternal.inc') 3687 self.write_nexternal_file(writers.FortranWriter(filename), 3688 nexternal, ninitial) 
3689 3690 filename = pjoin(Ppath, 'ngraphs.inc') 3691 self.write_ngraphs_file(writers.FortranWriter(filename), 3692 len(mapconfigs)) 3693 3694 3695 filename = pjoin(Ppath, 'pmass.inc') 3696 self.write_pmass_file(writers.FortranWriter(filename), 3697 matrix_element) 3698 3699 filename = pjoin(Ppath, 'props.inc') 3700 self.write_props_file(writers.FortranWriter(filename), 3701 matrix_element, 3702 s_and_t_channels) 3703 3704 # Find config symmetries and permutations 3705 symmetry, perms, ident_perms = \ 3706 diagram_symmetry.find_symmetry(matrix_element) 3707 3708 filename = pjoin(Ppath, 'symswap.inc') 3709 self.write_symswap_file(writers.FortranWriter(filename), 3710 ident_perms) 3711 3712 filename = pjoin(Ppath, 'symfact_orig.dat') 3713 self.write_symfact_file(open(filename, 'w'), symmetry) 3714 3715 # Generate diagrams 3716 filename = pjoin(Ppath, "matrix.ps") 3717 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3718 get('diagrams'), 3719 filename, 3720 model=matrix_element.get('processes')[0].\ 3721 get('model'), 3722 amplitude=True) 3723 logger.info("Generating Feynman diagrams for " + \ 3724 matrix_element.get('processes')[0].nice_string()) 3725 plot.draw() 3726 3727 self.link_files_in_SubProcess(Ppath) 3728 3729 #import nexternal/leshouche in Source 3730 ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False) 3731 ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False) 3732 ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False) 3733 # Return to SubProcesses dir 3734 #os.chdir(os.path.pardir) 3735 3736 # Add subprocess to subproc.mg 3737 filename = pjoin(path, 'subproc.mg') 3738 files.append_to_file(filename, 3739 self.write_subproc, 3740 subprocdir) 3741 3742 # Return to original dir 3743 #os.chdir(cwd) 3744 3745 # Generate info page 3746 gen_infohtml.make_info_html(self.dir_path) 3747 3748 3749 if not calls: 3750 calls = 0 3751 return calls
3752 3753 link_Sub_files = ['addmothers.f', 3754 'cluster.f', 3755 'cluster.inc', 3756 'coupl.inc', 3757 'cuts.f', 3758 'cuts.inc', 3759 'genps.f', 3760 'genps.inc', 3761 'idenparts.f', 3762 'initcluster.f', 3763 'makefile', 3764 'message.inc', 3765 'myamp.f', 3766 'reweight.f', 3767 'run.inc', 3768 'maxconfigs.inc', 3769 'maxparticles.inc', 3770 'run_config.inc', 3771 'lhe_event_infos.inc', 3772 'setcuts.f', 3773 'setscales.f', 3774 'sudakov.inc', 3775 'symmetry.f', 3776 'unwgt.f', 3777 'dummy_fct.f' 3778 ] 3779 3793 3794
3795 - def finalize(self, matrix_elements, history, mg5options, flaglist):
3796 """Finalize ME v4 directory by creating jpeg diagrams, html 3797 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3798 3799 if 'nojpeg' in flaglist: 3800 makejpg = False 3801 else: 3802 makejpg = True 3803 if 'online' in flaglist: 3804 online = True 3805 else: 3806 online = False 3807 3808 compiler = {'fortran': mg5options['fortran_compiler'], 3809 'cpp': mg5options['cpp_compiler'], 3810 'f2py': mg5options['f2py_compiler']} 3811 3812 # indicate that the output type is not grouped 3813 if not isinstance(self, ProcessExporterFortranMEGroup): 3814 self.proc_characteristic['grouped_matrix'] = False 3815 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 3816 # indicate the PDG of all initial particle 3817 try: 3818 pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3819 pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3820 except AttributeError: 3821 pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3822 pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3823 self.proc_characteristic['pdg_initial1'] = pdgs1 3824 self.proc_characteristic['pdg_initial2'] = pdgs2 3825 3826 3827 modelname = self.opt['model'] 3828 if modelname == 'mssm' or modelname.startswith('mssm-'): 3829 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3830 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3831 check_param_card.convert_to_mg5card(param_card, mg5_param) 3832 check_param_card.check_valid_param_card(mg5_param) 3833 3834 # Add the combine_events.f modify param_card path/number of @X 3835 filename = pjoin(self.dir_path,'Source','combine_events.f') 3836 try: 3837 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3838 except AttributeError: 3839 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3840 nb_proc = len(set(nb_proc)) 3841 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3842 # Write maxconfigs.inc based on max of ME's/subprocess groups 3843 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3844 self.write_maxconfigs_file(writers.FortranWriter(filename), 3845 matrix_elements) 3846 3847 # Write maxparticles.inc based on max of ME's/subprocess groups 3848 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3849 self.write_maxparticles_file(writers.FortranWriter(filename), 3850 matrix_elements) 3851 3852 # Touch "done" file 3853 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3854 3855 # Check for compiler 3856 self.set_compiler(compiler) 3857 self.set_cpp_compiler(compiler['cpp']) 3858 3859 3860 old_pos = os.getcwd() 3861 subpath = pjoin(self.dir_path, 'SubProcesses') 3862 3863 P_dir_list = [proc for proc in os.listdir(subpath) 3864 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3865 3866 devnull = os.open(os.devnull, os.O_RDWR) 3867 # Convert the poscript in jpg files (if authorize) 3868 if makejpg: 3869 try: 3870 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3871 except Exception, error: 3872 pass 3873 3874 if misc.which('gs'): 3875 logger.info("Generate jpeg diagrams") 3876 for Pdir in P_dir_list: 3877 
misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3878 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3879 3880 logger.info("Generate web pages") 3881 # Create the WebPage using perl script 3882 3883 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3884 stdout = devnull,cwd=pjoin(self.dir_path)) 3885 3886 #os.chdir(os.path.pardir) 3887 3888 obj = gen_infohtml.make_info_html(self.dir_path) 3889 3890 if online: 3891 nb_channel = obj.rep_rule['nb_gen_diag'] 3892 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3893 #add the information to proc_charac 3894 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3895 3896 # Write command history as proc_card_mg5 3897 if os.path.isdir(pjoin(self.dir_path,'Cards')): 3898 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3899 history.write(output_file) 3900 3901 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3902 stdout = devnull) 3903 3904 # create the proc_characteristic file 3905 self.create_proc_charac(matrix_elements, history) 3906 3907 # create the run_card 3908 ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist) 3909 3910 # Run "make" to generate madevent.tar.gz file 3911 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3912 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 3913 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 3914 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 3915 stdout = devnull, cwd=self.dir_path) 3916 3917 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3918 stdout = devnull, cwd=self.dir_path)
3919 3920 3921 3922 3923 3924 3925 #return to the initial dir 3926 #os.chdir(old_pos) 3927 3928 #=========================================================================== 3929 # write_matrix_element_v4 3930 #===========================================================================
3931 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 3932 proc_id = "", config_map = [], subproc_number = ""):
3933 """Export a matrix element to a matrix.f file in MG4 madevent format""" 3934 3935 if not matrix_element.get('processes') or \ 3936 not matrix_element.get('diagrams'): 3937 return 0 3938 3939 if writer: 3940 if not isinstance(writer, writers.FortranWriter): 3941 raise writers.FortranWriter.FortranWriterError(\ 3942 "writer not FortranWriter") 3943 # Set lowercase/uppercase Fortran code 3944 writers.FortranWriter.downcase = False 3945 3946 # The proc prefix is not used for MadEvent output so it can safely be set 3947 # to an empty string. 3948 replace_dict = {'proc_prefix':''} 3949 3950 # Extract helas calls 3951 helas_calls = fortran_model.get_matrix_element_calls(\ 3952 matrix_element) 3953 3954 replace_dict['helas_calls'] = "\n".join(helas_calls) 3955 3956 3957 # Extract version number and date from VERSION file 3958 info_lines = self.get_mg5_info_lines() 3959 replace_dict['info_lines'] = info_lines 3960 3961 # Extract process info lines 3962 process_lines = self.get_process_info_lines(matrix_element) 3963 replace_dict['process_lines'] = process_lines 3964 3965 # Set proc_id 3966 replace_dict['proc_id'] = proc_id 3967 3968 # Extract ncomb 3969 ncomb = matrix_element.get_helicity_combinations() 3970 replace_dict['ncomb'] = ncomb 3971 3972 # Extract helicity lines 3973 helicity_lines = self.get_helicity_lines(matrix_element) 3974 replace_dict['helicity_lines'] = helicity_lines 3975 3976 # Extract IC line 3977 ic_line = self.get_ic_line(matrix_element) 3978 replace_dict['ic_line'] = ic_line 3979 3980 # Extract overall denominator 3981 # Averaging initial state color, spin, and identical FS particles 3982 den_factor_line = self.get_den_factor_line(matrix_element) 3983 replace_dict['den_factor_line'] = den_factor_line 3984 3985 # Extract ngraphs 3986 ngraphs = matrix_element.get_number_of_amplitudes() 3987 replace_dict['ngraphs'] = ngraphs 3988 3989 # Extract ndiags 3990 ndiags = len(matrix_element.get('diagrams')) 3991 replace_dict['ndiags'] = ndiags 3992 3993 # Set define_iconfigs_lines 3994 replace_dict['define_iconfigs_lines'] = \ 3995 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 3996 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 3997 3998 if proc_id: 3999 # Set lines for subprocess group version 4000 # Set define_iconfigs_lines 4001 replace_dict['define_iconfigs_lines'] += \ 4002 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4003 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4004 # Set set_amp2_line 4005 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 4006 proc_id 4007 else: 4008 # Standard running 4009 # Set set_amp2_line 4010 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 4011 4012 # Extract nwavefuncs 4013 nwavefuncs = matrix_element.get_number_of_wavefunctions() 4014 replace_dict['nwavefuncs'] = nwavefuncs 4015 4016 # Extract ncolor 4017 ncolor = max(1, len(matrix_element.get('color_basis'))) 4018 replace_dict['ncolor'] = ncolor 4019 4020 # Extract color data lines 4021 color_data_lines = self.get_color_data_lines(matrix_element) 4022 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 4023 4024 4025 # Set the size of Wavefunction 4026 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 4027 replace_dict['wavefunctionsize'] = 18 4028 else: 4029 replace_dict['wavefunctionsize'] = 6 4030 4031 # Extract amp2 lines 4032 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 4033 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 4034 4035 # The JAMP definition depends on the splitting order 4036 
split_orders=matrix_element.get('processes')[0].get('split_orders') 4037 if len(split_orders)>0: 4038 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 4039 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 4040 matrix_element.get('processes')[0],squared_orders) 4041 else: 4042 # Consider the output of a dummy order 'ALL_ORDERS' for which we 4043 # set all amplitude order to weight 1 and only one squared order 4044 # contribution which is of course ALL_ORDERS=2. 4045 squared_orders = [(2,),] 4046 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 4047 replace_dict['chosen_so_configs'] = '.TRUE.' 4048 4049 replace_dict['nAmpSplitOrders']=len(amp_orders) 4050 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 4051 replace_dict['split_order_str_list']=str(split_orders) 4052 replace_dict['nSplitOrders']=max(len(split_orders),1) 4053 amp_so = self.get_split_orders_lines( 4054 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 4055 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 4056 replace_dict['ampsplitorders']='\n'.join(amp_so) 4057 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 4058 4059 4060 # Extract JAMP lines 4061 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 4062 jamp_lines = self.get_JAMP_lines_split_order(\ 4063 matrix_element,amp_orders,split_order_names= 4064 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 4065 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 4066 4067 replace_dict['template_file'] = pjoin(_file_path, \ 4068 'iolibs/template_files/%s' % self.matrix_file) 4069 replace_dict['template_file2'] = pjoin(_file_path, \ 4070 'iolibs/template_files/split_orders_helping_functions.inc') 4071 if writer: 4072 file = open(replace_dict['template_file']).read() 4073 file = file % replace_dict 4074 # Add the split orders helper functions. 4075 file = file + '\n' + open(replace_dict['template_file2'])\ 4076 .read()%replace_dict 4077 # Write the file 4078 writer.writelines(file) 4079 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor 4080 else: 4081 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor) 4082 return replace_dict
4083 4084 #=========================================================================== 4085 # write_auto_dsig_file 4086 #===========================================================================
4087 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
4088 """Write the auto_dsig.f file for the differential cross section 4089 calculation, includes pdf call information""" 4090 4091 if not matrix_element.get('processes') or \ 4092 not matrix_element.get('diagrams'): 4093 return 0 4094 4095 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 4096 self.proc_characteristic['ninitial'] = ninitial 4097 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 4098 4099 # Add information relevant for MLM matching: 4100 # Maximum QCD power in all the contributions 4101 max_qcd_order = 0 4102 for diag in matrix_element.get('diagrams'): 4103 orders = diag.calculate_orders() 4104 if 'QCD' in orders: 4105 max_qcd_order = max(max_qcd_order,orders['QCD']) 4106 max_n_light_final_partons = max(len([1 for id in proc.get_final_ids() 4107 if proc.get('model').get_particle(id).get('mass')=='ZERO' and 4108 proc.get('model').get_particle(id).get('color')>1]) 4109 for proc in matrix_element.get('processes')) 4110 # Maximum number of final state light jets to be matched 4111 self.proc_characteristic['max_n_matched_jets'] = max( 4112 self.proc_characteristic['max_n_matched_jets'], 4113 min(max_qcd_order,max_n_light_final_partons)) 4114 4115 # List of default pdgs to be considered for the CKKWl merging cut 4116 self.proc_characteristic['colored_pdgs'] = \ 4117 sorted(list(set([abs(p.get('pdg_code')) for p in 4118 matrix_element.get('processes')[0].get('model').get('particles') if 4119 p.get('color')>1]))) 4120 4121 if ninitial < 1 or ninitial > 2: 4122 raise writers.FortranWriter.FortranWriterError, \ 4123 """Need ninitial = 1 or 2 to write auto_dsig file""" 4124 4125 replace_dict = {} 4126 4127 # Extract version number and date from VERSION file 4128 info_lines = self.get_mg5_info_lines() 4129 replace_dict['info_lines'] = info_lines 4130 4131 # Extract process info lines 4132 process_lines = self.get_process_info_lines(matrix_element) 4133 replace_dict['process_lines'] = process_lines 4134 4135 # Set proc_id 4136 replace_dict['proc_id'] = proc_id 4137 replace_dict['numproc'] = 1 4138 4139 # Set dsig_line 4140 if ninitial == 1: 4141 # No conversion, since result of decay should be given in GeV 4142 dsig_line = "pd(0)*dsiguu" 4143 else: 4144 # Convert result (in GeV) to pb 4145 dsig_line = "pd(0)*conv*dsiguu" 4146 4147 replace_dict['dsig_line'] = dsig_line 4148 4149 # Extract pdf lines 4150 pdf_vars, pdf_data, pdf_lines = \ 4151 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 4152 replace_dict['pdf_vars'] = pdf_vars 4153 replace_dict['pdf_data'] = pdf_data 4154 replace_dict['pdf_lines'] = pdf_lines 4155 4156 # Lines that differ between subprocess group and regular 4157 if proc_id: 4158 replace_dict['numproc'] = int(proc_id) 4159 replace_dict['passcuts_begin'] = "" 4160 replace_dict['passcuts_end'] = "" 4161 # Set lines for subprocess group version 4162 # Set define_iconfigs_lines 4163 replace_dict['define_subdiag_lines'] = \ 4164 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4165 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4166 replace_dict['cutsdone'] = "" 4167 else: 4168 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 4169 replace_dict['passcuts_end'] = "ENDIF" 4170 replace_dict['define_subdiag_lines'] = "" 4171 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 
4172 4173 if not isinstance(self, ProcessExporterFortranMEGroup): 4174 ncomb=matrix_element.get_helicity_combinations() 4175 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4176 else: 4177 replace_dict['read_write_good_hel'] = "" 4178 4179 context = {'read_write_good_hel':True} 4180 4181 if writer: 4182 file = open(pjoin(_file_path, \ 4183 'iolibs/template_files/auto_dsig_v4.inc')).read() 4184 file = file % replace_dict 4185 4186 # Write the file 4187 writer.writelines(file, context=context) 4188 else: 4189 return replace_dict, context
4190 #=========================================================================== 4191 # write_coloramps_file 4192 #===========================================================================
4193 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4194 """Write the coloramps.inc file for MadEvent""" 4195 4196 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4197 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4198 (max(len(matrix_element.get('color_basis').keys()), 1), 4199 len(mapconfigs))) 4200 4201 4202 # Write the file 4203 writer.writelines(lines) 4204 4205 return True
4206 4207 #=========================================================================== 4208 # write_colors_file 4209 #===========================================================================
4210 - def write_colors_file(self, writer, matrix_elements):
4211 """Write the get_color.f file for MadEvent, which returns color 4212 for all particles used in the matrix element.""" 4213 4214 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 4215 matrix_elements = [matrix_elements] 4216 4217 model = matrix_elements[0].get('processes')[0].get('model') 4218 4219 # We need the both particle and antiparticle wf_ids, since the identity 4220 # depends on the direction of the wf. 4221 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 4222 for wf in d.get('wavefunctions')],[]) \ 4223 for d in me.get('diagrams')], []) \ 4224 for me in matrix_elements], [])) 4225 4226 leg_ids = set(sum([sum([sum([[l.get('id'), 4227 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 4228 for l in p.get_legs_with_decays()], []) \ 4229 for p in me.get('processes')], []) \ 4230 for me in matrix_elements], [])) 4231 particle_ids = sorted(list(wf_ids.union(leg_ids))) 4232 4233 lines = """function get_color(ipdg) 4234 implicit none 4235 integer get_color, ipdg 4236 4237 if(ipdg.eq.%d)then 4238 get_color=%d 4239 return 4240 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 4241 4242 for part_id in particle_ids[1:]: 4243 lines += """else if(ipdg.eq.%d)then 4244 get_color=%d 4245 return 4246 """ % (part_id, model.get_particle(part_id).get_color()) 4247 # Dummy particle for multiparticle vertices with pdg given by 4248 # first code not in the model 4249 lines += """else if(ipdg.eq.%d)then 4250 c This is dummy particle used in multiparticle vertices 4251 get_color=2 4252 return 4253 """ % model.get_first_non_pdg() 4254 lines += """else 4255 write(*,*)'Error: No color given for pdg ',ipdg 4256 get_color=0 4257 return 4258 endif 4259 end 4260 """ 4261 4262 # Write the file 4263 writer.writelines(lines) 4264 4265 return True
4266 4267 #=========================================================================== 4268 # write_config_nqcd_file 4269 #===========================================================================
4270 - def write_config_nqcd_file(self, writer, nqcd_list):
4271 """Write the config_nqcd.inc with the number of QCD couplings 4272 for each config""" 4273 4274 lines = [] 4275 for iconf, n in enumerate(nqcd_list): 4276 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4277 4278 # Write the file 4279 writer.writelines(lines) 4280 4281 return True
4282 4283 #=========================================================================== 4284 # write_maxconfigs_file 4285 #===========================================================================
4286 - def write_maxconfigs_file(self, writer, matrix_elements):
4287 """Write the maxconfigs.inc file for MadEvent""" 4288 4289 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4290 maxconfigs = max([me.get_num_configs() for me in \ 4291 matrix_elements.get('matrix_elements')]) 4292 else: 4293 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4294 4295 lines = "integer lmaxconfigs\n" 4296 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4297 4298 # Write the file 4299 writer.writelines(lines) 4300 4301 return True
4302 4303 #=========================================================================== 4304 # read_write_good_hel 4305 #===========================================================================
4306 - def read_write_good_hel(self, ncomb):
4307 """return the code to read/write the good_hel common_block""" 4308 4309 convert = {'ncomb' : ncomb} 4310 output = """ 4311 subroutine write_good_hel(stream_id) 4312 implicit none 4313 integer stream_id 4314 INTEGER NCOMB 4315 PARAMETER ( NCOMB=%(ncomb)d) 4316 LOGICAL GOODHEL(NCOMB) 4317 INTEGER NTRY 4318 common/BLOCK_GOODHEL/NTRY,GOODHEL 4319 write(stream_id,*) GOODHEL 4320 return 4321 end 4322 4323 4324 subroutine read_good_hel(stream_id) 4325 implicit none 4326 include 'genps.inc' 4327 integer stream_id 4328 INTEGER NCOMB 4329 PARAMETER ( NCOMB=%(ncomb)d) 4330 LOGICAL GOODHEL(NCOMB) 4331 INTEGER NTRY 4332 common/BLOCK_GOODHEL/NTRY,GOODHEL 4333 read(stream_id,*) GOODHEL 4334 NTRY = MAXTRIES + 1 4335 return 4336 end 4337 4338 subroutine init_good_hel() 4339 implicit none 4340 INTEGER NCOMB 4341 PARAMETER ( NCOMB=%(ncomb)d) 4342 LOGICAL GOODHEL(NCOMB) 4343 INTEGER NTRY 4344 INTEGER I 4345 4346 do i=1,NCOMB 4347 GOODHEL(I) = .false. 4348 enddo 4349 NTRY = 0 4350 end 4351 4352 integer function get_maxsproc() 4353 implicit none 4354 get_maxsproc = 1 4355 return 4356 end 4357 4358 """ % convert 4359 4360 return output
4361 4362 #=========================================================================== 4363 # write_config_subproc_map_file 4364 #===========================================================================
4365 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4366 """Write a dummy config_subproc.inc file for MadEvent""" 4367 4368 lines = [] 4369 4370 for iconfig in range(len(s_and_t_channels)): 4371 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4372 (iconfig + 1)) 4373 4374 # Write the file 4375 writer.writelines(lines) 4376 4377 return True
4378 4379 #=========================================================================== 4380 # write_configs_file 4381 #===========================================================================
4382 - def write_configs_file(self, writer, matrix_element):
4383 """Write the configs.inc file for MadEvent""" 4384 4385 # Extract number of external particles 4386 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4387 4388 model = matrix_element.get('processes')[0].get('model') 4389 configs = [(i+1, d) for (i, d) in \ 4390 enumerate(matrix_element.get('diagrams'))] 4391 mapconfigs = [c[0] for c in configs] 4392 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4393 [[c[1]] for c in configs], 4394 mapconfigs, 4395 nexternal, ninitial, 4396 model)
4397 4398 #=========================================================================== 4399 # write_run_configs_file 4400 #===========================================================================
4401 - def write_run_config_file(self, writer):
4402 """Write the run_configs.inc file for MadEvent""" 4403 4404 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4405 4406 if self.proc_characteristic['loop_induced']: 4407 job_per_chan = 1 4408 else: 4409 job_per_chan = 5 4410 4411 if writer: 4412 text = open(path).read() % {'chanperjob': job_per_chan} 4413 writer.write(text) 4414 return True 4415 else: 4416 return {'chanperjob': job_per_chan}
4417 4418 #=========================================================================== 4419 # write_configs_file_from_diagrams 4420 #===========================================================================
4421 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 4422 nexternal, ninitial, model):
4423 """Write the actual configs.inc file. 4424 4425 configs is the diagrams corresponding to configs (each 4426 diagrams is a list of corresponding diagrams for all 4427 subprocesses, with None if there is no corresponding diagrams 4428 for a given process). 4429 mapconfigs gives the diagram number for each config. 4430 4431 For s-channels, we need to output one PDG for each subprocess in 4432 the subprocess group, in order to be able to pick the right 4433 one for multiprocesses.""" 4434 4435 lines = [] 4436 4437 s_and_t_channels = [] 4438 4439 nqcd_list = [] 4440 4441 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 4442 for config in configs if [d for d in config if d][0].\ 4443 get_vertex_leg_numbers()!=[]] 4444 minvert = min(vert_list) if vert_list!=[] else 0 4445 4446 # Number of subprocesses 4447 nsubprocs = len(configs[0]) 4448 4449 nconfigs = 0 4450 4451 new_pdg = model.get_first_non_pdg() 4452 4453 for iconfig, helas_diags in enumerate(configs): 4454 if any([vert > minvert for vert in 4455 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 4456 # Only 3-vertices allowed in configs.inc 4457 continue 4458 nconfigs += 1 4459 4460 # Need s- and t-channels for all subprocesses, including 4461 # those that don't contribute to this config 4462 empty_verts = [] 4463 stchannels = [] 4464 for h in helas_diags: 4465 if h: 4466 # get_s_and_t_channels gives vertices starting from 4467 # final state external particles and working inwards 4468 stchannels.append(h.get('amplitudes')[0].\ 4469 get_s_and_t_channels(ninitial, model, 4470 new_pdg)) 4471 else: 4472 stchannels.append((empty_verts, None)) 4473 4474 # For t-channels, just need the first non-empty one 4475 tchannels = [t for s,t in stchannels if t != None][0] 4476 4477 # For s_and_t_channels (to be used later) use only first config 4478 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 4479 tchannels]) 4480 4481 # Make sure empty_verts is same length as real vertices 4482 if any([s for s,t in stchannels]): 4483 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 4484 4485 # Reorganize s-channel vertices to get a list of all 4486 # subprocesses for each vertex 4487 schannels = zip(*[s for s,t in stchannels]) 4488 else: 4489 schannels = [] 4490 4491 allchannels = schannels 4492 if len(tchannels) > 1: 4493 # Write out tchannels only if there are any non-trivial ones 4494 allchannels = schannels + tchannels 4495 4496 # Write out propagators for s-channel and t-channel vertices 4497 4498 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 4499 # Correspondance between the config and the diagram = amp2 4500 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 4501 mapconfigs[iconfig])) 4502 # Number of QCD couplings in this diagram 4503 nqcd = 0 4504 for h in helas_diags: 4505 if h: 4506 try: 4507 nqcd = h.calculate_orders()['QCD'] 4508 except KeyError: 4509 pass 4510 break 4511 else: 4512 continue 4513 4514 nqcd_list.append(nqcd) 4515 4516 for verts in allchannels: 4517 if verts in schannels: 4518 vert = [v for v in verts if v][0] 4519 else: 4520 vert = verts 4521 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 4522 last_leg = vert.get('legs')[-1] 4523 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 4524 (last_leg.get('number'), nconfigs, len(daughters), 4525 ",".join([str(d) for d in daughters]))) 4526 if verts in schannels: 4527 pdgs = [] 4528 for v in verts: 4529 if v: 4530 pdgs.append(v.get('legs')[-1].get('id')) 4531 else: 4532 pdgs.append(0) 4533 
lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4534 (last_leg.get('number'), nconfigs, nsubprocs, 4535 ",".join([str(d) for d in pdgs]))) 4536 lines.append("data tprid(%d,%d)/0/" % \ 4537 (last_leg.get('number'), nconfigs)) 4538 elif verts in tchannels[:-1]: 4539 lines.append("data tprid(%d,%d)/%d/" % \ 4540 (last_leg.get('number'), nconfigs, 4541 abs(last_leg.get('id')))) 4542 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4543 (last_leg.get('number'), nconfigs, nsubprocs, 4544 ",".join(['0'] * nsubprocs))) 4545 4546 # Write out number of configs 4547 lines.append("# Number of configs") 4548 lines.append("data mapconfig(0)/%d/" % nconfigs) 4549 4550 # Write the file 4551 writer.writelines(lines) 4552 4553 return s_and_t_channels, nqcd_list
4554 4555 #=========================================================================== 4556 # write_decayBW_file 4557 #===========================================================================
4558 - def write_decayBW_file(self, writer, s_and_t_channels):
4559 """Write the decayBW.inc file for MadEvent""" 4560 4561 lines = [] 4562 4563 booldict = {None: "0", True: "1", False: "2"} 4564 4565 for iconf, config in enumerate(s_and_t_channels): 4566 schannels = config[0] 4567 for vertex in schannels: 4568 # For the resulting leg, pick out whether it comes from 4569 # decay or not, as given by the onshell flag 4570 leg = vertex.get('legs')[-1] 4571 lines.append("data gForceBW(%d,%d)/%s/" % \ 4572 (leg.get('number'), iconf + 1, 4573 booldict[leg.get('onshell')])) 4574 4575 # Write the file 4576 writer.writelines(lines) 4577 4578 return True
4579 4580 #=========================================================================== 4581 # write_dname_file 4582 #===========================================================================
4583 - def write_dname_file(self, writer, dir_name):
4584 """Write the dname.mg file for MG4""" 4585 4586 line = "DIRNAME=%s" % dir_name 4587 4588 # Write the file 4589 writer.write(line + "\n") 4590 4591 return True
4592 4593 #=========================================================================== 4594 # write_driver 4595 #===========================================================================
4596 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4597 """Write the SubProcess/driver.f file for MG4""" 4598 4599 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4600 4601 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4602 card = 'Source/MODEL/MG5_param.dat' 4603 else: 4604 card = 'param_card.dat' 4605 # Requiring each helicity configuration to be probed by 10 points for 4606 # matrix element before using the resulting grid for MC over helicity 4607 # sampling. 4608 # We multiply this by 2 because each grouped subprocess is called at most 4609 # twice for each IMIRROR. 4610 replace_dict = {'param_card_name':card, 4611 'ncomb':ncomb, 4612 'hel_init_points':n_grouped_proc*10*2} 4613 if not v5: 4614 replace_dict['secondparam']=',.true.' 4615 else: 4616 replace_dict['secondparam']='' 4617 4618 if writer: 4619 text = open(path).read() % replace_dict 4620 writer.write(text) 4621 return True 4622 else: 4623 return replace_dict
4624 4625 #=========================================================================== 4626 # write_addmothers 4627 #===========================================================================
4628 - def write_addmothers(self, writer):
4629 """Write the SubProcess/addmothers.f""" 4630 4631 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4632 4633 text = open(path).read() % {'iconfig': 'diag_number'} 4634 writer.write(text) 4635 4636 return True
4637 4638 4639 #=========================================================================== 4640 # write_combine_events 4641 #===========================================================================
4642 - def write_combine_events(self, writer, nb_proc=100):
4643 """Write the SubProcess/driver.f file for MG4""" 4644 4645 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4646 4647 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4648 card = 'Source/MODEL/MG5_param.dat' 4649 else: 4650 card = 'param_card.dat' 4651 4652 #set maxpup (number of @X in the process card) 4653 4654 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4655 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4656 writer.write(text) 4657 4658 return True
4659 4660 4661 #=========================================================================== 4662 # write_symmetry 4663 #===========================================================================
4664 - def write_symmetry(self, writer, v5=True):
4665 """Write the SubProcess/driver.f file for ME""" 4666 4667 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4668 4669 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4670 card = 'Source/MODEL/MG5_param.dat' 4671 else: 4672 card = 'param_card.dat' 4673 4674 if v5: 4675 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 4676 else: 4677 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 4678 4679 if writer: 4680 text = open(path).read() 4681 text = text % replace_dict 4682 writer.write(text) 4683 return True 4684 else: 4685 return replace_dict
4686 4687 4688 4689 #=========================================================================== 4690 # write_iproc_file 4691 #===========================================================================
4692 - def write_iproc_file(self, writer, me_number):
4693 """Write the iproc.dat file for MG4""" 4694 line = "%d" % (me_number + 1) 4695 4696 # Write the file 4697 for line_to_write in writer.write_line(line): 4698 writer.write(line_to_write) 4699 return True
4700 4701 #=========================================================================== 4702 # write_mg_sym_file 4703 #===========================================================================
4704 - def write_mg_sym_file(self, writer, matrix_element):
4705 """Write the mg.sym file for MadEvent.""" 4706 4707 lines = [] 4708 4709 # Extract process with all decays included 4710 final_legs = filter(lambda leg: leg.get('state') == True, 4711 matrix_element.get('processes')[0].get_legs_with_decays()) 4712 4713 ninitial = len(filter(lambda leg: leg.get('state') == False, 4714 matrix_element.get('processes')[0].get('legs'))) 4715 4716 identical_indices = {} 4717 4718 # Extract identical particle info 4719 for i, leg in enumerate(final_legs): 4720 if leg.get('id') in identical_indices: 4721 identical_indices[leg.get('id')].append(\ 4722 i + ninitial + 1) 4723 else: 4724 identical_indices[leg.get('id')] = [i + ninitial + 1] 4725 4726 # Remove keys which have only one particle 4727 for key in identical_indices.keys(): 4728 if len(identical_indices[key]) < 2: 4729 del identical_indices[key] 4730 4731 # Write mg.sym file 4732 lines.append(str(len(identical_indices.keys()))) 4733 for key in identical_indices.keys(): 4734 lines.append(str(len(identical_indices[key]))) 4735 for number in identical_indices[key]: 4736 lines.append(str(number)) 4737 4738 # Write the file 4739 writer.writelines(lines) 4740 4741 return True
4742 4743 #=========================================================================== 4744 # write_default_mg_sym_file 4745 #===========================================================================
4746 - def write_default_mg_sym_file(self, writer):
4747 """Write the mg.sym file for MadEvent.""" 4748 4749 lines = "0" 4750 4751 # Write the file 4752 writer.writelines(lines) 4753 4754 return True
4755 4756 #=========================================================================== 4757 # write_ncombs_file 4758 #===========================================================================
4759 - def write_ncombs_file(self, writer, nexternal):
4760 """Write the ncombs.inc file for MadEvent.""" 4761 4762 # ncomb (used for clustering) is 2^nexternal 4763 file = " integer n_max_cl\n" 4764 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4765 4766 # Write the file 4767 writer.writelines(file) 4768 4769 return True
4770 4771 #=========================================================================== 4772 # write_processes_file 4773 #===========================================================================
4774 - def write_processes_file(self, writer, subproc_group):
4775 """Write the processes.dat file with info about the subprocesses 4776 in this group.""" 4777 4778 lines = [] 4779 4780 for ime, me in \ 4781 enumerate(subproc_group.get('matrix_elements')): 4782 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4783 ",".join(p.base_string() for p in \ 4784 me.get('processes')))) 4785 if me.get('has_mirror_process'): 4786 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4787 for proc in mirror_procs: 4788 legs = copy.copy(proc.get('legs_with_decays')) 4789 legs.insert(0, legs.pop(1)) 4790 proc.set("legs_with_decays", legs) 4791 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4792 mirror_procs)) 4793 else: 4794 lines.append("mirror none") 4795 4796 # Write the file 4797 writer.write("\n".join(lines)) 4798 4799 return True
4800 4801 #=========================================================================== 4802 # write_symswap_file 4803 #===========================================================================
4804 - def write_symswap_file(self, writer, ident_perms):
4805 """Write the file symswap.inc for MG4 by comparing diagrams using 4806 the internal matrix element value functionality.""" 4807 4808 lines = [] 4809 4810 # Write out lines for symswap.inc file (used to permute the 4811 # external leg momenta 4812 for iperm, perm in enumerate(ident_perms): 4813 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4814 (iperm+1, ",".join([str(i+1) for i in perm]))) 4815 lines.append("data nsym/%d/" % len(ident_perms)) 4816 4817 # Write the file 4818 writer.writelines(lines) 4819 4820 return True
4821 4822 #=========================================================================== 4823 # write_symfact_file 4824 #===========================================================================
4825 - def write_symfact_file(self, writer, symmetry):
4826 """Write the files symfact.dat for MG4 by comparing diagrams using 4827 the internal matrix element value functionality.""" 4828 4829 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4830 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4831 # Write out lines for symswap.inc file (used to permute the 4832 # external leg momenta 4833 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4834 # Write the file 4835 writer.write('\n'.join(lines)) 4836 writer.write('\n') 4837 4838 return True
4839 4840 #=========================================================================== 4841 # write_symperms_file 4842 #===========================================================================
4843 - def write_symperms_file(self, writer, perms):
4844 """Write the symperms.inc file for subprocess group, used for 4845 symmetric configurations""" 4846 4847 lines = [] 4848 for iperm, perm in enumerate(perms): 4849 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4850 (iperm+1, ",".join([str(i+1) for i in perm]))) 4851 4852 # Write the file 4853 writer.writelines(lines) 4854 4855 return True
4856 4857 #=========================================================================== 4858 # write_subproc 4859 #===========================================================================
4860 - def write_subproc(self, writer, subprocdir):
4861 """Append this subprocess to the subproc.mg file for MG4""" 4862 4863 # Write line to file 4864 writer.write(subprocdir + "\n") 4865 4866 return True
4867
4868 #=============================================================================== 4869 # ProcessExporterFortranMEGroup 4870 #=============================================================================== 4871 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
4872 """Class to take care of exporting a set of matrix elements to 4873 MadEvent subprocess group format.""" 4874 4875 matrix_file = "matrix_madevent_group_v4.inc" 4876 grouped_mode = 'madevent' 4877 #=========================================================================== 4878 # generate_subprocess_directory 4879 #===========================================================================
4880 - def generate_subprocess_directory(self, subproc_group, 4881 fortran_model, 4882 group_number):
4883 """Generate the Pn directory for a subprocess group in MadEvent, 4884 including the necessary matrix_N.f files, configs.inc and various 4885 other helper files.""" 4886 4887 assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \ 4888 "subproc_group object not SubProcessGroup" 4889 4890 if not self.model: 4891 self.model = subproc_group.get('matrix_elements')[0].\ 4892 get('processes')[0].get('model') 4893 4894 cwd = os.getcwd() 4895 path = pjoin(self.dir_path, 'SubProcesses') 4896 4897 os.chdir(path) 4898 pathdir = os.getcwd() 4899 4900 # Create the directory PN in the specified path 4901 subprocdir = "P%d_%s" % (subproc_group.get('number'), 4902 subproc_group.get('name')) 4903 try: 4904 os.mkdir(subprocdir) 4905 except os.error as error: 4906 logger.warning(error.strerror + " " + subprocdir) 4907 4908 try: 4909 os.chdir(subprocdir) 4910 except os.error: 4911 logger.error('Could not cd to directory %s' % subprocdir) 4912 return 0 4913 4914 logger.info('Creating files in directory %s' % subprocdir) 4915 4916 # Create the matrix.f files, auto_dsig.f files and all inc files 4917 # for all subprocesses in the group 4918 4919 maxamps = 0 4920 maxflows = 0 4921 tot_calls = 0 4922 4923 matrix_elements = subproc_group.get('matrix_elements') 4924 4925 # Add the driver.f, all grouped ME's must share the same number of 4926 # helicity configuration 4927 ncomb = matrix_elements[0].get_helicity_combinations() 4928 for me in matrix_elements[1:]: 4929 if ncomb!=me.get_helicity_combinations(): 4930 raise MadGraph5Error, "All grouped processes must share the "+\ 4931 "same number of helicity configurations." 4932 4933 filename = 'driver.f' 4934 self.write_driver(writers.FortranWriter(filename),ncomb, 4935 n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model']) 4936 4937 for ime, matrix_element in \ 4938 enumerate(matrix_elements): 4939 filename = 'matrix%d.f' % (ime+1) 4940 calls, ncolor = \ 4941 self.write_matrix_element_v4(writers.FortranWriter(filename), 4942 matrix_element, 4943 fortran_model, 4944 proc_id=str(ime+1), 4945 config_map=subproc_group.get('diagram_maps')[ime], 4946 subproc_number=group_number) 4947 4948 filename = 'auto_dsig%d.f' % (ime+1) 4949 self.write_auto_dsig_file(writers.FortranWriter(filename), 4950 matrix_element, 4951 str(ime+1)) 4952 4953 # Keep track of needed quantities 4954 tot_calls += int(calls) 4955 maxflows = max(maxflows, ncolor) 4956 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 4957 4958 # Draw diagrams 4959 filename = "matrix%d.ps" % (ime+1) 4960 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 4961 get('diagrams'), 4962 filename, 4963 model = \ 4964 matrix_element.get('processes')[0].\ 4965 get('model'), 4966 amplitude=True) 4967 logger.info("Generating Feynman diagrams for " + \ 4968 matrix_element.get('processes')[0].nice_string()) 4969 plot.draw() 4970 4971 # Extract number of external particles 4972 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4973 4974 # Generate a list of diagrams corresponding to each configuration 4975 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 4976 # If a subprocess has no diagrams for this config, the number is 0 4977 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 4978 4979 filename = 'auto_dsig.f' 4980 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 4981 subproc_group) 4982 4983 filename = 'coloramps.inc' 4984 self.write_coloramps_file(writers.FortranWriter(filename), 4985 subproc_diagrams_for_config, 4986 maxflows, 4987 matrix_elements) 4988 4989 filename = 'get_color.f' 4990 self.write_colors_file(writers.FortranWriter(filename), 4991 matrix_elements) 4992 4993 filename = 'config_subproc_map.inc' 4994 self.write_config_subproc_map_file(writers.FortranWriter(filename), 4995 subproc_diagrams_for_config) 4996 4997 filename = 'configs.inc' 4998 nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 4999 writers.FortranWriter(filename), 5000 subproc_group, 5001 subproc_diagrams_for_config) 5002 5003 filename = 'config_nqcd.inc' 5004 self.write_config_nqcd_file(writers.FortranWriter(filename), 5005 nqcd_list) 5006 5007 filename = 'decayBW.inc' 5008 self.write_decayBW_file(writers.FortranWriter(filename), 5009 s_and_t_channels) 5010 5011 filename = 'dname.mg' 5012 self.write_dname_file(writers.FortranWriter(filename), 5013 subprocdir) 5014 5015 filename = 'iproc.dat' 5016 self.write_iproc_file(writers.FortranWriter(filename), 5017 group_number) 5018 5019 filename = 'leshouche.inc' 5020 self.write_leshouche_file(writers.FortranWriter(filename), 5021 subproc_group) 5022 5023 filename = 'maxamps.inc' 5024 self.write_maxamps_file(writers.FortranWriter(filename), 5025 maxamps, 5026 maxflows, 5027 max([len(me.get('processes')) for me in \ 5028 matrix_elements]), 5029 len(matrix_elements)) 5030 5031 # Note that mg.sym is not relevant for this case 5032 filename = 'mg.sym' 5033 self.write_default_mg_sym_file(writers.FortranWriter(filename)) 5034 5035 filename = 'mirrorprocs.inc' 5036 self.write_mirrorprocs(writers.FortranWriter(filename), 5037 subproc_group) 5038 5039 filename = 'ncombs.inc' 5040 self.write_ncombs_file(writers.FortranWriter(filename), 5041 nexternal) 5042 5043 filename = 'nexternal.inc' 5044 self.write_nexternal_file(writers.FortranWriter(filename), 5045 nexternal, ninitial) 5046 5047 filename = 'ngraphs.inc' 5048 self.write_ngraphs_file(writers.FortranWriter(filename), 5049 nconfigs) 5050 5051 filename = 'pmass.inc' 5052 self.write_pmass_file(writers.FortranWriter(filename), 5053 matrix_element) 5054 5055 filename = 'props.inc' 5056 self.write_props_file(writers.FortranWriter(filename), 5057 matrix_element, 5058 s_and_t_channels) 5059 5060 filename = 'processes.dat' 5061 files.write_to_file(filename, 5062 self.write_processes_file, 5063 subproc_group) 5064 5065 # Find config symmetries and permutations 5066 symmetry, perms, ident_perms = \ 5067 diagram_symmetry.find_symmetry(subproc_group) 5068 5069 filename = 'symswap.inc' 5070 self.write_symswap_file(writers.FortranWriter(filename), 5071 ident_perms) 5072 5073 filename = 'symfact_orig.dat' 5074 self.write_symfact_file(open(filename, 'w'), symmetry) 5075 5076 filename = 'symperms.inc' 5077 self.write_symperms_file(writers.FortranWriter(filename), 5078 perms) 5079 5080 # Generate jpgs -> pass in make_html 5081 #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl')) 5082 5083 self.link_files_in_SubProcess(pjoin(pathdir,subprocdir)) 5084 5085 #import nexternal/leshouch in Source 5086 ln('nexternal.inc', '../../Source', log=False) 5087 ln('leshouche.inc', '../../Source', log=False) 
5088 ln('maxamps.inc', '../../Source', log=False) 5089 5090 # Return to SubProcesses dir) 5091 os.chdir(pathdir) 5092 5093 # Add subprocess to subproc.mg 5094 filename = 'subproc.mg' 5095 files.append_to_file(filename, 5096 self.write_subproc, 5097 subprocdir) 5098 5099 # Return to original dir 5100 os.chdir(cwd) 5101 5102 if not tot_calls: 5103 tot_calls = 0 5104 return tot_calls
5105 5106 #=========================================================================== 5107 # write_super_auto_dsig_file 5108 #===========================================================================
5109 - def write_super_auto_dsig_file(self, writer, subproc_group):
5110 """Write the auto_dsig.f file selecting between the subprocesses 5111 in subprocess group mode""" 5112 5113 replace_dict = {} 5114 5115 # Extract version number and date from VERSION file 5116 info_lines = self.get_mg5_info_lines() 5117 replace_dict['info_lines'] = info_lines 5118 5119 matrix_elements = subproc_group.get('matrix_elements') 5120 5121 # Extract process info lines 5122 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 5123 matrix_elements]) 5124 replace_dict['process_lines'] = process_lines 5125 5126 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 5127 replace_dict['nexternal'] = nexternal 5128 5129 replace_dict['nsprocs'] = 2*len(matrix_elements) 5130 5131 # Generate dsig definition line 5132 dsig_def_line = "DOUBLE PRECISION " + \ 5133 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 5134 range(len(matrix_elements))]) 5135 replace_dict["dsig_def_line"] = dsig_def_line 5136 5137 # Generate dsig process lines 5138 call_dsig_proc_lines = [] 5139 for iproc in range(len(matrix_elements)): 5140 call_dsig_proc_lines.append(\ 5141 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 5142 {"num": iproc + 1, 5143 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 5144 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 5145 5146 ncomb=matrix_elements[0].get_helicity_combinations() 5147 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 5148 5149 if writer: 5150 file = open(pjoin(_file_path, \ 5151 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 5152 file = file % replace_dict 5153 5154 # Write the file 5155 writer.writelines(file) 5156 else: 5157 return replace_dict
5158 5159 #=========================================================================== 5160 # write_mirrorprocs 5161 #===========================================================================
5162 - def write_mirrorprocs(self, writer, subproc_group):
5163 """Write the mirrorprocs.inc file determining which processes have 5164 IS mirror process in subprocess group mode.""" 5165 5166 lines = [] 5167 bool_dict = {True: '.true.', False: '.false.'} 5168 matrix_elements = subproc_group.get('matrix_elements') 5169 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5170 (len(matrix_elements), 5171 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5172 me in matrix_elements]))) 5173 # Write the file 5174 writer.writelines(lines)
5175 5176 #=========================================================================== 5177 # write_addmothers 5178 #===========================================================================
5179 - def write_addmothers(self, writer):
5180 """Write the SubProcess/addmothers.f""" 5181 5182 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5183 5184 text = open(path).read() % {'iconfig': 'lconfig'} 5185 writer.write(text) 5186 5187 return True
5188 5189 5190 #=========================================================================== 5191 # write_coloramps_file 5192 #===========================================================================
5193 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5194 matrix_elements):
5195 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5196 5197 # Create a map from subprocess (matrix element) to a list of 5198 # the diagrams corresponding to each config 5199 5200 lines = [] 5201 5202 subproc_to_confdiag = {} 5203 for config in diagrams_for_config: 5204 for subproc, diag in enumerate(config): 5205 try: 5206 subproc_to_confdiag[subproc].append(diag) 5207 except KeyError: 5208 subproc_to_confdiag[subproc] = [diag] 5209 5210 for subproc in sorted(subproc_to_confdiag.keys()): 5211 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5212 matrix_elements[subproc], 5213 subproc + 1)) 5214 5215 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5216 (maxflows, 5217 len(diagrams_for_config), 5218 len(matrix_elements))) 5219 5220 # Write the file 5221 writer.writelines(lines) 5222 5223 return True
5224 5225 #=========================================================================== 5226 # write_config_subproc_map_file 5227 #===========================================================================
5228 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5229 """Write the config_subproc_map.inc file for subprocess groups""" 5230 5231 lines = [] 5232 # Output only configs that have some corresponding diagrams 5233 iconfig = 0 5234 for config in config_subproc_map: 5235 if set(config) == set([0]): 5236 continue 5237 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5238 (iconfig + 1, len(config), 5239 ",".join([str(i) for i in config]))) 5240 iconfig += 1 5241 # Write the file 5242 writer.writelines(lines) 5243 5244 return True
5245 5246 #=========================================================================== 5247 # read_write_good_hel 5248 #===========================================================================
5249 - def read_write_good_hel(self, ncomb):
5250 """return the code to read/write the good_hel common_block""" 5251 5252 convert = {'ncomb' : ncomb} 5253 5254 output = """ 5255 subroutine write_good_hel(stream_id) 5256 implicit none 5257 integer stream_id 5258 INTEGER NCOMB 5259 PARAMETER ( NCOMB=%(ncomb)d) 5260 LOGICAL GOODHEL(NCOMB, 2) 5261 INTEGER NTRY(2) 5262 common/BLOCK_GOODHEL/NTRY,GOODHEL 5263 write(stream_id,*) GOODHEL 5264 return 5265 end 5266 5267 5268 subroutine read_good_hel(stream_id) 5269 implicit none 5270 include 'genps.inc' 5271 integer stream_id 5272 INTEGER NCOMB 5273 PARAMETER ( NCOMB=%(ncomb)d) 5274 LOGICAL GOODHEL(NCOMB, 2) 5275 INTEGER NTRY(2) 5276 common/BLOCK_GOODHEL/NTRY,GOODHEL 5277 read(stream_id,*) GOODHEL 5278 NTRY(1) = MAXTRIES + 1 5279 NTRY(2) = MAXTRIES + 1 5280 return 5281 end 5282 5283 subroutine init_good_hel() 5284 implicit none 5285 INTEGER NCOMB 5286 PARAMETER ( NCOMB=%(ncomb)d) 5287 LOGICAL GOODHEL(NCOMB, 2) 5288 INTEGER NTRY(2) 5289 INTEGER I 5290 5291 do i=1,NCOMB 5292 GOODHEL(I,1) = .false. 5293 GOODHEL(I,2) = .false. 5294 enddo 5295 NTRY(1) = 0 5296 NTRY(2) = 0 5297 end 5298 5299 integer function get_maxsproc() 5300 implicit none 5301 include 'maxamps.inc' 5302 5303 get_maxsproc = maxsproc 5304 return 5305 end 5306 5307 """ % convert 5308 5309 return output
5310 5311 5312 5313 #=========================================================================== 5314 # write_configs_file 5315 #===========================================================================
5316 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
5317 """Write the configs.inc file with topology information for a 5318 subprocess group. Use the first subprocess with a diagram for each 5319 configuration.""" 5320 5321 matrix_elements = subproc_group.get('matrix_elements') 5322 model = matrix_elements[0].get('processes')[0].get('model') 5323 5324 diagrams = [] 5325 config_numbers = [] 5326 for iconfig, config in enumerate(diagrams_for_config): 5327 # Check if any diagrams correspond to this config 5328 if set(config) == set([0]): 5329 continue 5330 subproc_diags = [] 5331 for s,d in enumerate(config): 5332 if d: 5333 subproc_diags.append(matrix_elements[s].\ 5334 get('diagrams')[d-1]) 5335 else: 5336 subproc_diags.append(None) 5337 diagrams.append(subproc_diags) 5338 config_numbers.append(iconfig + 1) 5339 5340 # Extract number of external particles 5341 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 5342 5343 return len(diagrams), \ 5344 self.write_configs_file_from_diagrams(writer, diagrams, 5345 config_numbers, 5346 nexternal, ninitial, 5347 model)
5348 5349 #=========================================================================== 5350 # write_run_config_file 5351 #===========================================================================
5352 - def write_run_config_file(self, writer):
5353 """Write the run_configs.inc file for MadEvent""" 5354 5355 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5356 if self.proc_characteristic['loop_induced']: 5357 job_per_chan = 1 5358 else: 5359 job_per_chan = 2 5360 text = open(path).read() % {'chanperjob':job_per_chan} 5361 writer.write(text) 5362 return True
5363 5364 5365 #=========================================================================== 5366 # write_leshouche_file 5367 #===========================================================================
5368 - def write_leshouche_file(self, writer, subproc_group):
5369 """Write the leshouche.inc file for MG4""" 5370 5371 all_lines = [] 5372 5373 for iproc, matrix_element in \ 5374 enumerate(subproc_group.get('matrix_elements')): 5375 all_lines.extend(self.get_leshouche_lines(matrix_element, 5376 iproc)) 5377 # Write the file 5378 writer.writelines(all_lines) 5379 return True
5380 5381
5382 - def finalize(self,*args, **opts):
5383 5384 super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts) 5385 #ensure that the grouping information is set to the correct value 5386 self.proc_characteristic['grouped_matrix'] = True
5387 5388 5389 #=============================================================================== 5390 # UFO_model_to_mg4 5391 #=============================================================================== 5392 5393 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
5394 5395 -class UFO_model_to_mg4(object):
5396 """ A converter of the UFO-MG5 Model to the MG4 format """ 5397 5398 # The list below shows the only variables the user is allowed to change by 5399 # himself for each PS point. If he changes any other, then calling 5400 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 5401 # correctly account for the change. 5402 PS_dependent_key = ['aS','MU_R'] 5403 mp_complex_format = 'complex*32' 5404 mp_real_format = 'real*16' 5405 # Warning, it is crucial none of the couplings/parameters of the model 5406 # starts with this prefix. I should add a check for this. 5407 # You can change it as the global variable to check_param_card.ParamCard 5408 mp_prefix = check_param_card.ParamCard.mp_prefix 5409
5410 - def __init__(self, model, output_path, opt=None):
5411 """ initialization of the objects """ 5412 5413 self.model = model 5414 self.model_name = model['name'] 5415 self.dir_path = output_path 5416 5417 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5418 'loop_induced': False} 5419 if opt: 5420 self.opt.update(opt) 5421 5422 self.coups_dep = [] # (name, expression, type) 5423 self.coups_indep = [] # (name, expression, type) 5424 self.params_dep = [] # (name, expression, type) 5425 self.params_indep = [] # (name, expression, type) 5426 self.params_ext = [] # external parameter 5427 self.p_to_f = parsers.UFOExpressionParserFortran(self.model) 5428 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
5429
5430 - def pass_parameter_to_case_insensitive(self):
5431 """modify the parameter if some of them are identical up to the case""" 5432 5433 lower_dict={} 5434 duplicate = set() 5435 keys = self.model['parameters'].keys() 5436 for key in keys: 5437 for param in self.model['parameters'][key]: 5438 lower_name = param.name.lower() 5439 if not lower_name: 5440 continue 5441 try: 5442 lower_dict[lower_name].append(param) 5443 except KeyError,error: 5444 lower_dict[lower_name] = [param] 5445 else: 5446 duplicate.add(lower_name) 5447 logger.debug('%s is define both as lower case and upper case.' 5448 % lower_name) 5449 if not duplicate: 5450 return 5451 5452 re_expr = r'''\b(%s)\b''' 5453 to_change = [] 5454 change={} 5455 for value in duplicate: 5456 for i, var in enumerate(lower_dict[value]): 5457 to_change.append(var.name) 5458 new_name = '%s%s' % (var.name.lower(), 5459 ('__%d'%(i+1) if i>0 else '')) 5460 change[var.name] = new_name 5461 var.name = new_name 5462 5463 # Apply the modification to the map_CTcoup_CTparam of the model 5464 # if it has one (giving for each coupling the CT parameters whcih 5465 # are necessary and which should be exported to the model. 5466 if hasattr(self.model,'map_CTcoup_CTparam'): 5467 for coup, ctparams in self.model.map_CTcoup_CTparam: 5468 for i, ctparam in enumerate(ctparams): 5469 try: 5470 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5471 except KeyError: 5472 pass 5473 5474 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5475 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5476 5477 # change parameters 5478 for key in keys: 5479 if key == ('external',): 5480 continue 5481 for param in self.model['parameters'][key]: 5482 param.expr = rep_pattern.sub(replace, param.expr) 5483 5484 # change couplings 5485 for key in self.model['couplings'].keys(): 5486 for coup in self.model['couplings'][key]: 5487 coup.expr = rep_pattern.sub(replace, coup.expr) 5488 5489 # change mass/width 5490 for part in self.model['particles']: 5491 if str(part.get('mass')) in to_change: 5492 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5493 if str(part.get('width')) in to_change: 5494 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5495
5496 - def refactorize(self, wanted_couplings = []):
5497 """modify the couplings to fit with MG4 convention """ 5498 5499 # Keep only separation in alphaS 5500 keys = self.model['parameters'].keys() 5501 keys.sort(key=len) 5502 for key in keys: 5503 to_add = [o for o in self.model['parameters'][key] if o.name] 5504 5505 if key == ('external',): 5506 self.params_ext += to_add 5507 elif any([(k in key) for k in self.PS_dependent_key]): 5508 self.params_dep += to_add 5509 else: 5510 self.params_indep += to_add 5511 # same for couplings 5512 keys = self.model['couplings'].keys() 5513 keys.sort(key=len) 5514 for key, coup_list in self.model['couplings'].items(): 5515 if any([(k in key) for k in self.PS_dependent_key]): 5516 self.coups_dep += [c for c in coup_list if 5517 (not wanted_couplings or c.name in \ 5518 wanted_couplings)] 5519 else: 5520 self.coups_indep += [c for c in coup_list if 5521 (not wanted_couplings or c.name in \ 5522 wanted_couplings)] 5523 5524 # MG4 use G and not aS as it basic object for alphas related computation 5525 #Pass G in the independant list 5526 if 'G' in self.params_dep: 5527 index = self.params_dep.index('G') 5528 G = self.params_dep.pop(index) 5529 # G.expr = '2*cmath.sqrt(as*pi)' 5530 # self.params_indep.insert(0, self.params_dep.pop(index)) 5531 # No need to add it if not defined 5532 5533 if 'aS' not in self.params_ext: 5534 logger.critical('aS not define as external parameter adding it!') 5535 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5536 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5537 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5538 - def build(self, wanted_couplings = [], full=True):
5539 """modify the couplings to fit with MG4 convention and creates all the 5540 different files""" 5541 5542 self.pass_parameter_to_case_insensitive() 5543 self.refactorize(wanted_couplings) 5544 5545 # write the files 5546 if full: 5547 if wanted_couplings: 5548 # extract the wanted ct parameters 5549 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5550 self.write_all()
5551 5552
5553 - def open(self, name, comment='c', format='default'):
5554 """ Open the file name in the correct directory and with a valid 5555 header.""" 5556 5557 file_path = pjoin(self.dir_path, name) 5558 5559 if format == 'fortran': 5560 fsock = writers.FortranWriter(file_path, 'w') 5561 else: 5562 fsock = open(file_path, 'w') 5563 5564 file.writelines(fsock, comment * 77 + '\n') 5565 file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \ 5566 {'comment': comment + (6 - len(comment)) * ' '}) 5567 file.writelines(fsock, comment * 77 + '\n\n') 5568 return fsock
5569 5570
5571 - def write_all(self):
5572 """ write all the files """ 5573 #write the part related to the external parameter 5574 self.create_ident_card() 5575 self.create_param_read() 5576 5577 #write the definition of the parameter 5578 self.create_input() 5579 self.create_intparam_def(dp=True,mp=False) 5580 if self.opt['mp']: 5581 self.create_intparam_def(dp=False,mp=True) 5582 5583 # definition of the coupling. 5584 self.create_actualize_mp_ext_param_inc() 5585 self.create_coupl_inc() 5586 self.create_write_couplings() 5587 self.create_couplings() 5588 5589 # the makefile 5590 self.create_makeinc() 5591 self.create_param_write() 5592 5593 # The model functions 5594 self.create_model_functions_inc() 5595 self.create_model_functions_def() 5596 5597 # The param_card.dat 5598 self.create_param_card() 5599 5600 5601 # All the standard files 5602 self.copy_standard_file()
5603 5604 ############################################################################ 5605 ## ROUTINE CREATING THE FILES ############################################ 5606 ############################################################################ 5607
5608 - def copy_standard_file(self):
5609 """Copy the standard files for the fortran model.""" 5610 5611 #copy the library files 5612 file_to_link = ['formats.inc','printout.f', \ 5613 'rw_para.f', 'testprog.f'] 5614 5615 for filename in file_to_link: 5616 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 5617 self.dir_path) 5618 5619 file = open(os.path.join(MG5DIR,\ 5620 'models/template_files/fortran/rw_para.f')).read() 5621 5622 includes=["include \'coupl.inc\'","include \'input.inc\'", 5623 "include \'model_functions.inc\'"] 5624 if self.opt['mp']: 5625 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 5626 # In standalone and madloop we do no use the compiled param card but 5627 # still parse the .dat one so we must load it. 5628 if self.opt['loop_induced']: 5629 #loop induced follow MadEvent way to handle the card. 5630 load_card = '' 5631 lha_read_filename='lha_read.f' 5632 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 5633 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5634 lha_read_filename='lha_read_mp.f' 5635 elif self.opt['export_format'].startswith('standalone') \ 5636 or self.opt['export_format'] in ['madweight', 'plugin']\ 5637 or self.opt['export_format'].startswith('matchbox'): 5638 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5639 lha_read_filename='lha_read.f' 5640 else: 5641 load_card = '' 5642 lha_read_filename='lha_read.f' 5643 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 5644 os.path.join(self.dir_path,'lha_read.f')) 5645 5646 file=file%{'includes':'\n '.join(includes), 5647 'load_card':load_card} 5648 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 5649 writer.writelines(file) 5650 writer.close() 5651 5652 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5653 or self.opt['loop_induced']: 5654 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 5655 self.dir_path + '/makefile') 5656 if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 5657 path = pjoin(self.dir_path, 'makefile') 5658 text = open(path).read() 5659 text = text.replace('madevent','aMCatNLO') 5660 open(path, 'w').writelines(text) 5661 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 5662 'madloop','madloop_optimized', 'standalone_rw', 5663 'madweight','matchbox','madloop_matchbox', 'plugin']: 5664 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 5665 self.dir_path + '/makefile') 5666 #elif self.opt['export_format'] in []: 5667 #pass 5668 else: 5669 raise MadGraph5Error('Unknown format')
5670
5671 - def create_coupl_inc(self):
5672 """ write coupling.inc """ 5673 5674 fsock = self.open('coupl.inc', format='fortran') 5675 if self.opt['mp']: 5676 mp_fsock = self.open('mp_coupl.inc', format='fortran') 5677 mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\ 5678 format='fortran') 5679 5680 # Write header 5681 header = """double precision G 5682 common/strong/ G 5683 5684 double complex gal(2) 5685 common/weak/ gal 5686 5687 double precision MU_R 5688 common/rscale/ MU_R 5689 5690 """ 5691 # Nf is the number of light quark flavours 5692 header = header+"""double precision Nf 5693 parameter(Nf=%dd0) 5694 """ % self.model.get_nflav() 5695 #Nl is the number of massless leptons 5696 header = header+"""double precision Nl 5697 parameter(Nl=%dd0) 5698 """ % self.model.get_nleps() 5699 5700 fsock.writelines(header) 5701 5702 if self.opt['mp']: 5703 header = """%(real_mp_format)s %(mp_prefix)sG 5704 common/MP_strong/ %(mp_prefix)sG 5705 5706 %(complex_mp_format)s %(mp_prefix)sgal(2) 5707 common/MP_weak/ %(mp_prefix)sgal 5708 5709 %(complex_mp_format)s %(mp_prefix)sMU_R 5710 common/MP_rscale/ %(mp_prefix)sMU_R 5711 5712 """ 5713 5714 5715 5716 5717 mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format, 5718 'complex_mp_format':self.mp_complex_format, 5719 'mp_prefix':self.mp_prefix}) 5720 mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format, 5721 'complex_mp_format':self.mp_complex_format, 5722 'mp_prefix':''}) 5723 5724 # Write the Mass definition/ common block 5725 masses = set() 5726 widths = set() 5727 if self.opt['complex_mass']: 5728 complex_mass = set() 5729 5730 for particle in self.model.get('particles'): 5731 #find masses 5732 one_mass = particle.get('mass') 5733 if one_mass.lower() != 'zero': 5734 masses.add(one_mass) 5735 5736 # find width 5737 one_width = particle.get('width') 5738 if one_width.lower() != 'zero': 5739 widths.add(one_width) 5740 if self.opt['complex_mass'] and one_mass.lower() != 'zero': 5741 complex_mass.add('CMASS_%s' % one_mass) 5742 5743 if masses: 5744 fsock.writelines('double precision '+','.join(masses)+'\n') 5745 fsock.writelines('common/masses/ '+','.join(masses)+'\n\n') 5746 if self.opt['mp']: 5747 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5748 ','.join(masses)+'\n') 5749 mp_fsock_same_name.writelines('common/MP_masses/ '+\ 5750 ','.join(masses)+'\n\n') 5751 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5752 self.mp_prefix+m for m in masses])+'\n') 5753 mp_fsock.writelines('common/MP_masses/ '+\ 5754 ','.join([self.mp_prefix+m for m in masses])+'\n\n') 5755 5756 if widths: 5757 fsock.writelines('double precision '+','.join(widths)+'\n') 5758 fsock.writelines('common/widths/ '+','.join(widths)+'\n\n') 5759 if self.opt['mp']: 5760 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5761 ','.join(widths)+'\n') 5762 mp_fsock_same_name.writelines('common/MP_widths/ '+\ 5763 ','.join(widths)+'\n\n') 5764 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5765 self.mp_prefix+w for w in widths])+'\n') 5766 mp_fsock.writelines('common/MP_widths/ '+\ 5767 ','.join([self.mp_prefix+w for w in widths])+'\n\n') 5768 5769 # Write the Couplings 5770 coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep] 5771 fsock.writelines('double complex '+', '.join(coupling_list)+'\n') 5772 fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n') 5773 if self.opt['mp']: 5774 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5775 ','.join(coupling_list)+'\n') 5776 
mp_fsock_same_name.writelines('common/MP_couplings/ '+\ 5777 ','.join(coupling_list)+'\n\n') 5778 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5779 self.mp_prefix+c for c in coupling_list])+'\n') 5780 mp_fsock.writelines('common/MP_couplings/ '+\ 5781 ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n') 5782 5783 # Write complex mass for complex mass scheme (if activated) 5784 if self.opt['complex_mass'] and complex_mass: 5785 fsock.writelines('double complex '+', '.join(complex_mass)+'\n') 5786 fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n') 5787 if self.opt['mp']: 5788 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5789 ','.join(complex_mass)+'\n') 5790 mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\ 5791 ','.join(complex_mass)+'\n\n') 5792 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5793 self.mp_prefix+cm for cm in complex_mass])+'\n') 5794 mp_fsock.writelines('common/MP_complex_mass/ '+\ 5795 ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5796
5797 - def create_write_couplings(self):
5798 """ write the file coupl_write.inc """ 5799 5800 fsock = self.open('coupl_write.inc', format='fortran') 5801 5802 fsock.writelines("""write(*,*) ' Couplings of %s' 5803 write(*,*) ' ---------------------------------' 5804 write(*,*) ' '""" % self.model_name) 5805 def format(coupl): 5806 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5807 5808 # Write the Couplings 5809 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5810 fsock.writelines('\n'.join(lines)) 5811 5812
5813 - def create_input(self):
5814 """create input.inc containing the definition of the parameters""" 5815 5816 fsock = self.open('input.inc', format='fortran') 5817 if self.opt['mp']: 5818 mp_fsock = self.open('mp_input.inc', format='fortran') 5819 5820 #find mass/ width since they are already define 5821 already_def = set() 5822 for particle in self.model.get('particles'): 5823 already_def.add(particle.get('mass').lower()) 5824 already_def.add(particle.get('width').lower()) 5825 if self.opt['complex_mass']: 5826 already_def.add('cmass_%s' % particle.get('mass').lower()) 5827 5828 is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \ 5829 name.lower() not in already_def 5830 5831 real_parameters = [param.name for param in self.params_dep + 5832 self.params_indep if param.type == 'real' 5833 and is_valid(param.name)] 5834 5835 real_parameters += [param.name for param in self.params_ext 5836 if param.type == 'real'and 5837 is_valid(param.name)] 5838 5839 # check the parameter is a CT parameter or not 5840 # if yes, just use the needed ones 5841 real_parameters = [param for param in real_parameters \ 5842 if self.check_needed_param(param)] 5843 5844 fsock.writelines('double precision '+','.join(real_parameters)+'\n') 5845 fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n') 5846 if self.opt['mp']: 5847 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5848 self.mp_prefix+p for p in real_parameters])+'\n') 5849 mp_fsock.writelines('common/MP_params_R/ '+','.join([\ 5850 self.mp_prefix+p for p in real_parameters])+'\n\n') 5851 5852 complex_parameters = [param.name for param in self.params_dep + 5853 self.params_indep if param.type == 'complex' and 5854 is_valid(param.name)] 5855 5856 # check the parameter is a CT parameter or not 5857 # if yes, just use the needed ones 5858 complex_parameters = [param for param in complex_parameters \ 5859 if self.check_needed_param(param)] 5860 5861 if complex_parameters: 5862 fsock.writelines('double complex '+','.join(complex_parameters)+'\n') 5863 fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n') 5864 if self.opt['mp']: 5865 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5866 self.mp_prefix+p for p in complex_parameters])+'\n') 5867 mp_fsock.writelines('common/MP_params_C/ '+','.join([\ 5868 self.mp_prefix+p for p in complex_parameters])+'\n\n')
5869
5870 - def check_needed_param(self, param):
5871 """ Returns whether the parameter in argument is needed for this 5872 specific computation or not.""" 5873 5874 # If this is a leading order model or if there was no CT parameter 5875 # employed in this NLO model, one can directly return that the 5876 # parameter is needed since only CTParameters are filtered. 5877 if not hasattr(self, 'allCTparameters') or \ 5878 self.allCTparameters is None or self.usedCTparameters is None or \ 5879 len(self.allCTparameters)==0: 5880 return True 5881 5882 # We must allow the conjugate shorthand for the complex parameter as 5883 # well so we check wether either the parameter name or its name with 5884 # 'conjg__' substituted with '' is present in the list. 5885 # This is acceptable even if some parameter had an original name 5886 # including 'conjg__' in it, because at worst we export a parameter 5887 # was not needed. 5888 param = param.lower() 5889 cjg_param = param.replace('conjg__','',1) 5890 5891 # First make sure it is a CTparameter 5892 if param not in self.allCTparameters and \ 5893 cjg_param not in self.allCTparameters: 5894 return True 5895 5896 # Now check if it is in the list of CTparameters actually used 5897 return (param in self.usedCTparameters or \ 5898 cjg_param in self.usedCTparameters)
5899
5900 - def extract_needed_CTparam(self,wanted_couplings=[]):
5901 """ Extract what are the needed CT parameters given the wanted_couplings""" 5902 5903 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5904 # Setting these lists to none wil disable the filtering in 5905 # check_needed_param 5906 self.allCTparameters = None 5907 self.usedCTparameters = None 5908 return 5909 5910 # All CTparameters appearin in all CT couplings 5911 allCTparameters=self.model.map_CTcoup_CTparam.values() 5912 # Define in this class the list of all CT parameters 5913 self.allCTparameters=list(\ 5914 set(itertools.chain.from_iterable(allCTparameters))) 5915 5916 # All used CT couplings 5917 w_coupls = [coupl.lower() for coupl in wanted_couplings] 5918 allUsedCTCouplings = [coupl for coupl in 5919 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 5920 5921 # Now define the list of all CT parameters that are actually used 5922 self.usedCTparameters=list(\ 5923 set(itertools.chain.from_iterable([ 5924 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 5925 ]))) 5926 5927 # Now at last, make these list case insensitive 5928 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 5929 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
5930
5931 - def create_intparam_def(self, dp=True, mp=False):
5932 """ create intparam_definition.inc setting the internal parameters. 5933 Output the double precision and/or the multiple precision parameters 5934 depending on the parameters dp and mp. If mp only, then the file names 5935 get the 'mp_' prefix. 5936 """ 5937 5938 fsock = self.open('%sintparam_definition.inc'% 5939 ('mp_' if mp and not dp else ''), format='fortran') 5940 5941 fsock.write_comments(\ 5942 "Parameters that should not be recomputed event by event.\n") 5943 fsock.writelines("if(readlha) then\n") 5944 if dp: 5945 fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n") 5946 if mp: 5947 fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n") 5948 5949 for param in self.params_indep: 5950 if param.name == 'ZERO': 5951 continue 5952 # check whether the parameter is a CT parameter 5953 # if yes,just used the needed ones 5954 if not self.check_needed_param(param.name): 5955 continue 5956 if dp: 5957 fsock.writelines("%s = %s\n" % (param.name, 5958 self.p_to_f.parse(param.expr))) 5959 if mp: 5960 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 5961 self.mp_p_to_f.parse(param.expr))) 5962 5963 fsock.writelines('endif') 5964 5965 fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n') 5966 if dp: 5967 fsock.writelines("aS = G**2/4/pi\n") 5968 if mp: 5969 fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n") 5970 for param in self.params_dep: 5971 # check whether the parameter is a CT parameter 5972 # if yes,just used the needed ones 5973 if not self.check_needed_param(param.name): 5974 continue 5975 if dp: 5976 fsock.writelines("%s = %s\n" % (param.name, 5977 self.p_to_f.parse(param.expr))) 5978 elif mp: 5979 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 5980 self.mp_p_to_f.parse(param.expr))) 5981 5982 fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n") 5983 5984 # Let us not necessarily investigate the presence of alpha_EW^-1 of Gf as an external parameter, but also just as a parameter 5985 if ('aEWM1',) in self.model['parameters'] or \ 5986 any( ('aEWM1'.lower() in [p.name.lower() for p in p_list]) for p_list in self.model['parameters'].values() ): 5987 if dp: 5988 fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(ABS(aEWM1)) 5989 gal(2) = 1d0 5990 """) 5991 elif mp: 5992 fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/ABS(MP__aEWM1)) 5993 %(mp_prefix)sgal(2) = 1d0 5994 """ %{'mp_prefix':self.mp_prefix}) 5995 pass 5996 # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable 5997 elif ('Gf',) in self.model['parameters']: 5998 # Make sure to consider complex masses if the complex mass scheme is activated 5999 if self.opt['complex_mass']: 6000 mass_prefix = 'CMASS_MDL_' 6001 else: 6002 mass_prefix = 'MDL_' 6003 6004 if dp: 6005 if self.opt['complex_mass']: 6006 fsock.writelines(""" gal(1) = ABS(2.378414230005442133435d0*%(mass_prefix)sMW*SQRT(DCMPLX(1.0D0,0.0d0)-%(mass_prefix)sMW**2/%(mass_prefix)sMZ**2)*DSQRT(MDL_Gf)) 6007 gal(2) = 1d0 6008 """%{'mass_prefix':mass_prefix}) 6009 else: 6010 fsock.writelines(""" gal(1) = 2.378414230005442133435d0*%(mass_prefix)sMW*DSQRT(1D0-%(mass_prefix)sMW**2/%(mass_prefix)sMZ**2)*DSQRT(MDL_Gf) 6011 gal(2) = 1d0 6012 """%{'mass_prefix':mass_prefix}) 6013 elif mp: 6014 if self.opt['complex_mass']: 6015 fsock.writelines(""" %(mp_prefix)sgal(1) = 
ABS(2*%(mp_prefix)s%(mass_prefix)sMW*SQRT(CMPLX(1e0_16,0.0e0_16,KIND=16)-%(mp_prefix)s%(mass_prefix)sMW**2/%(mp_prefix)s%(mass_prefix)sMZ**2)*SQRT(SQRT(2e0_16)*%(mp_prefix)sMDL_Gf)) 6016 %(mp_prefix)sgal(2) = 1e0_16 6017 """ %{'mp_prefix':self.mp_prefix,'mass_prefix':mass_prefix}) 6018 else: 6019 fsock.writelines(""" %(mp_prefix)sgal(1) = 2*%(mp_prefix)s%(mass_prefix)sMW*SQRT(1e0_16-%(mp_prefix)s%(mass_prefix)sMW**2/%(mp_prefix)s%(mass_prefix)sMZ**2)*SQRT(SQRT(2e0_16)*%(mp_prefix)sMDL_Gf) 6020 %(mp_prefix)sgal(2) = 1e0_16 6021 """ %{'mp_prefix':self.mp_prefix,'mass_prefix':mass_prefix}) 6022 6023 pass 6024 else: 6025 if dp: 6026 logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE') 6027 fsock.writelines(""" gal(1) = 1d0 6028 gal(2) = 1d0 6029 """) 6030 elif mp: 6031 fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16 6032 %(mp_prefix)sgal(2) = 1e0_16 6033 """%{'mp_prefix':self.mp_prefix})
6034 6035
6036 - def create_couplings(self):
6037 """ create couplings.f and all couplingsX.f """ 6038 6039 nb_def_by_file = 25 6040 6041 self.create_couplings_main(nb_def_by_file) 6042 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6043 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6044 6045 for i in range(nb_coup_indep): 6046 # For the independent couplings, we compute the double and multiple 6047 # precision ones together 6048 data = self.coups_indep[nb_def_by_file * i: 6049 min(len(self.coups_indep), nb_def_by_file * (i+1))] 6050 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 6051 6052 for i in range(nb_coup_dep): 6053 # For the dependent couplings, we compute the double and multiple 6054 # precision ones in separate subroutines. 6055 data = self.coups_dep[nb_def_by_file * i: 6056 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6057 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6058 dp=True,mp=False) 6059 if self.opt['mp']: 6060 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6061 dp=False,mp=True)
6062 6063
6064 - def create_couplings_main(self, nb_def_by_file=25):
6065 """ create couplings.f """ 6066 6067 fsock = self.open('couplings.f', format='fortran') 6068 6069 fsock.writelines("""subroutine coup() 6070 6071 implicit none 6072 double precision PI, ZERO 6073 logical READLHA 6074 parameter (PI=3.141592653589793d0) 6075 parameter (ZERO=0d0) 6076 include \'model_functions.inc\'""") 6077 if self.opt['mp']: 6078 fsock.writelines("""%s MP__PI, MP__ZERO 6079 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6080 parameter (MP__ZERO=0e0_16) 6081 include \'mp_input.inc\' 6082 include \'mp_coupl.inc\' 6083 """%self.mp_real_format) 6084 fsock.writelines("""include \'input.inc\' 6085 include \'coupl.inc\' 6086 READLHA = .true. 6087 include \'intparam_definition.inc\'""") 6088 if self.opt['mp']: 6089 fsock.writelines("""include \'mp_intparam_definition.inc\'\n""") 6090 6091 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6092 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6093 6094 fsock.writelines('\n'.join(\ 6095 ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)])) 6096 6097 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6098 6099 fsock.writelines('\n'.join(\ 6100 ['call coup%s()' % (nb_coup_indep + i + 1) \ 6101 for i in range(nb_coup_dep)])) 6102 if self.opt['mp']: 6103 fsock.writelines('\n'.join(\ 6104 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 6105 for i in range(nb_coup_dep)])) 6106 fsock.writelines('''\n return \n end\n''') 6107 6108 fsock.writelines("""subroutine update_as_param() 6109 6110 implicit none 6111 double precision PI, ZERO 6112 logical READLHA 6113 parameter (PI=3.141592653589793d0) 6114 parameter (ZERO=0d0) 6115 include \'model_functions.inc\'""") 6116 fsock.writelines("""include \'input.inc\' 6117 include \'coupl.inc\' 6118 READLHA = .false.""") 6119 fsock.writelines(""" 6120 include \'intparam_definition.inc\'\n 6121 """) 6122 6123 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6124 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6125 6126 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6127 6128 fsock.writelines('\n'.join(\ 6129 ['call coup%s()' % (nb_coup_indep + i + 1) \ 6130 for i in range(nb_coup_dep)])) 6131 fsock.writelines('''\n return \n end\n''') 6132 6133 fsock.writelines("""subroutine update_as_param2(mu_r2,as2) 6134 6135 implicit none 6136 double precision PI 6137 parameter (PI=3.141592653589793d0) 6138 double precision mu_r2, as2 6139 include \'model_functions.inc\'""") 6140 fsock.writelines("""include \'input.inc\' 6141 include \'coupl.inc\'""") 6142 fsock.writelines(""" 6143 if (mu_r2.gt.0d0) MU_R = mu_r2 6144 G = SQRT(4.0d0*PI*AS2) 6145 AS = as2 6146 6147 CALL UPDATE_AS_PARAM() 6148 """) 6149 fsock.writelines('''\n return \n end\n''') 6150 6151 if self.opt['mp']: 6152 fsock.writelines("""subroutine mp_update_as_param() 6153 6154 implicit none 6155 logical READLHA 6156 include \'model_functions.inc\'""") 6157 fsock.writelines("""%s MP__PI, MP__ZERO 6158 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6159 parameter (MP__ZERO=0e0_16) 6160 include \'mp_input.inc\' 6161 include \'mp_coupl.inc\' 6162 """%self.mp_real_format) 6163 fsock.writelines("""include \'input.inc\' 6164 include \'coupl.inc\' 6165 include \'actualize_mp_ext_params.inc\' 6166 READLHA = .false. 
6167 include \'mp_intparam_definition.inc\'\n 6168 """) 6169 6170 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6171 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6172 6173 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6174 6175 fsock.writelines('\n'.join(\ 6176 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 6177 for i in range(nb_coup_dep)])) 6178 fsock.writelines('''\n return \n end\n''')
6179
6180 - def create_couplings_part(self, nb_file, data, dp=True, mp=False):
6181 """ create couplings[nb_file].f containing information coming from data. 6182 Outputs the computation of the double precision and/or the multiple 6183 precision couplings depending on the parameters dp and mp. 6184 If mp is True and dp is False, then the prefix 'MP_' is appended to the 6185 filename and subroutine name. 6186 """ 6187 6188 fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '', 6189 nb_file), format='fortran') 6190 fsock.writelines("""subroutine %scoup%s() 6191 6192 implicit none 6193 include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file)) 6194 if dp: 6195 fsock.writelines(""" 6196 double precision PI, ZERO 6197 parameter (PI=3.141592653589793d0) 6198 parameter (ZERO=0d0) 6199 include 'input.inc' 6200 include 'coupl.inc'""") 6201 if mp: 6202 fsock.writelines("""%s MP__PI, MP__ZERO 6203 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6204 parameter (MP__ZERO=0e0_16) 6205 include \'mp_input.inc\' 6206 include \'mp_coupl.inc\' 6207 """%self.mp_real_format) 6208 6209 for coupling in data: 6210 if dp: 6211 fsock.writelines('%s = %s' % (coupling.name, 6212 self.p_to_f.parse(coupling.expr))) 6213 if mp: 6214 fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name, 6215 self.mp_p_to_f.parse(coupling.expr))) 6216 fsock.writelines('end')
6217
6218 - def create_model_functions_inc(self):
6219 """ Create model_functions.inc which contains the various declarations 6220 of auxiliary functions which might be used in the couplings expressions 6221 """ 6222 6223 additional_fct = [] 6224 # check for functions define in the UFO model 6225 ufo_fct = self.model.get('functions') 6226 if ufo_fct: 6227 for fct in ufo_fct: 6228 # already handle by default 6229 if fct.name not in ["complexconjugate", "re", "im", "sec", 6230 "csc", "asec", "acsc", "theta_function", "cond", 6231 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot", 6232 "grreglog","regsqrt"]: 6233 additional_fct.append(fct.name) 6234 6235 6236 fsock = self.open('model_functions.inc', format='fortran') 6237 fsock.writelines("""double complex cond 6238 double complex condif 6239 double complex reglog 6240 double complex reglogp 6241 double complex reglogm 6242 double complex regsqrt 6243 double complex grreglog 6244 double complex recms 6245 double complex arg 6246 %s 6247 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 6248 6249 6250 if self.opt['mp']: 6251 fsock.writelines("""%(complex_mp_format)s mp_cond 6252 %(complex_mp_format)s mp_condif 6253 %(complex_mp_format)s mp_reglog 6254 %(complex_mp_format)s mp_reglogp 6255 %(complex_mp_format)s mp_reglogm 6256 %(complex_mp_format)s mp_regsqrt 6257 %(complex_mp_format)s mp_grreglog 6258 %(complex_mp_format)s mp_recms 6259 %(complex_mp_format)s mp_arg 6260 %(additional)s 6261 """ %\ 6262 {"additional": "\n".join([" %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]), 6263 'complex_mp_format':self.mp_complex_format 6264 })
6265
6266 - def create_model_functions_def(self):
6267 """ Create model_functions.f which contains the various definitions 6268 of auxiliary functions which might be used in the couplings expressions 6269 Add the functions.f functions for formfactors support 6270 """ 6271 6272 fsock = self.open('model_functions.f', format='fortran') 6273 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 6274 implicit none 6275 double complex condition,truecase,falsecase 6276 if(condition.eq.(0.0d0,0.0d0)) then 6277 cond=truecase 6278 else 6279 cond=falsecase 6280 endif 6281 end 6282 6283 double complex function condif(condition,truecase,falsecase) 6284 implicit none 6285 logical condition 6286 double complex truecase,falsecase 6287 if(condition) then 6288 condif=truecase 6289 else 6290 condif=falsecase 6291 endif 6292 end 6293 6294 double complex function recms(condition,expr) 6295 implicit none 6296 logical condition 6297 double complex expr 6298 if(condition)then 6299 recms=expr 6300 else 6301 recms=dcmplx(dble(expr)) 6302 endif 6303 end 6304 6305 double complex function reglog(arg_in) 6306 implicit none 6307 double complex TWOPII 6308 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6309 double complex arg_in 6310 double complex arg 6311 arg=arg_in 6312 if(dabs(dimag(arg)).eq.0.0d0)then 6313 arg=dcmplx(dble(arg),0.0d0) 6314 endif 6315 if(dabs(dble(arg)).eq.0.0d0)then 6316 arg=dcmplx(0.0d0,dimag(arg)) 6317 endif 6318 if(arg.eq.(0.0d0,0.0d0)) then 6319 reglog=(0.0d0,0.0d0) 6320 else 6321 reglog=log(arg) 6322 endif 6323 end 6324 6325 double complex function reglogp(arg_in) 6326 implicit none 6327 double complex TWOPII 6328 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6329 double complex arg_in 6330 double complex arg 6331 arg=arg_in 6332 if(dabs(dimag(arg)).eq.0.0d0)then 6333 arg=dcmplx(dble(arg),0.0d0) 6334 endif 6335 if(dabs(dble(arg)).eq.0.0d0)then 6336 arg=dcmplx(0.0d0,dimag(arg)) 6337 endif 6338 if(arg.eq.(0.0d0,0.0d0))then 6339 reglogp=(0.0d0,0.0d0) 6340 else 6341 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 6342 reglogp=log(arg) + TWOPII 6343 else 6344 reglogp=log(arg) 6345 endif 6346 endif 6347 end 6348 6349 double complex function reglogm(arg_in) 6350 implicit none 6351 double complex TWOPII 6352 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6353 double complex arg_in 6354 double complex arg 6355 arg=arg_in 6356 if(dabs(dimag(arg)).eq.0.0d0)then 6357 arg=dcmplx(dble(arg),0.0d0) 6358 endif 6359 if(dabs(dble(arg)).eq.0.0d0)then 6360 arg=dcmplx(0.0d0,dimag(arg)) 6361 endif 6362 if(arg.eq.(0.0d0,0.0d0))then 6363 reglogm=(0.0d0,0.0d0) 6364 else 6365 if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 6366 reglogm=log(arg) - TWOPII 6367 else 6368 reglogm=log(arg) 6369 endif 6370 endif 6371 end 6372 6373 double complex function regsqrt(arg_in) 6374 implicit none 6375 double complex arg_in 6376 double complex arg 6377 arg=arg_in 6378 if(dabs(dimag(arg)).eq.0.0d0)then 6379 arg=dcmplx(dble(arg),0.0d0) 6380 endif 6381 if(dabs(dble(arg)).eq.0.0d0)then 6382 arg=dcmplx(0.0d0,dimag(arg)) 6383 endif 6384 regsqrt=sqrt(arg) 6385 end 6386 6387 double complex function grreglog(logsw,expr1_in,expr2_in) 6388 implicit none 6389 double complex TWOPII 6390 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6391 double complex expr1_in,expr2_in 6392 double complex expr1,expr2 6393 double precision logsw 6394 double precision imagexpr 6395 logical firstsheet 6396 expr1=expr1_in 6397 expr2=expr2_in 6398 if(dabs(dimag(expr1)).eq.0.0d0)then 6399 expr1=dcmplx(dble(expr1),0.0d0) 6400 endif 6401 
if(dabs(dble(expr1)).eq.0.0d0)then 6402 expr1=dcmplx(0.0d0,dimag(expr1)) 6403 endif 6404 if(dabs(dimag(expr2)).eq.0.0d0)then 6405 expr2=dcmplx(dble(expr2),0.0d0) 6406 endif 6407 if(dabs(dble(expr2)).eq.0.0d0)then 6408 expr2=dcmplx(0.0d0,dimag(expr2)) 6409 endif 6410 if(expr1.eq.(0.0d0,0.0d0))then 6411 grreglog=(0.0d0,0.0d0) 6412 else 6413 imagexpr=dimag(expr1)*dimag(expr2) 6414 firstsheet=imagexpr.ge.0.0d0 6415 firstsheet=firstsheet.or.dble(expr1).ge.0.0d0 6416 firstsheet=firstsheet.or.dble(expr2).ge.0.0d0 6417 if(firstsheet)then 6418 grreglog=log(expr1) 6419 else 6420 if(dimag(expr1).gt.0.0d0)then 6421 grreglog=log(expr1) - logsw*TWOPII 6422 else 6423 grreglog=log(expr1) + logsw*TWOPII 6424 endif 6425 endif 6426 endif 6427 end 6428 6429 double complex function arg(comnum) 6430 implicit none 6431 double complex comnum 6432 double complex iim 6433 iim = (0.0d0,1.0d0) 6434 if(comnum.eq.(0.0d0,0.0d0)) then 6435 arg=(0.0d0,0.0d0) 6436 else 6437 arg=log(comnum/abs(comnum))/iim 6438 endif 6439 end""") 6440 if self.opt['mp']: 6441 fsock.writelines(""" 6442 6443 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 6444 implicit none 6445 %(complex_mp_format)s condition,truecase,falsecase 6446 if(condition.eq.(0.0e0_16,0.0e0_16)) then 6447 mp_cond=truecase 6448 else 6449 mp_cond=falsecase 6450 endif 6451 end 6452 6453 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 6454 implicit none 6455 logical condition 6456 %(complex_mp_format)s truecase,falsecase 6457 if(condition) then 6458 mp_condif=truecase 6459 else 6460 mp_condif=falsecase 6461 endif 6462 end 6463 6464 %(complex_mp_format)s function mp_recms(condition,expr) 6465 implicit none 6466 logical condition 6467 %(complex_mp_format)s expr 6468 if(condition)then 6469 mp_recms=expr 6470 else 6471 mp_recms=cmplx(real(expr),kind=16) 6472 endif 6473 end 6474 6475 6476 %(complex_mp_format)s function mp_reglog(arg_in) 6477 implicit none 6478 %(complex_mp_format)s TWOPII 6479 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6480 %(complex_mp_format)s arg_in 6481 %(complex_mp_format)s arg 6482 arg=arg_in 6483 if(abs(imagpart(arg)).eq.0.0e0_16)then 6484 arg=cmplx(real(arg,kind=16),0.0e0_16) 6485 endif 6486 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 6487 arg=cmplx(0.0e0_16,imagpart(arg)) 6488 endif 6489 if(arg.eq.(0.0e0_16,0.0e0_16)) then 6490 mp_reglog=(0.0e0_16,0.0e0_16) 6491 else 6492 mp_reglog=log(arg) 6493 endif 6494 end 6495 6496 %(complex_mp_format)s function mp_reglogp(arg_in) 6497 implicit none 6498 %(complex_mp_format)s TWOPII 6499 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6500 %(complex_mp_format)s arg_in 6501 %(complex_mp_format)s arg 6502 arg=arg_in 6503 if(abs(imagpart(arg)).eq.0.0e0_16)then 6504 arg=cmplx(real(arg,kind=16),0.0e0_16) 6505 endif 6506 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 6507 arg=cmplx(0.0e0_16,imagpart(arg)) 6508 endif 6509 if(arg.eq.(0.0e0_16,0.0e0_16))then 6510 mp_reglogp=(0.0e0_16,0.0e0_16) 6511 else 6512 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 6513 mp_reglogp=log(arg) + TWOPII 6514 else 6515 mp_reglogp=log(arg) 6516 endif 6517 endif 6518 end 6519 6520 %(complex_mp_format)s function mp_reglogm(arg_in) 6521 implicit none 6522 %(complex_mp_format)s TWOPII 6523 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6524 %(complex_mp_format)s arg_in 6525 %(complex_mp_format)s arg 6526 arg=arg_in 6527 
if(abs(imagpart(arg)).eq.0.0e0_16)then 6528 arg=cmplx(real(arg,kind=16),0.0e0_16) 6529 endif 6530 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 6531 arg=cmplx(0.0e0_16,imagpart(arg)) 6532 endif 6533 if(arg.eq.(0.0e0_16,0.0e0_16))then 6534 mp_reglogm=(0.0e0_16,0.0e0_16) 6535 else 6536 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 6537 mp_reglogm=log(arg) - TWOPII 6538 else 6539 mp_reglogm=log(arg) 6540 endif 6541 endif 6542 end 6543 6544 %(complex_mp_format)s function mp_regsqrt(arg_in) 6545 implicit none 6546 %(complex_mp_format)s arg_in 6547 %(complex_mp_format)s arg 6548 arg=arg_in 6549 if(abs(imagpart(arg)).eq.0.0e0_16)then 6550 arg=cmplx(real(arg,kind=16),0.0e0_16) 6551 endif 6552 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 6553 arg=cmplx(0.0e0_16,imagpart(arg)) 6554 endif 6555 mp_regsqrt=sqrt(arg) 6556 end 6557 6558 %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in) 6559 implicit none 6560 %(complex_mp_format)s TWOPII 6561 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6562 %(complex_mp_format)s expr1_in,expr2_in 6563 %(complex_mp_format)s expr1,expr2 6564 %(real_mp_format)s logsw 6565 %(real_mp_format)s imagexpr 6566 logical firstsheet 6567 expr1=expr1_in 6568 expr2=expr2_in 6569 if(abs(imagpart(expr1)).eq.0.0e0_16)then 6570 expr1=cmplx(real(expr1,kind=16),0.0e0_16) 6571 endif 6572 if(abs(real(expr1,kind=16)).eq.0.0e0_16)then 6573 expr1=cmplx(0.0e0_16,imagpart(expr1)) 6574 endif 6575 if(abs(imagpart(expr2)).eq.0.0e0_16)then 6576 expr2=cmplx(real(expr2,kind=16),0.0e0_16) 6577 endif 6578 if(abs(real(expr2,kind=16)).eq.0.0e0_16)then 6579 expr2=cmplx(0.0e0_16,imagpart(expr2)) 6580 endif 6581 if(expr1.eq.(0.0e0_16,0.0e0_16))then 6582 mp_grreglog=(0.0e0_16,0.0e0_16) 6583 else 6584 imagexpr=imagpart(expr1)*imagpart(expr2) 6585 firstsheet=imagexpr.ge.0.0e0_16 6586 firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16 6587 firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16 6588 if(firstsheet)then 6589 mp_grreglog=log(expr1) 6590 else 6591 if(imagpart(expr1).gt.0.0e0_16)then 6592 mp_grreglog=log(expr1) - logsw*TWOPII 6593 else 6594 mp_grreglog=log(expr1) + logsw*TWOPII 6595 endif 6596 endif 6597 endif 6598 end 6599 6600 %(complex_mp_format)s function mp_arg(comnum) 6601 implicit none 6602 %(complex_mp_format)s comnum 6603 %(complex_mp_format)s imm 6604 imm = (0.0e0_16,1.0e0_16) 6605 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 6606 mp_arg=(0.0e0_16,0.0e0_16) 6607 else 6608 mp_arg=log(comnum/abs(comnum))/imm 6609 endif 6610 end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format}) 6611 6612 6613 #check for the file functions.f 6614 model_path = self.model.get('modelpath') 6615 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6616 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6617 input = pjoin(model_path,'Fortran','functions.f') 6618 file.writelines(fsock, open(input).read()) 6619 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6620 6621 # check for functions define in the UFO model 6622 ufo_fct = self.model.get('functions') 6623 if ufo_fct: 6624 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6625 for fct in ufo_fct: 6626 # already handle by default 6627 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6628 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg", 6629 "grreglog","regsqrt"]: 6630 6631 ufo_fct_template = """ 6632 double complex function %(name)s(%(args)s) 6633 implicit none 
6634 double complex %(args)s 6635 %(definitions)s 6636 %(name)s = %(fct)s 6637 6638 return 6639 end 6640 """ 6641 str_fct = self.p_to_f.parse(fct.expr) 6642 if not self.p_to_f.to_define: 6643 definitions = [] 6644 else: 6645 definitions=[] 6646 for d in self.p_to_f.to_define: 6647 if d == 'pi': 6648 definitions.append(' double precision pi') 6649 definitions.append(' data pi /3.1415926535897932d0/') 6650 else: 6651 definitions.append(' double complex %s' % d) 6652 6653 text = ufo_fct_template % { 6654 'name': fct.name, 6655 'args': ", ".join(fct.arguments), 6656 'fct': str_fct, 6657 'definitions': '\n'.join(definitions) 6658 } 6659 6660 fsock.writelines(text) 6661 if self.opt['mp']: 6662 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6663 for fct in ufo_fct: 6664 # already handle by default 6665 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6666 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg", 6667 "grreglog","regsqrt"]: 6668 6669 ufo_fct_template = """ 6670 %(complex_mp_format)s function mp_%(name)s(mp__%(args)s) 6671 implicit none 6672 %(complex_mp_format)s mp__%(args)s 6673 %(definitions)s 6674 mp_%(name)s = %(fct)s 6675 6676 return 6677 end 6678 """ 6679 str_fct = self.mp_p_to_f.parse(fct.expr) 6680 if not self.mp_p_to_f.to_define: 6681 definitions = [] 6682 else: 6683 definitions=[] 6684 for d in self.mp_p_to_f.to_define: 6685 if d == 'pi': 6686 definitions.append(' %s mp__pi' % self.mp_real_format) 6687 definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/') 6688 else: 6689 definitions.append(' %s mp_%s' % (self.mp_complex_format,d)) 6690 text = ufo_fct_template % { 6691 'name': fct.name, 6692 'args': ", mp__".join(fct.arguments), 6693 'fct': str_fct, 6694 'definitions': '\n'.join(definitions), 6695 'complex_mp_format': self.mp_complex_format 6696 } 6697 fsock.writelines(text) 6698 6699 6700 6701 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
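# Illustrative sketch (comment only, not executed): a minimal Python
# transcription of the branch-cut conventions implemented by the generated
# reglog/reglogp functions above (the cleanup of numerically tiny real or
# imaginary parts done in the Fortran is omitted here):
#
#     import cmath
#
#     def reglog(z):
#         # log(z) with the convention log(0) = 0
#         return 0j if z == 0 else cmath.log(z)
#
#     def reglogp(z):
#         # like reglog, but arguments with both real and imaginary parts
#         # negative (third quadrant) pick up an extra +2*pi*i
#         if z == 0:
#             return 0j
#         if z.real < 0 and z.imag < 0:
#             return cmath.log(z) + 2j * cmath.pi
#         return cmath.log(z)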
6702 6703 6704
6705 - def create_makeinc(self):
6706 """create makeinc.inc containing the file to compile """ 6707 6708 fsock = self.open('makeinc.inc', comment='#') 6709 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6710 text += ' model_functions.o ' 6711 6712 nb_coup_indep = 1 + len(self.coups_dep) // 25 6713 nb_coup_dep = 1 + len(self.coups_indep) // 25 6714 couplings_files=['couplings%s.o' % (i+1) \ 6715 for i in range(nb_coup_dep + nb_coup_indep) ] 6716 if self.opt['mp']: 6717 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6718 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6719 text += ' '.join(couplings_files) 6720 fsock.writelines(text)
6721
6722 - def create_param_write(self):
6723 """ create param_write """ 6724 6725 fsock = self.open('param_write.inc', format='fortran') 6726 6727 fsock.writelines("""write(*,*) ' External Params' 6728 write(*,*) ' ---------------------------------' 6729 write(*,*) ' '""") 6730 def format(name): 6731 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6732   
6733          # Write the external parameters 
6734          lines = [format(param.name) for param in self.params_ext] 
6735          fsock.writelines('\n'.join(lines)) 
6736   
6737          fsock.writelines("""write(*,*) ' Internal Params' 
6738          write(*,*) ' ---------------------------------' 
6739          write(*,*) ' '""") 
6740          lines = [format(data.name) for data in self.params_indep 
6741                   if data.name != 'ZERO' and self.check_needed_param(data.name)] 
6742          fsock.writelines('\n'.join(lines)) 
6743          fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 
6744          write(*,*) ' ----------------------------------------' 
6745          write(*,*) ' '""") 
6746          lines = [format(data.name) for data in self.params_dep \ 
6747                   if self.check_needed_param(data.name)] 
6748   
6749          fsock.writelines('\n'.join(lines)) 
6750   
6751   
6752  
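# Illustrative sketch (comment only, not executed): every parameter name is
# turned into one Fortran write statement by the local format() helper above.
# For a hypothetical external parameter called MT:
#
#     name = 'MT'
#     line = 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
#     # line == "write(*,*) 'MT = ', MT"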
6753 - def create_ident_card(self):
6754 """ create the ident_card.dat """ 6755 6756 def format(parameter): 6757 """return the line for the ident_card corresponding to this parameter""" 6758 colum = [parameter.lhablock.lower()] + \ 6759 [str(value) for value in parameter.lhacode] + \ 6760 [parameter.name] 6761 if not parameter.name: 6762 return '' 6763 return ' '.join(colum)+'\n'
6764 6765 fsock = self.open('ident_card.dat') 6766 6767 external_param = [format(param) for param in self.params_ext] 6768 fsock.writelines('\n'.join(external_param)) 6769
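# Illustrative sketch (comment only, not executed): each external parameter
# becomes one "block lhacode... name" line in ident_card.dat.  For a
# hypothetical top-quark mass entry with lhablock 'MASS', lhacode [6] and
# name 'MT', the formatted line would read:
#
#     mass 6 MT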
6770 - def create_actualize_mp_ext_param_inc(self):
6771 """ create the actualize_mp_ext_params.inc code """ 6772 6773 # In principle one should actualize all external, but for now, it is 6774 # hardcoded that only AS and MU_R can by dynamically changed by the user 6775 # so that we only update those ones. 6776 # Of course, to be on the safe side, one could decide to update all 6777 # external parameters. 6778 update_params_list=[p for p in self.params_ext if p.name in 6779 self.PS_dependent_key] 6780 6781 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6782 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6783 for param in update_params_list] 6784 # When read_lha is false, it is G which is taken in input and not AS, so 6785 # this is what should be reset here too. 6786 if 'aS' in [param.name for param in update_params_list]: 6787 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6788 6789 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6790 fsock.writelines('\n'.join(res_strings))
6791
6792 - def create_param_read(self):
6793 """create param_read""" 6794 6795 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6796 or self.opt['loop_induced']: 6797 fsock = self.open('param_read.inc', format='fortran') 6798 fsock.writelines(' include \'../param_card.inc\'') 6799 return 6800 6801 def format_line(parameter): 6802 """return the line for the ident_card corresponding to this 6803 parameter""" 6804 template = \ 6805 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 6806 % {'name': parameter.name, 6807 'value': self.p_to_f.parse(str(parameter.value.real))} 6808 if self.opt['mp']: 6809 template = template+ \ 6810 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 6811 "%(mp_prefix)s%(name)s,%(value)s)") \ 6812 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 6813 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 6814 return template 6815 6816 fsock = self.open('param_read.inc', format='fortran') 6817 res_strings = [format_line(param) \ 6818 for param in self.params_ext] 6819 6820 # Correct width sign for Majorana particles (where the width 6821 # and mass need to have the same sign) 6822 for particle in self.model.get('particles'): 6823 if particle.is_fermion() and particle.get('self_antipart') and \ 6824 particle.get('width').lower() != 'zero': 6825 6826 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 6827 {'width': particle.get('width'), 'mass': particle.get('mass')}) 6828 if self.opt['mp']: 6829 res_strings.append(\ 6830 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 6831 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 6832 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 6833 6834 fsock.writelines('\n'.join(res_strings)) 6835 6836 6837 @staticmethod
6838 - def create_param_card_static(model, output_path, rule_card_path=False, 6839 mssm_convert=True):
6840 """ create the param_card.dat for a givent model --static method-- """ 6841 #1. Check if a default param_card is present: 6842 done = False 6843 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6844 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6845 model_path = model.get('modelpath') 6846 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6847 done = True 6848 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6849 output_path) 6850 if not done: 6851 param_writer.ParamCardWriter(model, output_path) 6852 6853 if rule_card_path: 6854 if hasattr(model, 'rule_card'): 6855 model.rule_card.write_file(rule_card_path) 6856 6857 if mssm_convert: 6858 model_name = model.get('name') 6859 # IF MSSM convert the card to SLAH1 6860 if model_name == 'mssm' or model_name.startswith('mssm-'): 6861 import models.check_param_card as translator 6862 # Check the format of the param_card for Pythia and make it correct 6863 if rule_card_path: 6864 translator.make_valid_param_card(output_path, rule_card_path) 6865 translator.convert_to_slha1(output_path)
6866
6867 - def create_param_card(self):
6868 """ create the param_card.dat """ 6869 6870 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6871 if not hasattr(self.model, 'rule_card'): 6872 rule_card=False 6873 self.create_param_card_static(self.model, 6874 output_path=pjoin(self.dir_path, 'param_card.dat'), 6875 rule_card_path=rule_card, 6876 mssm_convert=True)
6877
6878 -def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options={}):
6879 """ Determine which Export_v4 class is required. cmd is the command 6880 interface containing all potential usefull information. 6881 The output_type argument specifies from which context the output 6882 is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output 6883 and 'default' for tree-level outputs.""" 6884 6885 opt = dict(cmd.options) 6886 opt['output_options'] = cmd_options 6887 6888 # ========================================================================== 6889 # First check whether Ninja must be installed. 6890 # Ninja would only be required if: 6891 # a) Loop optimized output is selected 6892 # b) the process gathered from the amplitude generated use loops 6893 6894 if len(cmd._curr_amps)>0: 6895 try: 6896 curr_proc = cmd._curr_amps[0].get('process') 6897 except base_objects.PhysicsObject.PhysicsObjectError: 6898 curr_proc = None 6899 elif hasattr(cmd,'_fks_multi_proc') and \ 6900 len(cmd._fks_multi_proc.get('process_definitions'))>0: 6901 curr_proc = cmd._fks_multi_proc.get('process_definitions')[0] 6902 else: 6903 curr_proc = None 6904 6905 requires_reduction_tool = opt['loop_optimized_output'] and \ 6906 (not curr_proc is None) and \ 6907 (curr_proc.get('perturbation_couplings') != [] and \ 6908 not curr_proc.get('NLO_mode') in [None,'real','tree','LO','LOonly']) 6909 6910 # An installation is required then, but only if the specified path is the 6911 # default local one and that the Ninja library appears missing. 6912 if requires_reduction_tool: 6913 cmd.install_reduction_library() 6914 6915 # ========================================================================== 6916 # First treat the MadLoop5 standalone case 6917 MadLoop_SA_options = {'clean': not noclean, 6918 'complex_mass':cmd.options['complex_mass_scheme'], 6919 'export_format':'madloop', 6920 'mp':True, 6921 'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'), 6922 'cuttools_dir': cmd._cuttools_dir, 6923 'iregi_dir':cmd._iregi_dir, 6924 'pjfry_dir':cmd.options['pjfry'], 6925 'golem_dir':cmd.options['golem'], 6926 'samurai_dir':cmd.options['samurai'], 6927 'ninja_dir':cmd.options['ninja'], 6928 'collier_dir':cmd.options['collier'], 6929 'fortran_compiler':cmd.options['fortran_compiler'], 6930 'f2py_compiler':cmd.options['f2py_compiler'], 6931 'output_dependencies':cmd.options['output_dependencies'], 6932 'SubProc_prefix':'P', 6933 'compute_color_flows':cmd.options['loop_color_flows'], 6934 'mode': 'reweight' if cmd._export_format == "standalone_rw" else '', 6935 'cluster_local_path': cmd.options['cluster_local_path'], 6936 'output_options': cmd_options 6937 } 6938 6939 if output_type.startswith('madloop'): 6940 import madgraph.loop.loop_exporters as loop_exporters 6941 if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')): 6942 ExporterClass=None 6943 if not cmd.options['loop_optimized_output']: 6944 ExporterClass=loop_exporters.LoopProcessExporterFortranSA 6945 else: 6946 if output_type == "madloop": 6947 ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA 6948 MadLoop_SA_options['export_format'] = 'madloop_optimized' 6949 elif output_type == "madloop_matchbox": 6950 ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox 6951 MadLoop_SA_options['export_format'] = 'madloop_matchbox' 6952 else: 6953 raise Exception, "output_type not recognize %s" % output_type 6954 return ExporterClass(cmd._export_dir, MadLoop_SA_options) 6955 else: 6956 raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\ 6957 ' in %s'%str(cmd._mgme_dir)) 
6958 6959 # Then treat the aMC@NLO output 6960 elif output_type=='amcatnlo': 6961 import madgraph.iolibs.export_fks as export_fks 6962 ExporterClass=None 6963 amcatnlo_options = dict(opt) 6964 amcatnlo_options.update(MadLoop_SA_options) 6965 amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0 6966 if not cmd.options['loop_optimized_output']: 6967 logger.info("Writing out the aMC@NLO code") 6968 ExporterClass = export_fks.ProcessExporterFortranFKS 6969 amcatnlo_options['export_format']='FKS5_default' 6970 else: 6971 logger.info("Writing out the aMC@NLO code, using optimized Loops") 6972 ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS 6973 amcatnlo_options['export_format']='FKS5_optimized' 6974 return ExporterClass(cmd._export_dir, amcatnlo_options) 6975 6976 6977 # Then the default tree-level output 6978 elif output_type=='default': 6979 assert group_subprocesses in [True, False] 6980 6981 opt = dict(opt) 6982 opt.update({'clean': not noclean, 6983 'complex_mass': cmd.options['complex_mass_scheme'], 6984 'export_format':cmd._export_format, 6985 'mp': False, 6986 'sa_symmetry':False, 6987 'model': cmd._curr_model.get('name'), 6988 'v5_model': False if cmd._model_v4_path else True }) 6989 6990 format = cmd._export_format #shortcut 6991 6992 if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']: 6993 opt['sa_symmetry'] = True 6994 elif format == 'plugin': 6995 opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry 6996 6997 loop_induced_opt = dict(opt) 6998 loop_induced_opt.update(MadLoop_SA_options) 6999 loop_induced_opt['export_format'] = 'madloop_optimized' 7000 loop_induced_opt['SubProc_prefix'] = 'PV' 7001 # For loop_induced output with MadEvent, we must have access to the 7002 # color flows. 7003 loop_induced_opt['compute_color_flows'] = True 7004 for key in opt: 7005 if key not in loop_induced_opt: 7006 loop_induced_opt[key] = opt[key] 7007 7008 # Madevent output supports MadAnalysis5 7009 if format in ['madevent']: 7010 opt['madanalysis5'] = cmd.options['madanalysis5_path'] 7011 7012 if format == 'matrix' or format.startswith('standalone'): 7013 return ProcessExporterFortranSA(cmd._export_dir, opt, format=format) 7014 7015 elif format in ['madevent'] and group_subprocesses: 7016 if isinstance(cmd._curr_amps[0], 7017 loop_diagram_generation.LoopAmplitude): 7018 import madgraph.loop.loop_exporters as loop_exporters 7019 return loop_exporters.LoopInducedExporterMEGroup( 7020 cmd._export_dir,loop_induced_opt) 7021 else: 7022 return ProcessExporterFortranMEGroup(cmd._export_dir,opt) 7023 elif format in ['madevent']: 7024 if isinstance(cmd._curr_amps[0], 7025 loop_diagram_generation.LoopAmplitude): 7026 import madgraph.loop.loop_exporters as loop_exporters 7027 return loop_exporters.LoopInducedExporterMENoGroup( 7028 cmd._export_dir,loop_induced_opt) 7029 else: 7030 return ProcessExporterFortranME(cmd._export_dir,opt) 7031 elif format in ['matchbox']: 7032 return ProcessExporterFortranMatchBox(cmd._export_dir,opt) 7033 elif cmd._export_format in ['madweight'] and group_subprocesses: 7034 7035 return ProcessExporterFortranMWGroup(cmd._export_dir, opt) 7036 elif cmd._export_format in ['madweight']: 7037 return ProcessExporterFortranMW(cmd._export_dir, opt) 7038 elif format == 'plugin': 7039 if isinstance(cmd._curr_amps[0], 7040 loop_diagram_generation.LoopAmplitude): 7041 return cmd._export_plugin(cmd._export_dir, loop_induced_opt) 7042 else: 7043 return cmd._export_plugin(cmd._export_dir, opt) 7044 7045 else: 7046 raise Exception, 'Wrong 
export_v4 format' 
7047      else: 
7048          raise MadGraph5Error, 'Output type %s not recognized in ExportV4Factory.' % output_type
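# Illustrative summary (comment only, not executed): for tree-level ('default')
# output, ExportV4Factory above essentially dispatches the requested export
# format to the following exporter classes (loop-induced processes are diverted
# to the corresponding loop_exporters classes instead):
#
#     tree_level_dispatch = {
#         'matrix / standalone*':    'ProcessExporterFortranSA',
#         'madevent (grouped)':      'ProcessExporterFortranMEGroup',
#         'madevent (ungrouped)':    'ProcessExporterFortranME',
#         'matchbox':                'ProcessExporterFortranMatchBox',
#         'madweight (grouped)':     'ProcessExporterFortranMWGroup',
#         'madweight (ungrouped)':   'ProcessExporterFortranMW',
#         'plugin':                  'cmd._export_plugin',
#     }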
7049
7050 7051 7052 7053 #=============================================================================== 7054 # ProcessExporterFortranMWGroup 7055 #=============================================================================== 7056 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
7057 """Class to take care of exporting a set of matrix elements to 7058 MadEvent subprocess group format.""" 7059 7060 matrix_file = "matrix_madweight_group_v4.inc" 7061 grouped_mode = 'madweight' 7062 #=========================================================================== 7063 # generate_subprocess_directory 7064 #===========================================================================
7065 - def generate_subprocess_directory(self, subproc_group, 7066 fortran_model, 7067 group_number):
7068 """Generate the Pn directory for a subprocess group in MadEvent, 7069 including the necessary matrix_N.f files, configs.inc and various 7070 other helper files.""" 7071 7072 if not isinstance(subproc_group, group_subprocs.SubProcessGroup): 7073 raise base_objects.PhysicsObject.PhysicsObjectError,\ 7074 "subproc_group object not SubProcessGroup" 7075 7076 if not self.model: 7077 self.model = subproc_group.get('matrix_elements')[0].\ 7078 get('processes')[0].get('model') 7079 7080 pathdir = os.path.join(self.dir_path, 'SubProcesses') 7081 7082 # Create the directory PN in the specified path 7083 subprocdir = "P%d_%s" % (subproc_group.get('number'), 7084 subproc_group.get('name')) 7085 try: 7086 os.mkdir(pjoin(pathdir, subprocdir)) 7087 except os.error as error: 7088 logger.warning(error.strerror + " " + subprocdir) 7089 7090 7091 logger.info('Creating files in directory %s' % subprocdir) 7092 Ppath = pjoin(pathdir, subprocdir) 7093 7094 # Create the matrix.f files, auto_dsig.f files and all inc files 7095 # for all subprocesses in the group 7096 7097 maxamps = 0 7098 maxflows = 0 7099 tot_calls = 0 7100 7101 matrix_elements = subproc_group.get('matrix_elements') 7102 7103 for ime, matrix_element in \ 7104 enumerate(matrix_elements): 7105 filename = pjoin(Ppath, 'matrix%d.f' % (ime+1)) 7106 calls, ncolor = \ 7107 self.write_matrix_element_v4(writers.FortranWriter(filename), 7108 matrix_element, 7109 fortran_model, 7110 str(ime+1), 7111 subproc_group.get('diagram_maps')[\ 7112 ime]) 7113 7114 filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1)) 7115 self.write_auto_dsig_file(writers.FortranWriter(filename), 7116 matrix_element, 7117 str(ime+1)) 7118 7119 # Keep track of needed quantities 7120 tot_calls += int(calls) 7121 maxflows = max(maxflows, ncolor) 7122 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 7123 7124 # Draw diagrams 7125 filename = pjoin(Ppath, "matrix%d.ps" % (ime+1)) 7126 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 7127 get('diagrams'), 7128 filename, 7129 model = \ 7130 matrix_element.get('processes')[0].\ 7131 get('model'), 7132 amplitude=True) 7133 logger.info("Generating Feynman diagrams for " + \ 7134 matrix_element.get('processes')[0].nice_string()) 7135 plot.draw() 7136 7137 # Extract number of external particles 7138 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 7139 7140 # Generate a list of diagrams corresponding to each configuration 7141 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 7142 # If a subprocess has no diagrams for this config, the number is 0 7143 7144 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 7145 7146 filename = pjoin(Ppath, 'auto_dsig.f') 7147 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 7148 subproc_group) 7149 7150 filename = pjoin(Ppath,'configs.inc') 7151 nconfigs, s_and_t_channels = self.write_configs_file(\ 7152 writers.FortranWriter(filename), 7153 subproc_group, 7154 subproc_diagrams_for_config) 7155 7156 filename = pjoin(Ppath, 'leshouche.inc') 7157 self.write_leshouche_file(writers.FortranWriter(filename), 7158 subproc_group) 7159 7160 filename = pjoin(Ppath, 'phasespace.inc') 7161 self.write_phasespace_file(writers.FortranWriter(filename), 7162 nconfigs) 7163 7164 7165 filename = pjoin(Ppath, 'maxamps.inc') 7166 self.write_maxamps_file(writers.FortranWriter(filename), 7167 maxamps, 7168 maxflows, 7169 max([len(me.get('processes')) for me in \ 7170 matrix_elements]), 7171 len(matrix_elements)) 7172 7173 filename = pjoin(Ppath, 'mirrorprocs.inc') 7174 self.write_mirrorprocs(writers.FortranWriter(filename), 7175 subproc_group) 7176 7177 filename = pjoin(Ppath, 'nexternal.inc') 7178 self.write_nexternal_file(writers.FortranWriter(filename), 7179 nexternal, ninitial) 7180 7181 filename = pjoin(Ppath, 'pmass.inc') 7182 self.write_pmass_file(writers.FortranWriter(filename), 7183 matrix_element) 7184 7185 filename = pjoin(Ppath, 'props.inc') 7186 self.write_props_file(writers.FortranWriter(filename), 7187 matrix_element, 7188 s_and_t_channels) 7189 7190 # filename = pjoin(Ppath, 'processes.dat') 7191 # files.write_to_file(filename, 7192 # self.write_processes_file, 7193 # subproc_group) 7194 7195 # Generate jpgs -> pass in make_html 7196 #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl')) 7197 7198 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f'] 7199 7200 for file in linkfiles: 7201 ln('../%s' % file, cwd=Ppath) 7202 7203 ln('nexternal.inc', '../../Source', cwd=Ppath, log=False) 7204 ln('leshouche.inc', '../../Source', cwd=Ppath, log=False) 7205 ln('maxamps.inc', '../../Source', cwd=Ppath, log=False) 7206 ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath) 7207 ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath) 7208 ln('phasespace.inc', '../', log=True, cwd=Ppath) 7209 if not tot_calls: 7210 tot_calls = 0 7211 return tot_calls
7212 7213 7214 #=========================================================================== 7215 # Helper functions 7216 #===========================================================================
7217 - def modify_grouping(self, matrix_element):
7218 """allow to modify the grouping (if grouping is in place) 7219 return two value: 7220 - True/False if the matrix_element was modified 7221 - the new(or old) matrix element""" 7222 7223 return True, matrix_element.split_lepton_grouping()
7224 7225 #=========================================================================== 7226 # write_super_auto_dsig_file 7227 #===========================================================================
7228 - def write_super_auto_dsig_file(self, writer, subproc_group):
7229 """Write the auto_dsig.f file selecting between the subprocesses 7230 in subprocess group mode""" 7231 7232 replace_dict = {} 7233 7234 # Extract version number and date from VERSION file 7235 info_lines = self.get_mg5_info_lines() 7236 replace_dict['info_lines'] = info_lines 7237 7238 matrix_elements = subproc_group.get('matrix_elements') 7239 7240 # Extract process info lines 7241 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7242 matrix_elements]) 7243 replace_dict['process_lines'] = process_lines 7244 7245 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7246 replace_dict['nexternal'] = nexternal 7247 7248 replace_dict['nsprocs'] = 2*len(matrix_elements) 7249 7250 # Generate dsig definition line 7251 dsig_def_line = "DOUBLE PRECISION " + \ 7252 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7253 range(len(matrix_elements))]) 7254 replace_dict["dsig_def_line"] = dsig_def_line 7255 7256 # Generate dsig process lines 7257 call_dsig_proc_lines = [] 7258 for iproc in range(len(matrix_elements)): 7259 call_dsig_proc_lines.append(\ 7260 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7261 {"num": iproc + 1, 7262 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7263 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7264 7265 if writer: 7266 file = open(os.path.join(_file_path, \ 7267 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7268 file = file % replace_dict 7269 # Write the file 7270 writer.writelines(file) 7271 else: 7272 return replace_dict
7273 7274 #=========================================================================== 7275 # write_mirrorprocs 7276 #===========================================================================
7277 - def write_mirrorprocs(self, writer, subproc_group):
7278 """Write the mirrorprocs.inc file determining which processes have 7279 IS mirror process in subprocess group mode.""" 7280 7281 lines = [] 7282 bool_dict = {True: '.true.', False: '.false.'} 7283 matrix_elements = subproc_group.get('matrix_elements') 7284 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7285 (len(matrix_elements), 7286 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7287 me in matrix_elements]))) 7288 # Write the file 7289 writer.writelines(lines)
7290 7291 #=========================================================================== 7292 # write_configs_file 7293 #===========================================================================
7294 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7295 """Write the configs.inc file with topology information for a 7296 subprocess group. Use the first subprocess with a diagram for each 7297 configuration.""" 7298 7299 matrix_elements = subproc_group.get('matrix_elements') 7300 model = matrix_elements[0].get('processes')[0].get('model') 7301 7302 diagrams = [] 7303 config_numbers = [] 7304 for iconfig, config in enumerate(diagrams_for_config): 7305 # Check if any diagrams correspond to this config 7306 if set(config) == set([0]): 7307 continue 7308 subproc_diags = [] 7309 for s,d in enumerate(config): 7310 if d: 7311 subproc_diags.append(matrix_elements[s].\ 7312 get('diagrams')[d-1]) 7313 else: 7314 subproc_diags.append(None) 7315 diagrams.append(subproc_diags) 7316 config_numbers.append(iconfig + 1) 7317 7318 # Extract number of external particles 7319 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 7320 7321 return len(diagrams), \ 7322 self.write_configs_file_from_diagrams(writer, diagrams, 7323 config_numbers, 7324 nexternal, ninitial, 7325 matrix_elements[0],model)
7326 7327 #=========================================================================== 7328 # write_run_configs_file 7329 #===========================================================================
7330 - def write_run_config_file(self, writer):
7331 """Write the run_configs.inc file for MadEvent""" 7332 7333 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 7334 text = open(path).read() % {'chanperjob':'2'} 7335 writer.write(text) 7336 return True
7337 7338 7339 #=========================================================================== 7340 # write_leshouche_file 7341 #===========================================================================
7342 - def write_leshouche_file(self, writer, subproc_group):
7343 """Write the leshouche.inc file for MG4""" 7344 7345 all_lines = [] 7346 7347 for iproc, matrix_element in \ 7348 enumerate(subproc_group.get('matrix_elements')): 7349 all_lines.extend(self.get_leshouche_lines(matrix_element, 7350 iproc)) 7351 7352 # Write the file 7353 writer.writelines(all_lines) 7354 7355 return True
7356