
Source Code for Module madgraph.iolibs.export_cpp

################################################################################
#
# Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors
#
# This file is a part of the MadGraph5_aMC@NLO project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph5_aMC@NLO license which should accompany this
# distribution.
#
# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch
#
################################################################################

"""Methods and classes to export models and matrix elements to Pythia 8
and C++ Standalone format."""

import fractions
import glob
import itertools
import logging
from math import fmod
import os
import re
import shutil
import subprocess

import madgraph.core.base_objects as base_objects
import madgraph.core.color_algebra as color
import madgraph.core.helas_objects as helas_objects
import madgraph.iolibs.drawing_eps as draw
import madgraph.iolibs.files as files
import madgraph.iolibs.helas_call_writers as helas_call_writers
import madgraph.iolibs.file_writers as writers
import madgraph.iolibs.template_files as template_files
import madgraph.iolibs.ufo_expression_parsers as parsers
import madgraph.various.banner as banner_mod
from madgraph import MadGraph5Error, InvalidCmd, MG5DIR
from madgraph.iolibs.files import cp, ln, mv

from madgraph.iolibs.export_v4 import VirtualExporter
import madgraph.various.misc as misc

import aloha.create_aloha as create_aloha
import aloha.aloha_writers as aloha_writers

_file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
logger = logging.getLogger('madgraph.export_pythia8')
pjoin = os.path.join


def make_model_cpp(dir_path):
    """Make the model library in a C++ standalone directory"""

    source_dir = os.path.join(dir_path, "src")
    # Run standalone
    logger.info("Running make for src")
    misc.compile(cwd=source_dir)

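
# ------------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module): after a
# standalone_cpp directory has been written (see ProcessExporterCPP below),
# the model library can be compiled like this.  The path is a placeholder.
def _example_build_model_library():
    """Compile src/ of a previously exported standalone C++ directory."""
    export_dir = "/tmp/my_standalone_cpp"   # hypothetical export directory
    make_model_cpp(export_dir)
# ------------------------------------------------------------------------------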

class OneProcessExporterCPP(object):
    """Class to take care of exporting a set of matrix elements to
    C++ format."""

    # Static variables (for inheritance)
    process_dir = '.'
    include_dir = '.'
    template_path = os.path.join(_file_path, 'iolibs', 'template_files')
    __template_path = os.path.join(_file_path, 'iolibs', 'template_files')
    process_template_h = 'cpp_process_h.inc'
    process_template_cc = 'cpp_process_cc.inc'
    process_class_template = 'cpp_process_class.inc'
    process_definition_template = 'cpp_process_function_definitions.inc'
    process_wavefunction_template = 'cpp_process_wavefunctions.inc'
    process_sigmaKin_function_template = 'cpp_process_sigmaKin_function.inc'
    single_process_template = 'cpp_process_matrix.inc'

    class ProcessExporterCPPError(Exception):
        pass

    def __init__(self, matrix_elements, cpp_helas_call_writer, process_string="",
                 process_number=0, path=os.getcwd()):
        """Initiate with matrix elements, helas call writer, process
        string, path. Generate the process .h and .cc files."""

        if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
            self.matrix_elements = matrix_elements.get('matrix_elements')
        elif isinstance(matrix_elements, helas_objects.HelasMatrixElement):
            self.matrix_elements = \
                         helas_objects.HelasMatrixElementList([matrix_elements])
        elif isinstance(matrix_elements, helas_objects.HelasMatrixElementList):
            self.matrix_elements = matrix_elements
        else:
            raise base_objects.PhysicsObject.PhysicsObjectError,\
                  "Wrong object type for matrix_elements: %s" % type(matrix_elements)

        if not self.matrix_elements:
            raise MadGraph5Error("No matrix elements to export")

        self.model = self.matrix_elements[0].get('processes')[0].get('model')
        self.model_name = ProcessExporterCPP.get_model_name(self.model.get('name'))

        self.processes = sum([me.get('processes') for \
                              me in self.matrix_elements], [])
        self.processes.extend(sum([me.get_mirror_processes() for \
                              me in self.matrix_elements], []))

        self.nprocesses = len(self.matrix_elements)
        if any([m.get('has_mirror_process') for m in self.matrix_elements]):
            self.nprocesses = 2*len(self.matrix_elements)

        if process_string:
            self.process_string = process_string
        else:
            self.process_string = self.processes[0].base_string()

        if process_number:
            self.process_number = process_number
        else:
            self.process_number = self.processes[0].get('id')

        self.process_name = self.get_process_name()
        self.process_class = "CPPProcess"

        self.path = path
        self.helas_call_writer = cpp_helas_call_writer

        if not isinstance(self.helas_call_writer, helas_call_writers.CPPUFOHelasCallWriter):
            raise self.ProcessExporterCPPError, \
                "helas_call_writer not CPPUFOHelasCallWriter"

        self.nexternal, self.ninitial = \
                        self.matrix_elements[0].get_nexternal_ninitial()
        self.nfinal = self.nexternal - self.ninitial

        # Check if we can use the same helicities for all matrix
        # elements

        self.single_helicities = True

        hel_matrix = self.get_helicity_matrix(self.matrix_elements[0])

        for me in self.matrix_elements[1:]:
            if self.get_helicity_matrix(me) != hel_matrix:
                self.single_helicities = False

        if self.single_helicities:
            # If all processes have the same helicity structure, this
            # allows us to reuse the same wavefunctions for the
            # different processes

            self.wavefunctions = []
            wf_number = 0

            for me in self.matrix_elements:
                for iwf, wf in enumerate(me.get_all_wavefunctions()):
                    try:
                        old_wf = \
                               self.wavefunctions[self.wavefunctions.index(wf)]
                        wf.set('number', old_wf.get('number'))
                    except ValueError:
                        wf_number += 1
                        wf.set('number', wf_number)
                        self.wavefunctions.append(wf)

            # Also combine amplitudes
            self.amplitudes = helas_objects.HelasAmplitudeList()
            amp_number = 0
            for me in self.matrix_elements:
                for iamp, amp in enumerate(me.get_all_amplitudes()):
                    try:
                        old_amp = \
                               self.amplitudes[self.amplitudes.index(amp)]
                        amp.set('number', old_amp.get('number'))
                    except ValueError:
                        amp_number += 1
                        amp.set('number', amp_number)
                        self.amplitudes.append(amp)
            diagram = helas_objects.HelasDiagram({'amplitudes': self.amplitudes})
            self.amplitudes = helas_objects.HelasMatrixElement({\
                'diagrams': helas_objects.HelasDiagramList([diagram])})

    #===========================================================================
    # Global helper methods
    #===========================================================================
    @classmethod
    def read_template_file(cls, filename, classpath=False):
        """Open a template file and return the contents."""

        if isinstance(filename, tuple):
            file_path = filename[0]
            filename = filename[1]
        elif isinstance(filename, str):
            if classpath:
                file_path = cls.__template_path
            else:
                file_path = cls.template_path
        else:
            raise MadGraph5Error('Argument should be string or tuple.')

        return open(os.path.join(file_path, filename)).read()
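
    # ----------------------------------------------------------------------
    # Illustrative note (not part of the original module): the template files
    # read here are plain text with python %(key)s placeholders, so the
    # exporter fills them with an ordinary string-format dictionary.  A
    # minimal sketch of the mechanism, using a made-up template string:
    #
    #     template = "void %(process_class_name)s::initProc()\n{\n%(initProc_lines)s\n}\n"
    #     print(template % {'process_class_name': 'Sigma_sm_gg_ttx',
    #                       'initProc_lines': '  mME.push_back(pars->MT);'})
    # ----------------------------------------------------------------------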



    # Methods for generation of process files for C++

    def generate_process_files(self):
        """Generate the .h and .cc files needed for C++, for the
        processes described by multi_matrix_element"""

        # Create the files
        if not os.path.isdir(os.path.join(self.path, self.include_dir)):
            os.makedirs(os.path.join(self.path, self.include_dir))
        filename = os.path.join(self.path, self.include_dir,
                                '%s.h' % self.process_class)
        self.write_process_h_file(writers.CPPWriter(filename))

        if not os.path.isdir(os.path.join(self.path, self.process_dir)):
            os.makedirs(os.path.join(self.path, self.process_dir))
        filename = os.path.join(self.path, self.process_dir,
                                '%s.cc' % self.process_class)
        self.write_process_cc_file(writers.CPPWriter(filename))

        logger.info('Created files %(process)s.h and %(process)s.cc in' % \
                    {'process': self.process_class} + \
                    ' directory %(dir)s' % {'dir': os.path.split(filename)[0]})

    def get_default_converter(self):

        replace_dict = {}

        return replace_dict

    #===========================================================================
    # write_process_h_file
    #===========================================================================
    def write_process_h_file(self, writer):
        """Write the class definition (.h) file for the process"""

        if writer and not isinstance(writer, writers.CPPWriter):
            raise writers.CPPWriter.CPPWriterError(\
                "writer not CPPWriter")

        replace_dict = self.get_default_converter()

        # Extract version number and date from VERSION file
        info_lines = get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract model name
        replace_dict['model_name'] = \
                         self.model_name

        # Extract process file name
        replace_dict['process_file_name'] = self.process_name

        # Extract class definitions
        process_class_definitions = self.get_process_class_definitions()
        replace_dict['process_class_definitions'] = process_class_definitions

        if writer:
            file = self.read_template_file(self.process_template_h) % replace_dict
            # Write the file
            writer.writelines(file)
        else:
            return replace_dict

    #===========================================================================
    # write_process_cc_file
    #===========================================================================
    def write_process_cc_file(self, writer):
        """Write the class member definition (.cc) file for the process
        described by matrix_element"""

        if writer:
            if not isinstance(writer, writers.CPPWriter):
                raise writers.CPPWriter.CPPWriterError(\
                    "writer not CPPWriter")

        replace_dict = self.get_default_converter()

        # Extract version number and date from VERSION file
        info_lines = get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process file name
        replace_dict['process_file_name'] = self.process_name

        # Extract model name
        replace_dict['model_name'] = self.model_name

        # Extract class function definitions
        process_function_definitions = \
                                     self.get_process_function_definitions()
        replace_dict['process_function_definitions'] = \
                                                   process_function_definitions

        if writer:
            file = self.read_template_file(self.process_template_cc) % replace_dict
            # Write the file
            writer.writelines(file)
        else:
            return replace_dict

    #===========================================================================
    # Process export helper functions
    #===========================================================================
    def get_process_class_definitions(self, write=True):
        """The complete class definition for the process"""

        replace_dict = {}

        # Extract model name
        replace_dict['model_name'] = self.model_name

        # Extract process info lines for all processes
        process_lines = "\n".join([self.get_process_info_lines(me) for me in \
                                   self.matrix_elements])

        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        replace_dict['nfinal'] = self.nfinal

        # Extract number of initial particles
        replace_dict['ninitial'] = self.ninitial

        # Extract process class name (for the moment same as file name)
        replace_dict['process_class_name'] = self.process_name

        # Extract process definition
        process_definition = "%s (%s)" % (self.process_string,
                                          self.model_name)
        replace_dict['process_definition'] = process_definition

        process = self.processes[0]

        replace_dict['process_code'] = self.process_number
        replace_dict['nexternal'] = self.nexternal
        replace_dict['nprocesses'] = self.nprocesses

        color_amplitudes = self.matrix_elements[0].get_color_amplitudes()
        # Number of color flows
        replace_dict['ncolor'] = len(color_amplitudes)

        if self.single_helicities:
            replace_dict['all_sigma_kin_definitions'] = \
                          """// Calculate wavefunctions
                          void calculate_wavefunctions(const int perm[], const int hel[]);
                          static const int nwavefuncs = %d;
                          std::complex<double> w[nwavefuncs][18];
                          static const int namplitudes = %d;
                          std::complex<double> amp[namplitudes];""" % \
                          (len(self.wavefunctions),
                           len(self.amplitudes.get_all_amplitudes()))
            replace_dict['all_matrix_definitions'] = \
                           "\n".join(["double matrix_%s();" % \
                                      me.get('processes')[0].shell_string().\
                                      replace("0_", "") \
                                      for me in self.matrix_elements])

        else:
            replace_dict['all_sigma_kin_definitions'] = \
                          "\n".join(["void sigmaKin_%s();" % \
                                     me.get('processes')[0].shell_string().\
                                     replace("0_", "") \
                                     for me in self.matrix_elements])
            replace_dict['all_matrix_definitions'] = \
                           "\n".join(["double matrix_%s(const int hel[]);" % \
                                      me.get('processes')[0].shell_string().\
                                      replace("0_", "") \
                                      for me in self.matrix_elements])

        if write:
            file = self.read_template_file(self.process_class_template) % replace_dict
            return file
        else:
            return replace_dict

    def get_process_function_definitions(self):
        """The complete Pythia 8 class definition for the process"""

        replace_dict = {}

        # Extract model name
        replace_dict['model_name'] = self.model_name

        # Extract process info lines
        replace_dict['process_lines'] = \
                             "\n".join([self.get_process_info_lines(me) for \
                                        me in self.matrix_elements])

        # Extract process class name (for the moment same as file name)
        replace_dict['process_class_name'] = self.process_name

        color_amplitudes = [me.get_color_amplitudes() for me in \
                            self.matrix_elements]

        replace_dict['initProc_lines'] = \
                                 self.get_initProc_lines(self.matrix_elements[0],
                                                         color_amplitudes)
        replace_dict['reset_jamp_lines'] = \
                                     self.get_reset_jamp_lines(color_amplitudes)
        replace_dict['sigmaKin_lines'] = \
                                     self.get_sigmaKin_lines(color_amplitudes)
        replace_dict['sigmaHat_lines'] = \
                                     self.get_sigmaHat_lines()

        replace_dict['all_sigmaKin'] = \
                                  self.get_all_sigmaKin_lines(color_amplitudes,
                                                              'CPPProcess')

        file = self.read_template_file(self.process_definition_template) %\
               replace_dict

        return file

    def get_process_name(self):
        """Return process file name for the process in matrix_element"""

        process_string = self.process_string

        # Extract process number
        proc_number_pattern = re.compile("^(.+)@\s*(\d+)\s*(.*)$")
        proc_number_re = proc_number_pattern.match(process_string)
        proc_number = 0
        if proc_number_re:
            proc_number = int(proc_number_re.group(2))
            process_string = proc_number_re.group(1) + \
                             proc_number_re.group(3)

        # Remove order information
        order_pattern = re.compile("^(.+)\s+(\w+)\s*=\s*(\d+)\s*$")
        order_re = order_pattern.match(process_string)
        while order_re:
            process_string = order_re.group(1)
            order_re = order_pattern.match(process_string)

        process_string = process_string.replace(' ', '')
        process_string = process_string.replace('>', '_')
        process_string = process_string.replace('+', 'p')
        process_string = process_string.replace('-', 'm')
        process_string = process_string.replace('~', 'x')
        process_string = process_string.replace('/', '_no_')
        process_string = process_string.replace('$', '_nos_')
        process_string = process_string.replace('|', '_or_')
        if proc_number != 0:
            process_string = "%d_%s" % (proc_number, process_string)

        process_string = "Sigma_%s_%s" % (self.model_name,
                                          process_string)
        return process_string

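    # Example (illustrative, not from the original source): for a process
    # string "g g > t t~ @2" and a model named "sm", the substitutions above
    # reduce "g g > t t~" to "gg_ttx", the "@2" becomes the leading "2_", and
    # the returned name is "Sigma_sm_2_gg_ttx".
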
    def get_process_info_lines(self, matrix_element):
        """Return info lines describing the processes for this matrix element"""

        return "\n".join([ "# " + process.nice_string().replace('\n', '\n# * ') \
                         for process in matrix_element.get('processes')])


    def get_initProc_lines(self, matrix_element, color_amplitudes):
        """Get initProc_lines for function definition for Pythia 8 .cc file"""

        initProc_lines = []

        initProc_lines.append("// Set external particle masses for this matrix element")

        for part in matrix_element.get_external_wavefunctions():
            initProc_lines.append("mME.push_back(pars->%s);" % part.get('mass'))
        for i, colamp in enumerate(color_amplitudes):
            initProc_lines.append("jamp2[%d] = new double[%d];" % \
                                  (i, len(colamp)))

        return "\n".join(initProc_lines)

    def get_reset_jamp_lines(self, color_amplitudes):
        """Get lines to reset jamps"""

        ret_lines = ""
        for icol, col_amp in enumerate(color_amplitudes):
            ret_lines += """for(int i=0;i < %(ncolor)d; i++)
            jamp2[%(proc_number)d][i]=0.;\n""" % \
            {"ncolor": len(col_amp), "proc_number": icol}
        return ret_lines


    def get_calculate_wavefunctions(self, wavefunctions, amplitudes, write=True):
        """Return the lines for optimized calculation of the
        wavefunctions for all subprocesses"""

        replace_dict = {}

        replace_dict['nwavefuncs'] = len(wavefunctions)

        # ensure no recycling of wavefunction ! incompatible with some output
        for me in self.matrix_elements:
            me.restore_original_wavefunctions()

        replace_dict['wavefunction_calls'] = "\n".join(\
            self.helas_call_writer.get_wavefunction_calls(\
                helas_objects.HelasWavefunctionList(wavefunctions)))

        replace_dict['amplitude_calls'] = "\n".join(\
            self.helas_call_writer.get_amplitude_calls(amplitudes))

        if write:
            file = self.read_template_file(self.process_wavefunction_template) % \
                   replace_dict
            return file
        else:
            return replace_dict


    def get_sigmaKin_lines(self, color_amplitudes, write=True):
        """Get sigmaKin_lines for function definition for Pythia 8 .cc file"""

        if self.single_helicities:
            replace_dict = {}

            # Number of helicity combinations
            replace_dict['ncomb'] = \
                            self.matrix_elements[0].get_helicity_combinations()

            # Process name
            replace_dict['process_class_name'] = self.process_name

            # Particle ids for the call to setupForME
            replace_dict['id1'] = self.processes[0].get('legs')[0].get('id')
            replace_dict['id2'] = self.processes[0].get('legs')[1].get('id')

            # Extract helicity matrix
            replace_dict['helicity_matrix'] = \
                              self.get_helicity_matrix(self.matrix_elements[0])

            # Extract denominator
            den_factors = [str(me.get_denominator_factor()) for me in \
                           self.matrix_elements]
            if self.nprocesses != len(self.matrix_elements):
                den_factors.extend(den_factors)
            replace_dict['den_factors'] = ",".join(den_factors)

            replace_dict['get_matrix_t_lines'] = "\n".join(
                ["t[%(iproc)d]=matrix_%(proc_name)s();" % \
                 {"iproc": i, "proc_name": \
                  me.get('processes')[0].shell_string().replace("0_", "")} \
                 for i, me in enumerate(self.matrix_elements)])

            # Generate lines for mirror matrix element calculation
            mirror_matrix_lines = ""

            if any([m.get('has_mirror_process') for m in self.matrix_elements]):
                mirror_matrix_lines += \
                    """// Mirror initial state momenta for mirror process
                    perm[0]=1;
                    perm[1]=0;
                    // Calculate wavefunctions
                    calculate_wavefunctions(perm, helicities[ihel]);
                    // Mirror back
                    perm[0]=0;
                    perm[1]=1;
                    // Calculate matrix elements
                    """

                mirror_matrix_lines += "\n".join(
                    ["t[%(iproc)d]=matrix_%(proc_name)s();" % \
                     {"iproc": i + len(self.matrix_elements), "proc_name": \
                      me.get('processes')[0].shell_string().replace("0_", "")} \
                     for i, me in enumerate(self.matrix_elements) if me.get('has_mirror_process')])

            replace_dict['get_mirror_matrix_lines'] = mirror_matrix_lines

            if write:
                file = \
                     self.read_template_file(\
                          self.process_sigmaKin_function_template) %\
                          replace_dict
                return file
            else:
                return replace_dict
        else:
            ret_lines = "// Call the individual sigmaKin for each process\n"
            ret_lines = ret_lines + \
                        "\n".join(["sigmaKin_%s();" % \
                                   me.get('processes')[0].shell_string().\
                                   replace("0_", "") for \
                                   me in self.matrix_elements])
            if write:
                return ret_lines
            else:
                replace_dict = {}
                replace_dict['get_mirror_matrix_lines'] = ret_lines
                return replace_dict

    def get_all_sigmaKin_lines(self, color_amplitudes, class_name):
        """Get sigmaKin_process for all subprocesses for Pythia 8 .cc file"""

        ret_lines = []
        if self.single_helicities:
            ret_lines.append(\
                "void %s::calculate_wavefunctions(const int perm[], const int hel[]){" % \
                class_name)
            ret_lines.append("// Calculate wavefunctions for all processes")
            ret_lines.append(self.get_calculate_wavefunctions(\
                self.wavefunctions, self.amplitudes))
            ret_lines.append("}")
        else:
            ret_lines.extend([self.get_sigmaKin_single_process(i, me) \
                              for i, me in enumerate(self.matrix_elements)])
        ret_lines.extend([self.get_matrix_single_process(i, me,
                                                         color_amplitudes[i],
                                                         class_name) \
                          for i, me in enumerate(self.matrix_elements)])
        return "\n".join(ret_lines)


    def get_sigmaKin_single_process(self, i, matrix_element, write=True):
        """Write sigmaKin for each process"""

        # Write sigmaKin for the process

        replace_dict = {}

        # Process name
        replace_dict['proc_name'] = \
                  matrix_element.get('processes')[0].shell_string().replace("0_", "")

        # Process name
        replace_dict['process_class_name'] = self.process_name

        # Process number
        replace_dict['proc_number'] = i

        # Number of helicity combinations
        replace_dict['ncomb'] = matrix_element.get_helicity_combinations()

        # Extract helicity matrix
        replace_dict['helicity_matrix'] = \
                                      self.get_helicity_matrix(matrix_element)
        # Extract denominator
        replace_dict['den_factor'] = matrix_element.get_denominator_factor()

        if write:
            file = \
                 self.read_template_file('cpp_process_sigmaKin_subproc_function.inc') %\
                 replace_dict
            return file
        else:
            return replace_dict

    def get_matrix_single_process(self, i, matrix_element, color_amplitudes,
                                  class_name, write=True):
        """Write matrix() for each process"""

        # Write matrix() for the process

        replace_dict = {}

        # Process name
        replace_dict['proc_name'] = \
                 matrix_element.get('processes')[0].shell_string().replace("0_", "")

        # Wavefunction and amplitude calls
        if self.single_helicities:
            replace_dict['matrix_args'] = ""
            replace_dict['all_wavefunction_calls'] = "int i, j;"
        else:
            replace_dict['matrix_args'] = "const int hel[]"
            wavefunctions = matrix_element.get_all_wavefunctions()
            replace_dict['all_wavefunction_calls'] = \
                         """const int nwavefuncs = %d;
                         std::complex<double> w[nwavefuncs][18];
                         """ % len(wavefunctions) + \
                         self.get_calculate_wavefunctions(wavefunctions, [])

        # Process class name
        replace_dict['process_class_name'] = class_name

        # Process number
        replace_dict['proc_number'] = i

        # Number of color flows
        replace_dict['ncolor'] = len(color_amplitudes)

        replace_dict['ngraphs'] = matrix_element.get_number_of_amplitudes()

        # Extract color matrix
        replace_dict['color_matrix_lines'] = \
                                     self.get_color_matrix_lines(matrix_element)

        replace_dict['jamp_lines'] = self.get_jamp_lines(color_amplitudes)

        # specific exporter hack
        replace_dict = self.get_class_specific_definition_matrix(replace_dict, matrix_element)

        if write:
            file = self.read_template_file(self.single_process_template) % \
                   replace_dict
            return file
        else:
            return replace_dict

    def get_class_specific_definition_matrix(self, converter, matrix_element):
        """place to add some specific hack to a given exporter.
        Please always use Super in that case"""

        return converter

    def get_sigmaHat_lines(self):
        """Get sigmaHat_lines for function definition for Pythia 8 .cc file"""

        # Create a set with the pairs of incoming partons
        beams = set([(process.get('legs')[0].get('id'),
                      process.get('legs')[1].get('id')) \
                     for process in self.processes])

        res_lines = []

        # Write a selection routine for the different processes with
        # the same beam particles
        res_lines.append("// Select between the different processes")
        for ibeam, beam_parts in enumerate(beams):

            if ibeam == 0:
                res_lines.append("if(id1 == %d && id2 == %d){" % beam_parts)
            else:
                res_lines.append("else if(id1 == %d && id2 == %d){" % beam_parts)

            # Pick out all processes with this beam pair
            beam_processes = [(i, me) for (i, me) in \
                              enumerate(self.matrix_elements) if beam_parts in \
                              [(process.get('legs')[0].get('id'),
                                process.get('legs')[1].get('id')) \
                               for process in me.get('processes')]]

            # Add mirror processes
            beam_processes.extend([(len(self.matrix_elements) + i, me) for (i, me) in \
                              enumerate(self.matrix_elements) if beam_parts in \
                              [(process.get('legs')[0].get('id'),
                                process.get('legs')[1].get('id')) \
                               for process in me.get_mirror_processes()]])

            # Now add matrix elements for the processes with the right factors
            res_lines.append("// Add matrix elements for processes with beams %s" % \
                             repr(beam_parts))
            res_lines.append("return %s;" % \
                             ("+".join(["matrix_element[%i]*%i" % \
                                        (i, len([proc for proc in \
                                                 me.get('processes') if beam_parts == \
                                                 (proc.get('legs')[0].get('id'),
                                                  proc.get('legs')[1].get('id')) or \
                                                 me.get('has_mirror_process') and \
                                                 beam_parts == \
                                                 (proc.get('legs')[1].get('id'),
                                                  proc.get('legs')[0].get('id'))])) \
                                        for (i, me) in beam_processes]).\
                              replace('*1', '')))
            res_lines.append("}")

        res_lines.append("else {")
        res_lines.append("// Return 0 if not correct initial state assignment")
        res_lines.append(" return 0.;}")

        return "\n".join(res_lines)


    def get_helicity_matrix(self, matrix_element):
        """Return the Helicity matrix definition lines for this matrix element"""

        helicity_line = "static const int helicities[ncomb][nexternal] = {";
        helicity_line_list = []

        for helicities in matrix_element.get_helicity_matrix(allow_reverse=False):
            helicity_line_list.append("{"+",".join(['%d'] * len(helicities)) % \
                                      tuple(helicities) + "}")

        return helicity_line + ",".join(helicity_line_list) + "};"
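
    # Example of the generated C++ (illustrative, not from the original
    # source): for e+ e- > mu+ mu- (four external fermions, ncomb = 16) the
    # returned string starts roughly as
    #   static const int helicities[ncomb][nexternal] = {{-1,-1,-1,-1},{-1,-1,-1,1},...};
    # with one {..} entry per helicity combination.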

    def get_den_factor_line(self, matrix_element):
        """Return the denominator factor line for this matrix element"""

        return "const int denominator = %d;" % \
               matrix_element.get_denominator_factor()

    def get_color_matrix_lines(self, matrix_element):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n."""

        if not matrix_element.get('color_matrix'):
            return "\n".join(["static const double denom[1] = {1.};",
                              "static const double cf[1][1] = {1.};"])
        else:
            color_denominators = matrix_element.get('color_matrix').\
                                                 get_line_denominators()
            denom_string = "static const double denom[ncolor] = {%s};" % \
                           ",".join(["%i" % denom for denom in color_denominators])

            matrix_strings = []
            my_cs = color.ColorString()
            for index, denominator in enumerate(color_denominators):
                # Then write the numerators for the matrix elements
                num_list = matrix_element.get('color_matrix').\
                                           get_line_numerators(index, denominator)

                matrix_strings.append("{%s}" % \
                                     ",".join(["%d" % i for i in num_list]))
            matrix_string = "static const double cf[ncolor][ncolor] = {" + \
                            ",".join(matrix_strings) + "};"
            return "\n".join([denom_string, matrix_string])
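
    # Example of the generated C++ (illustrative): for g g > t t~ there are
    # two color flows, and the method returns roughly
    #   static const double denom[ncolor] = {3,3};
    #   static const double cf[ncolor][ncolor] = {{16,-2},{-2,16}};
    # i.e. the color-matrix entry (i,j) is cf[i][j]/denom[i].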




    def get_jamp_lines(self, color_amplitudes):
        """Return the jamp = sum(fermionfactor * amp[i]) lines"""

        res_list = []

        for i, coeff_list in enumerate(color_amplitudes):

            res = "jamp[%i]=" % i

            # Optimization: if all contributions to that color basis element have
            # the same coefficient (up to a sign), put it in front
            list_fracs = [abs(coefficient[0][1]) for coefficient in coeff_list]
            common_factor = False
            diff_fracs = list(set(list_fracs))
            if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                common_factor = True
                global_factor = diff_fracs[0]
                res = res + '%s(' % coeff(1, global_factor, False, 0)

            for (coefficient, amp_number) in coeff_list:
                if common_factor:
                    res = res + "%samp[%d]" % (coeff(coefficient[0],
                                               coefficient[1] / abs(coefficient[1]),
                                               coefficient[2],
                                               coefficient[3]),
                                               amp_number - 1)
                else:
                    res = res + "%samp[%d]" % (coeff(coefficient[0],
                                               coefficient[1],
                                               coefficient[2],
                                               coefficient[3]),
                                               amp_number - 1)

            if common_factor:
                res = res + ')'

            res += ';'

            res_list.append(res)

        return "\n".join(res_list)
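
    # Example of the generated C++ (illustrative): each color flow gives one
    # line of the form "jamp[i]=" followed by one signed term per amplitude,
    # e.g. something like
    #   jamp[0]=+amp[0]-std::complex<double>(0,1)*amp[1];
    # where each prefix is produced by the coeff() helper used above.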
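
# ------------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module).  It assumes a
# HelasMultiProcess `multiproc` produced by the usual MG5 generation step;
# everything else uses only names defined in this module or imported above.
def _example_export_one_process_cpp(multiproc, output_path):
    """Write CPPProcess.h / CPPProcess.cc for `multiproc` under `output_path`.

    A minimal sketch: build the C++ Helas call writer from the process model,
    then let OneProcessExporterCPP generate the standalone process files."""
    model = multiproc.get('matrix_elements')[0].get('processes')[0].get('model')
    cpp_writer = helas_call_writers.CPPUFOHelasCallWriter(model)
    exporter = OneProcessExporterCPP(multiproc, cpp_writer, path=output_path)
    exporter.generate_process_files()
    return exporter.process_name
# ------------------------------------------------------------------------------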


class OneProcessExporterMatchbox(OneProcessExporterCPP):
    """Class to take care of exporting a set of matrix elements to
    Matchbox format."""

    # Static variables (for inheritance)
    process_class_template = 'matchbox_class.inc'
    single_process_template = 'matchbox_matrix.inc'
    process_definition_template = 'matchbox_function_definitions.inc'

    def get_initProc_lines(self, matrix_element, color_amplitudes):
        """Get initProc_lines for function definition for Pythia 8 .cc file"""

        initProc_lines = []

        initProc_lines.append("// Set external particle masses for this matrix element")

        for part in matrix_element.get_external_wavefunctions():
            initProc_lines.append("mME.push_back(pars->%s);" % part.get('mass'))
        return "\n".join(initProc_lines)


    def get_class_specific_definition_matrix(self, converter, matrix_element):
        """Add the Matchbox-specific color string lines to the converter."""

        converter = super(OneProcessExporterMatchbox, self).get_class_specific_definition_matrix(converter, matrix_element)

        # T(....)
        converter['color_sting_lines'] = \
                                     self.get_color_string_lines(matrix_element)

        return converter

    def get_all_sigmaKin_lines(self, color_amplitudes, class_name):
        """Get sigmaKin_process for all subprocesses for Matchbox .cc file"""

        ret_lines = []
        if self.single_helicities:
            ret_lines.append(\
                "void %s::calculate_wavefunctions(const int perm[], const int hel[]){" % \
                class_name)
            ret_lines.append("// Calculate wavefunctions for all processes")
            ret_lines.append(self.get_calculate_wavefunctions(\
                self.wavefunctions, self.amplitudes))
            ret_lines.append(self.get_jamp_lines(color_amplitudes[0]))
            ret_lines.append("}")
        else:
            ret_lines.extend([self.get_sigmaKin_single_process(i, me) \
                              for i, me in enumerate(self.matrix_elements)])
        ret_lines.extend([self.get_matrix_single_process(i, me,
                                                         color_amplitudes[i],
                                                         class_name) \
                          for i, me in enumerate(self.matrix_elements)])
        return "\n".join(ret_lines)


    def get_color_string_lines(self, matrix_element):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n."""

        if not matrix_element.get('color_matrix'):
            return "\n".join(["static const double res[1][1] = {-1.};"])

        # start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                             get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()

        for i_color in xrange(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str = repr(my_cs)
            t_match = re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            tmp_color = []
            for match in all_matches:
                ctype, arg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype not in ['T', 'Tr']:
                    raise self.ProcessExporterCPPError, 'Color Structure not handled by Matchbox'
                tmp_color.append(arg)
            # compute the maximal size of the vector
            nb_index = sum(len(o) for o in tmp_color)
            max_len = nb_index + (nb_index//2) - 1
            # create the list with the 0 separator
            curr_color = tmp_color[0]
            for tcolor in tmp_color[1:]:
                curr_color += ['0'] + tcolor
            curr_color += ['0'] * (max_len - len(curr_color))
            # format the output
            matrix_strings.append('{%s}' % ','.join(curr_color))

        matrix_string = 'static const double res[%s][%s] = {%s};' % \
                        (len(color_denominators), max_len, ",".join(matrix_strings))

        return matrix_string
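
    # Worked example (illustrative): if a color-basis entry prints as
    # "1 T(2,4,1) T(1,2)", findall() gives [('T', '2,4,1'), ('T', '1,2')],
    # the two index lists are joined with a '0' separator into {2,4,1,0,1,2},
    # and shorter rows are padded with trailing zeros so that every row of
    # res[][] has the same length.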


#===============================================================================
# ProcessExporterPythia8
#===============================================================================
class OneProcessExporterPythia8(OneProcessExporterCPP):
    """Class to take care of exporting a set of matrix elements to
    Pythia 8 format."""

    # Static variables (for inheritance)
    process_template_h = 'pythia8_process_h.inc'
    process_template_cc = 'pythia8_process_cc.inc'
    process_class_template = 'pythia8_process_class.inc'
    process_definition_template = 'pythia8_process_function_definitions.inc'
    process_wavefunction_template = 'pythia8_process_wavefunctions.inc'
    process_sigmaKin_function_template = 'pythia8_process_sigmaKin_function.inc'
    template_path = os.path.join(_file_path, 'iolibs', 'template_files', 'pythia8')


    def __init__(self, *args, **opts):
        """Set process class name"""

        if 'version' in opts:
            self.version = opts['version']
            del opts['version']
        else:
            self.version = '8.2'
        super(OneProcessExporterPythia8, self).__init__(*args, **opts)

        # Check if any processes are not 2->1,2,3
        for me in self.matrix_elements:
            if me.get_nexternal_ninitial() not in [(3,2),(4,2),(5,2)]:
                nex, nin = me.get_nexternal_ninitial()
                raise InvalidCmd,\
                    "Pythia 8 can only handle 2->1,2,3 processes, not %d->%d" % \
                    (nin, nex-nin)

        self.process_class = self.process_name

    # Methods for generation of process files for Pythia 8

    def get_default_converter(self):

        replace_dict = {}
        # Extract model name
        replace_dict['model_name'] = self.model_name
        if self.version == "8.2":
            replace_dict['include_prefix'] = 'Pythia8/'
        else:
            replace_dict['include_prefix'] = ''

        replace_dict['version'] = self.version

        return replace_dict
    #===========================================================================
    # Process export helper functions
    #===========================================================================
    def get_process_class_definitions(self, write=True):
        """The complete Pythia 8 class definition for the process"""

        replace_dict = self.get_default_converter()

        # Extract process info lines for all processes
        process_lines = "\n".join([self.get_process_info_lines(me) for me in \
                                   self.matrix_elements])

        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        replace_dict['nfinal'] = self.nfinal

        # Extract process class name (for the moment same as file name)
        replace_dict['process_class_name'] = self.process_name

        # Extract process definition
        process_definition = "%s (%s)" % (self.process_string,
                                          self.model_name)
        replace_dict['process_definition'] = process_definition

        process = self.processes[0]
        replace_dict['process_code'] = 10000 + \
                                       100*process.get('id') + \
                                       self.process_number

        replace_dict['inFlux'] = self.get_process_influx()

        replace_dict['id_masses'] = self.get_id_masses(process)
        replace_dict['resonances'] = self.get_resonance_lines()

        replace_dict['nexternal'] = self.nexternal
        replace_dict['nprocesses'] = self.nprocesses

        if self.single_helicities:
            replace_dict['all_sigma_kin_definitions'] = \
                          """// Calculate wavefunctions
                          void calculate_wavefunctions(const int perm[], const int hel[]);
                          static const int nwavefuncs = %d;
                          std::complex<double> w[nwavefuncs][18];
                          static const int namplitudes = %d;
                          std::complex<double> amp[namplitudes];""" % \
                          (len(self.wavefunctions),
                           len(self.amplitudes.get_all_amplitudes()))
            replace_dict['all_matrix_definitions'] = \
                           "\n".join(["double matrix_%s();" % \
                                      me.get('processes')[0].shell_string().\
                                      replace("0_", "") \
                                      for me in self.matrix_elements])

        else:
            replace_dict['all_sigma_kin_definitions'] = \
                          "\n".join(["void sigmaKin_%s();" % \
                                     me.get('processes')[0].shell_string().\
                                     replace("0_", "") \
                                     for me in self.matrix_elements])
            replace_dict['all_matrix_definitions'] = \
                           "\n".join(["double matrix_%s(const int hel[]);" % \
                                      me.get('processes')[0].shell_string().\
                                      replace("0_", "") \
                                      for me in self.matrix_elements])

        if write:
            file = self.read_template_file('pythia8_process_class.inc') % replace_dict
            return file
        else:
            return replace_dict

    def get_process_function_definitions(self, write=True):
        """The complete Pythia 8 class definition for the process"""

        replace_dict = self.get_default_converter()

        # Extract process info lines
        replace_dict['process_lines'] = \
                             "\n".join([self.get_process_info_lines(me) for \
                                        me in self.matrix_elements])

        # Extract process class name (for the moment same as file name)
        replace_dict['process_class_name'] = self.process_name

        color_amplitudes = [me.get_color_amplitudes() for me in \
                            self.matrix_elements]

        replace_dict['initProc_lines'] = \
                                     self.get_initProc_lines(color_amplitudes)
        replace_dict['reset_jamp_lines'] = \
                                     self.get_reset_jamp_lines(color_amplitudes)
        replace_dict['sigmaKin_lines'] = \
                                     self.get_sigmaKin_lines(color_amplitudes)
        replace_dict['sigmaHat_lines'] = \
                                     self.get_sigmaHat_lines()

        replace_dict['setIdColAcol_lines'] = \
                                 self.get_setIdColAcol_lines(color_amplitudes)

        replace_dict['weightDecay_lines'] = \
                                 self.get_weightDecay_lines()

        replace_dict['all_sigmaKin'] = \
                                  self.get_all_sigmaKin_lines(color_amplitudes,
                                                              self.process_name)
        if write:
            file = self.read_template_file('pythia8_process_function_definitions.inc') %\
                   replace_dict
            return file
        else:
            return replace_dict

    def get_process_influx(self):
        """Return the Pythia 8 inFlux setting for the processes in matrix_element"""

        # Create a set with the pairs of incoming partons in definite order,
        # e.g., g g >... u d > ... d~ u > ... gives ([21,21], [1,2], [-2,1])
        beams = set([tuple(sorted([process.get('legs')[0].get('id'),
                                   process.get('legs')[1].get('id')])) \
                     for process in self.processes])

        # Define a number of useful sets
        antiquarks = range(-1, -6, -1)
        quarks = range(1,6)
        antileptons = range(-11, -17, -1)
        leptons = range(11, 17, 1)
        allquarks = antiquarks + quarks
        antifermions = antiquarks + antileptons
        fermions = quarks + leptons
        allfermions = allquarks + antileptons + leptons
        downfermions = range(-2, -5, -2) + range(-1, -5, -2) + \
                       range(-12, -17, -2) + range(-11, -17, -2)
        upfermions = range(1, 5, 2) + range(2, 5, 2) + \
                     range(11, 17, 2) + range(12, 17, 2)

        # The following gives a list from flavor combinations to "inFlux" values
        # allowed by Pythia8, see Pythia 8 document SemiInternalProcesses.html
        set_tuples = [(set([(21, 21)]), "gg"),
                      (set(list(itertools.product(allquarks, [21]))), "qg"),
                      (set(zip(antiquarks, quarks)), "qqbarSame"),
                      (set(list(itertools.product(allquarks,
                                                  allquarks))), "qq"),
                      (set(zip(antifermions, fermions)), "ffbarSame"),
                      (set(zip(downfermions, upfermions)), "ffbarChg"),
                      (set(list(itertools.product(allfermions,
                                                  allfermions))), "ff"),
                      (set(list(itertools.product(allfermions, [22]))), "fgm"),
                      (set([(21, 22)]), "ggm"),
                      (set([(22, 22)]), "gmgm")]

        for set_tuple in set_tuples:
            if beams.issubset(set_tuple[0]):
                return set_tuple[1]

        raise InvalidCmd('Pythia 8 cannot handle incoming flavors %s' %\
                         repr(beams))

        return
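
    # Example (illustrative): a multiprocess containing u u~ > ..., d d~ > ...
    # and s s~ > ... gives beams = {(-1,1), (-2,2), (-3,3)}, a subset of the
    # "qqbarSame" set built from zip(antiquarks, quarks), so "qqbarSame" is
    # returned; a pure gluon-gluon initial state would match "gg" instead.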

    #===========================================================================
    # Global helper methods
    #===========================================================================
    @classmethod
    def read_template_file(cls, filename):
        """Open a template file and return the contents."""

        try:
            return super(OneProcessExporterPythia8, cls).read_template_file(filename)
        except:
            return super(OneProcessExporterPythia8, cls).read_template_file(filename, classpath=True)


    def get_id_masses(self, process):
        """Return the lines which define the ids for the final state particles,
        for the Pythia phase space"""

        if self.nfinal == 1:
            return ""

        mass_strings = []
        for i in range(2, len(process.get_legs_with_decays())):
            if self.model.get_particle(process.get_legs_with_decays()[i].get('id')).\
                   get('mass') not in ['zero', 'ZERO']:
                mass_strings.append("int id%dMass() const {return %d;}" % \
                                    (i + 1, abs(process.get_legs_with_decays()[i].get('id'))))

        return "\n".join(mass_strings)

    def get_resonance_lines(self):
        """Return the lines which define the ids for intermediate resonances
        for the Pythia phase space"""

        if self.nfinal == 1:
            return "virtual int resonanceA() const {return %d;}" % \
                   abs(self.processes[0].get('legs')[2].get('id'))

        res_strings = []
        res_letters = ['A', 'B']

        sids, singleres, schannel = self.get_resonances()

        for i, sid in enumerate(sids[:2]):
            res_strings.append("virtual int resonance%s() const {return %d;}"\
                               % (res_letters[i], sid))

        if schannel:
            res_strings.append("virtual bool isSChannel() const {return true;}")

        if singleres != 0:
            res_strings.append("virtual int idSChannel() const {return %d;}" \
                               % singleres)

        return "\n".join(res_strings)

    def get_resonances(self):
        """Return the PIDs for any resonances in 2->2 and 2->3 processes."""

        model = self.matrix_elements[0].get('processes')[0].get('model')
        new_pdg = model.get_first_non_pdg()
        # Get a list of all resonant s-channel contributions
        diagrams = sum([me.get('diagrams') for me in self.matrix_elements], [])
        resonances = []
        no_t_channels = True
        final_s_channels = []
        for diagram in diagrams:
            schannels, tchannels = diagram.get('amplitudes')[0].\
                                   get_s_and_t_channels(self.ninitial, model,
                                                        new_pdg)
            for schannel in schannels:
                sid = schannel.get('legs')[-1].get('id')
                part = self.model.get_particle(sid)
                if part:
                    width = self.model.get_particle(sid).get('width')
                    if width.lower() != 'zero':
                        # Only care about absolute value of resonance PIDs:
                        resonances.append(abs(sid))
                else:
                    sid = 0
                if len(tchannels) == 1 and schannel == schannels[-1]:
                    final_s_channels.append(abs(sid))

            if len(tchannels) > 1:
                # There are t-channel diagrams
                no_t_channels = False

        resonance_set = set(resonances)
        final_s_set = set(final_s_channels)

        singleres = 0
        # singleres is set if all diagrams have the same final resonance
        if len(final_s_channels) == len(diagrams) and len(final_s_set) == 1 \
           and final_s_channels[0] != 0:
            singleres = final_s_channels[0]

        resonance_set = list(set([pid for pid in resonance_set]))

        # schannel is True if all diagrams are pure s-channel and there are
        # no QCD vertices
        schannel = no_t_channels and \
                   not any(['QCD' in d.calculate_orders() for d in diagrams])

        return resonance_set, singleres, schannel

    def get_initProc_lines(self, color_amplitudes):
        """Get initProc_lines for function definition for Pythia 8 .cc file"""

        initProc_lines = []

        initProc_lines.append("// Set massive/massless matrix elements for c/b/mu/tau")
        # Add lines to set c/b/tau/mu kinematics massive/massless
        if not self.model.get_particle(4) or \
               self.model.get_particle(4).get('mass').lower() == 'zero':
            cMassiveME = "0."
        else:
            cMassiveME = "particleDataPtr->m0(4)"
        initProc_lines.append("mcME = %s;" % cMassiveME)
        if not self.model.get_particle(5) or \
               self.model.get_particle(5).get('mass').lower() == 'zero':
            bMassiveME = "0."
        else:
            bMassiveME = "particleDataPtr->m0(5)"
        initProc_lines.append("mbME = %s;" % bMassiveME)
        if not self.model.get_particle(13) or \
               self.model.get_particle(13).get('mass').lower() == 'zero':
            muMassiveME = "0."
        else:
            muMassiveME = "particleDataPtr->m0(13)"
        initProc_lines.append("mmuME = %s;" % muMassiveME)
        if not self.model.get_particle(15) or \
               self.model.get_particle(15).get('mass').lower() == 'zero':
            tauMassiveME = "0."
        else:
            tauMassiveME = "particleDataPtr->m0(15)"
        initProc_lines.append("mtauME = %s;" % tauMassiveME)

        for i, me in enumerate(self.matrix_elements):
            initProc_lines.append("jamp2[%d] = new double[%d];" % \
                                  (i, len(color_amplitudes[i])))

        return "\n".join(initProc_lines)

    def get_setIdColAcol_lines(self, color_amplitudes):
        """Generate lines to set final-state id and color info for process"""

        res_lines = []

        # Create a set with the pairs of incoming partons
        beams = set([(process.get('legs')[0].get('id'),
                      process.get('legs')[1].get('id')) \
                     for process in self.processes])

        # Now write a selection routine for final state ids
        for ibeam, beam_parts in enumerate(beams):
            if ibeam == 0:
                res_lines.append("if(id1 == %d && id2 == %d){" % beam_parts)
            else:
                res_lines.append("else if(id1 == %d && id2 == %d){" % beam_parts)
            # Pick out all processes with this beam pair
            beam_processes = [(i, me) for (i, me) in \
                              enumerate(self.matrix_elements) if beam_parts in \
                              [(process.get('legs')[0].get('id'),
                                process.get('legs')[1].get('id')) \
                               for process in me.get('processes')]]
            # Pick out all mirror processes for this beam pair
            beam_mirror_processes = []
            if beam_parts[0] != beam_parts[1]:
                beam_mirror_processes = [(i, me) for (i, me) in \
                              enumerate(self.matrix_elements) if beam_parts in \
                              [(process.get('legs')[1].get('id'),
                                process.get('legs')[0].get('id')) \
                               for process in me.get('processes')]]

            final_id_list = []
            final_mirror_id_list = []
            for (i, me) in beam_processes:
                final_id_list.extend([tuple([l.get('id') for l in \
                                      proc.get_legs_with_decays() if l.get('state')]) \
                                      for proc in me.get('processes') \
                                      if beam_parts == \
                                      (proc.get('legs')[0].get('id'),
                                       proc.get('legs')[1].get('id'))])
            for (i, me) in beam_mirror_processes:
                final_mirror_id_list.extend([tuple([l.get('id') for l in \
                                      proc.get_legs_with_decays() if l.get('state')]) \
                                      for proc in me.get_mirror_processes() \
                                      if beam_parts == \
                                      (proc.get('legs')[0].get('id'),
                                       proc.get('legs')[1].get('id'))])
            final_id_list = set(final_id_list)
            final_mirror_id_list = set(final_mirror_id_list)

            if final_id_list and final_mirror_id_list or \
               not final_id_list and not final_mirror_id_list:
                raise self.ProcessExporterCPPError,\
                      "Missing processes, or both process and mirror process"

            ncombs = len(final_id_list) + len(final_mirror_id_list)

            res_lines.append("// Pick one of the flavor combinations %s" % \
                             ", ".join([repr(ids) for ids in final_id_list]))

            me_weight = []
            for final_ids in final_id_list:
                items = [(i, len([ p for p in me.get('processes') \
                                   if [l.get('id') for l in \
                                       p.get_legs_with_decays()] == \
                                   list(beam_parts) + list(final_ids)])) \
                         for (i, me) in beam_processes]
                me_weight.append("+".join(["matrix_element[%i]*%i" % (i, l) for\
                                           (i, l) in items if l > 0]).\
                                 replace('*1', ''))
                if any([l > 1 for (i, l) in items]):
                    raise self.ProcessExporterCPPError,\
                          "More than one process with identical " + \
                          "external particles is not supported"

            for final_ids in final_mirror_id_list:
                items = [(i, len([ p for p in me.get_mirror_processes() \
                                   if [l.get('id') for l in p.get_legs_with_decays()] == \
                                   list(beam_parts) + list(final_ids)])) \
                         for (i, me) in beam_mirror_processes]
                me_weight.append("+".join(["matrix_element[%i]*%i" % \
                                           (i+len(self.matrix_elements), l) for\
                                           (i, l) in items if l > 0]).\
                                 replace('*1', ''))
                if any([l > 1 for (i, l) in items]):
                    raise self.ProcessExporterCPPError,\
                          "More than one process with identical " + \
                          "external particles is not supported"

            if final_id_list:
                res_lines.append("int flavors[%d][%d] = {%s};" % \
                                 (ncombs, self.nfinal,
                                  ",".join(["{" + ",".join([str(id) for id \
                                            in ids]) + "}" for ids \
                                            in final_id_list])))
            elif final_mirror_id_list:
                res_lines.append("int flavors[%d][%d] = {%s};" % \
                                 (ncombs, self.nfinal,
                                  ",".join(["{" + ",".join([str(id) for id \
                                            in ids]) + "}" for ids \
                                            in final_mirror_id_list])))
            res_lines.append("vector<double> probs;")
            res_lines.append("double sum = %s;" % "+".join(me_weight))
            for me in me_weight:
                res_lines.append("probs.push_back(%s/sum);" % me)
            res_lines.append("int choice = rndmPtr->pick(probs);")
            for i in range(self.nfinal):
                res_lines.append("id%d = flavors[choice][%d];" % (i+3, i))

            res_lines.append("}")

        res_lines.append("setId(%s);" % ",".join(["id%d" % i for i in \
                                                  range(1, self.nexternal + 1)]))

        # Now write a selection routine for color flows

        # We need separate selection for each flavor combination,
        # since the different processes might have different color
        # structures.

        # Here goes the color connections corresponding to the JAMPs
        # Only one output, for the first subproc!

        res_lines.append("// Pick color flow")

        res_lines.append("int ncolor[%d] = {%s};" % \
                         (len(color_amplitudes),
                          ",".join([str(len(colamp)) for colamp in \
                                    color_amplitudes])))

        for ime, me in enumerate(self.matrix_elements):

            res_lines.append("if((%s)){" % \
                                 ")||(".join(["&&".join(["id%d == %d" % \
                                              (i+1, l.get('id')) for (i, l) in \
                                              enumerate(p.get_legs_with_decays())])\
                                              for p in me.get('processes')]))
            if ime > 0:
                res_lines[-1] = "else " + res_lines[-1]

            proc = me.get('processes')[0]
            if not me.get('color_basis'):
                # If no color basis, just output trivial color flow
                res_lines.append("setColAcol(%s);" % ",".join(["0"]*2*self.nfinal))
            else:
                # Else, build a color representation dictionary
                repr_dict = {}
                legs = proc.get_legs_with_decays()
                for l in legs:
                    repr_dict[l.get('number')] = \
                        proc.get('model').get_particle(l.get('id')).get_color()
                # Get the list of color flows
                color_flow_list = \
                    me.get('color_basis').color_flow_decomposition(\
                        repr_dict, self.ninitial)
                # Select a color flow
                ncolor = len(me.get('color_basis'))
                res_lines.append("""vector<double> probs;
                  double sum = %s;
                  for(int i=0;i<ncolor[%i];i++)
                  probs.push_back(jamp2[%i][i]/sum);
                  int ic = rndmPtr->pick(probs);""" % \
                                 ("+".join(["jamp2[%d][%d]" % (ime, i) for i \
                                            in range(ncolor)]), ime, ime))

                color_flows = []
                for color_flow_dict in color_flow_list:
                    color_flows.append([int(fmod(color_flow_dict[l.get('number')][i], 500)) \
                                        for (l,i) in itertools.product(legs, [0,1])])

                # Write out colors for the selected color flow
                res_lines.append("static int colors[%d][%d] = {%s};" % \
                                 (ncolor, 2 * self.nexternal,
                                  ",".join(["{" + ",".join([str(id) for id \
                                            in flows]) + "}" for flows \
                                            in color_flows])))

                res_lines.append("setColAcol(%s);" % \
                                 ",".join(["colors[ic][%d]" % i for i in \
                                           range(2 * self.nexternal)]))
            res_lines.append('}')

        # Same thing but for mirror processes
        for ime, me in enumerate(self.matrix_elements):
            if not me.get('has_mirror_process'):
                continue
            res_lines.append("else if((%s)){" % \
                                 ")||(".join(["&&".join(["id%d == %d" % \
                                              (i+1, l.get('id')) for (i, l) in \
                                              enumerate(p.get_legs_with_decays())])\
                                              for p in me.get_mirror_processes()]))

            proc = me.get('processes')[0]
            if not me.get('color_basis'):
                # If no color basis, just output trivial color flow
                res_lines.append("setColAcol(%s);" % ",".join(["0"]*2*self.nfinal))
            else:
                # Else, build a color representation dictionary
                repr_dict = {}
                legs = proc.get_legs_with_decays()
                legs[0:2] = [legs[1], legs[0]]
                for l in legs:
                    repr_dict[l.get('number')] = \
                        proc.get('model').get_particle(l.get('id')).get_color()
                # Get the list of color flows
                color_flow_list = \
                    me.get('color_basis').color_flow_decomposition(\
                        repr_dict, self.ninitial)
                # Select a color flow
                ncolor = len(me.get('color_basis'))
                res_lines.append("""vector<double> probs;
                  double sum = %s;
                  for(int i=0;i<ncolor[%i];i++)
                  probs.push_back(jamp2[%i][i]/sum);
                  int ic = rndmPtr->pick(probs);""" % \
                                 ("+".join(["jamp2[%d][%d]" % (ime, i) for i \
                                            in range(ncolor)]), ime, ime))

                color_flows = []
                for color_flow_dict in color_flow_list:
                    color_flows.append([color_flow_dict[l.get('number')][i] % 500 \
                                        for (l,i) in itertools.product(legs, [0,1])])

                # Write out colors for the selected color flow
                res_lines.append("static int colors[%d][%d] = {%s};" % \
                                 (ncolor, 2 * self.nexternal,
                                  ",".join(["{" + ",".join([str(id) for id \
                                            in flows]) + "}" for flows \
                                            in color_flows])))

                res_lines.append("setColAcol(%s);" % \
                                 ",".join(["colors[ic][%d]" % i for i in \
                                           range(2 * self.nexternal)]))
            res_lines.append('}')

        return "\n".join(res_lines)


    def get_weightDecay_lines(self):
        """Get weightDecay_lines for function definition for Pythia 8 .cc file"""

        weightDecay_lines = "// Just use isotropic decay (default)\n"
        weightDecay_lines += "return 1.;"

        return weightDecay_lines
1569 1570 #=============================================================================== 1571 # Routines to export/output UFO models in Pythia8 format 1572 #===============================================================================
1573 - def convert_model_to_pythia8(self, model, pythia_dir):
1574 """Create a full valid Pythia 8 model from an MG5 model (coming from UFO)""" 1575 1576 if not os.path.isfile(os.path.join(pythia_dir, 'include', 'Pythia.h'))\ 1577 and not os.path.isfile(os.path.join(pythia_dir, 'include', 'Pythia8', 'Pythia.h')): 1578 logger.warning('Directory %s is not a valid Pythia 8 main dir.' % pythia_dir) 1579 1580 # create the model parameter files 1581 model_builder = UFOModelConverterPythia8(model, pythia_dir, replace_dict=self.get_default_converter()) 1582 model_builder.cc_file_dir = "Processes_" + model_builder.model_name 1583 model_builder.include_dir = model_builder.cc_file_dir 1584 1585 model_builder.write_files() 1586 # Write makefile 1587 model_builder.write_makefile() 1588 # Write param_card 1589 model_builder.write_param_card() 1590 return model_builder.model_name, model_builder.cc_file_dir
1591
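A hedged usage sketch; exporter, model and pythia8_path are assumed to exist and none of these names come from the module itself:

    # Returns the converted model name and the Processes_<model> directory
    # written inside the Pythia 8 installation.
    model_name, cc_dir = exporter.convert_model_to_pythia8(model, pythia8_path)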
1592 1593 #=============================================================================== 1594 # ProcessExporterCPP 1595 #=============================================================================== 1596 -class ProcessExporterCPP(VirtualExporter):
1597 """Class to take care of exporting a set of matrix elements to 1598 C++ (standalone) format.""" 1599 1600 grouped_mode = False 1601 exporter = 'cpp' 1602 1603 default_opt = {'clean': False, 'complex_mass':False, 1604 'export_format':'madevent', 'mp': False, 1605 'v5_model': True 1606 } 1607 1608 oneprocessclass = OneProcessExporterCPP 1609 s= _file_path + 'iolibs/template_files/' 1610 from_template = {'src': [s+'rambo.h', s+'rambo.cc', s+'read_slha.h', s+'read_slha.cc'], 1611 'SubProcesses': [s+'check_sa.cpp']} 1612 to_link_in_P = ['check_sa.cpp', 'Makefile'] 1613 template_src_make = pjoin(_file_path, 'iolibs', 'template_files','Makefile_sa_cpp_src') 1614 template_Sub_make = template_src_make 1615 1616 1617
1618 - def __init__(self, dir_path = "", opt=None):
1619 """Initiate the ProcessExporterCPP with directory information""" 1620 self.mgme_dir = MG5DIR 1621 self.dir_path = dir_path 1622 self.model = None 1623 1624 self.opt = dict(self.default_opt) 1625 if opt: 1626 self.opt.update(opt) 1627 1628 # Placeholder to pass information to the run_interface 1629 self.proc_characteristic = banner_mod.ProcCharacteristic()
1630
1631 - def copy_template(self, model):
1632 """Prepare export_dir as standalone_cpp directory, including: 1633 src (for RAMBO, model and ALOHA files + makefile) 1634 lib (with compiled libraries from src) 1635 SubProcesses (with check_sa.cpp + makefile and Pxxxxx directories) 1636 """ 1637 1638 try: 1639 os.mkdir(self.dir_path) 1640 except os.error as error: 1641 logger.warning(error.strerror + " " + self.dir_path) 1642 1643 with misc.chdir(self.dir_path): 1644 logger.info('Creating subdirectories in directory %s' % self.dir_path) 1645 1646 for d in ['src', 'lib', 'Cards', 'SubProcesses']: 1647 try: 1648 os.mkdir(d) 1649 except os.error as error: 1650 logger.warning(error.strerror + " " + self.dir_path) 1651 1652 # Write param_card 1653 open(os.path.join("Cards","param_card.dat"), 'w').write(\ 1654 model.write_param_card()) 1655 1656 1657 # Copy the needed src files 1658 for key in self.from_template: 1659 for f in self.from_template[key]: 1660 cp(f, key) 1661 1662 # Copy src Makefile 1663 makefile = self.read_template_file('Makefile_sa_cpp_src') % \ 1664 {'model': self.get_model_name(model.get('name'))} 1665 open(os.path.join('src', 'Makefile'), 'w').write(makefile) 1666 1667 # Copy SubProcesses Makefile 1668 makefile = self.read_template_file('Makefile_sa_cpp_sp') % \ 1669 {'model': self.get_model_name(model.get('name'))} 1670 open(os.path.join('SubProcesses', 'Makefile'), 'w').write(makefile)
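For orientation, the resulting standalone_cpp tree looks like this (entries follow from from_template and the writes above; the P<number>_<process_name> directories are added later by generate_subprocess_directory):

    # <dir_path>/
    #     Cards/param_card.dat
    #     src/rambo.h, rambo.cc, read_slha.h, read_slha.cc, Makefile
    #     lib/                                  (filled when src/ is compiled)
    #     SubProcesses/check_sa.cpp, Makefile
    #     SubProcesses/P<number>_<process_name>/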
1671 1672 #=========================================================================== 1673 # Helper functions 1674 #===========================================================================
1675 - def modify_grouping(self, matrix_element):
1676 """allow to modify the grouping (if grouping is in place) 1677 return two value: 1678 - True/False if the matrix_element was modified 1679 - the new(or old) matrix element""" 1680 1681 return False, matrix_element
1682 1683 1684
1685 - def convert_model(self, model, wanted_lorentz = [], 1686 wanted_couplings = []):
1687 # create the model parameter files 1688 model_builder = UFOModelConverterCPP(model, 1689 os.path.join(self.dir_path, 'src'), 1690 wanted_lorentz, 1691 wanted_couplings) 1692 model_builder.write_files()
1693
1694 - def compile_model(self):
1695 make_model_cpp(self.dir_path)
1696 1697 @classmethod
1698 - def read_template_file(cls, *args, **opts):
1699 """Open a template file and return the contents.""" 1700 1701 return cls.oneprocessclass.read_template_file(*args, **opts)
1702 1703 #=============================================================================== 1704 # generate_subprocess_directory 1705 #===============================================================================
1706 - def generate_subprocess_directory(self, matrix_element, cpp_helas_call_writer, 1707 proc_number=None):
1708 """Generate the Pxxxxx directory for a subprocess in C++ standalone, 1709 including the necessary .h and .cc files""" 1710 1711 1712 process_exporter_cpp = self.oneprocessclass(matrix_element,cpp_helas_call_writer) 1713 1714 1715 # Create the directory PN_xx_xxxxx in the specified path 1716 dirpath = pjoin(self.dir_path, 'SubProcesses', "P%d_%s" % (process_exporter_cpp.process_number, 1717 process_exporter_cpp.process_name)) 1718 try: 1719 os.mkdir(dirpath) 1720 except os.error as error: 1721 logger.warning(error.strerror + " " + dirpath) 1722 1723 with misc.chdir(dirpath): 1724 logger.info('Creating files in directory %s' % dirpath) 1725 process_exporter_cpp.path = dirpath 1726 # Create the process .h and .cc files 1727 process_exporter_cpp.generate_process_files() 1728 for file in self.to_link_in_P: 1729 ln('../%s' % file) 1730 return
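A hedged end-to-end sketch; model and matrix_element are assumed to be available, and the CPPUFOHelasCallWriter constructor is assumed to take the model object:

    import madgraph.iolibs.helas_call_writers as helas_call_writers

    exporter = ProcessExporterCPP('/tmp/my_standalone_cpp')
    exporter.copy_template(model)
    cpp_writer = helas_call_writers.CPPUFOHelasCallWriter(model)
    exporter.generate_subprocess_directory(matrix_element, cpp_writer)
    exporter.convert_model(model)
    exporter.finalize()        # runs make in src/ via compile_model()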
1731 1732 @staticmethod
1733 - def get_model_name(name):
1734 """Replace - with _, + with _plus_ in a model name.""" 1735 1736 name = name.replace('-', '_') 1737 name = name.replace('+', '_plus_') 1738 return name
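For example, the two substitutions give (model names illustrative):

    # get_model_name('loop_sm-no_b_mass')  ->  'loop_sm_no_b_mass'
    # get_model_name('sm+heft')            ->  'sm_plus_heft'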
1739
1740 - def finalize(self, *args, **opts):
1741 """ """ 1742 self.compile_model() 1743 pass
1744
1745 -class ProcessExporterMatchbox(ProcessExporterCPP):
1746 oneprocessclass = OneProcessExporterMatchbox
1747
1748 -class ProcessExporterPythia8(ProcessExporterCPP):
1749 oneprocessclass = OneProcessExporterPythia8 1750 grouped_mode = 'madevent' 1751 1752 #=============================================================================== 1753 # generate_process_files_pythia8 1754 #===============================================================================
1755 - def generate_process_directory(self, multi_matrix_element, cpp_helas_call_writer, 1756 process_string = "", 1757 process_number = 0, 1758 version='8.2'):
1759 1760 """Generate the .h and .cc files needed for Pythia 8, for the 1761 processes described by multi_matrix_element""" 1762 1763 process_exporter_pythia8 = OneProcessExporterPythia8(multi_matrix_element, 1764 cpp_helas_call_writer, 1765 process_string, 1766 process_number, 1767 self.dir_path, 1768 version=version) 1769 1770 # Set process directory 1771 model = process_exporter_pythia8.model 1772 model_name = process_exporter_pythia8.model_name 1773 process_exporter_pythia8.process_dir = \ 1774 'Processes_%(model)s' % {'model': \ 1775 model_name} 1776 process_exporter_pythia8.include_dir = process_exporter_pythia8.process_dir 1777 process_exporter_pythia8.generate_process_files() 1778 return process_exporter_pythia8
1779 1780 #=============================================================================== 1781 # generate_example_file_pythia8 1782 #=============================================================================== 1783 @staticmethod
1784 - def generate_example_file_pythia8(path, 1785 model_path, 1786 process_names, 1787 exporter, 1788 main_file_name = "", 1789 example_dir = "examples", 1790 version="8.2"):
1791 """Generate the main_model_name.cc file and Makefile in the examples dir""" 1792 1793 filepath = os.path.join(path, example_dir) 1794 if not os.path.isdir(filepath): 1795 os.makedirs(filepath) 1796 1797 replace_dict = {} 1798 1799 # Extract version number and date from VERSION file 1800 info_lines = get_mg5_info_lines() 1801 replace_dict['info_lines'] = info_lines 1802 1803 # Extract model name 1804 replace_dict['model_name'] = exporter.model_name 1805 1806 # Extract include line 1807 replace_dict['include_lines'] = \ 1808 "\n".join(["#include \"%s.h\"" % proc_name \ 1809 for proc_name in process_names]) 1810 1811 # Extract setSigmaPtr line 1812 replace_dict['sigma_pointer_lines'] = \ 1813 "\n".join(["pythia.setSigmaPtr(new %s());" % proc_name \ 1814 for proc_name in process_names]) 1815 1816 # Extract param_card path 1817 replace_dict['param_card'] = os.path.join(os.path.pardir,model_path, 1818 "param_card_%s.dat" % \ 1819 exporter.model_name) 1820 1821 # Create the example main file 1822 if version =="8.2": 1823 template_path = 'pythia8.2_main_example_cc.inc' 1824 makefile_path = 'pythia8.2_main_makefile.inc' 1825 replace_dict['include_prefix'] = 'Pythia8/' 1826 else: 1827 template_path = 'pythia8_main_example_cc.inc' 1828 makefile_path = 'pythia8_main_makefile.inc' 1829 replace_dict['include_prefix'] = '' 1830 1831 1832 file = ProcessExporterPythia8.read_template_file(template_path) % \ 1833 replace_dict 1834 1835 if not main_file_name: 1836 num = 1 1837 while os.path.exists(os.path.join(filepath, 1838 'main_%s_%i.cc' % (exporter.model_name, num))) or \ 1839 os.path.exists(os.path.join(filepath, 1840 'main_%s_%i' % (exporter.model_name, num))): 1841 num += 1 1842 main_file_name = str(num) 1843 1844 main_file = 'main_%s_%s' % (exporter.model_name, 1845 main_file_name) 1846 1847 main_filename = os.path.join(filepath, main_file + '.cc') 1848 1849 # Write the file 1850 writers.CPPWriter(main_filename).writelines(file) 1851 1852 replace_dict = {} 1853 1854 # Extract version number and date from VERSION file 1855 replace_dict['info_lines'] = get_mg5_info_lines() 1856 1857 replace_dict['main_file'] = main_file 1858 1859 replace_dict['process_dir'] = model_path 1860 1861 replace_dict['include_dir'] = exporter.include_dir 1862 1863 # Create the makefile 1864 file = ProcessExporterPythia8.read_template_file(makefile_path) % replace_dict 1865 1866 make_filename = os.path.join(filepath, 'Makefile_%s_%s' % \ 1867 (exporter.model_name, main_file_name)) 1868 1869 # Write the file 1870 open(make_filename, 'w').write(file) 1871 1872 logger.info("Created files %s and %s in directory %s" \ 1873 % (os.path.split(main_filename)[-1], 1874 os.path.split(make_filename)[-1], 1875 os.path.split(make_filename)[0])) 1876 return main_file, make_filename
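A possible invocation, with every argument value chosen purely for illustration (process_exporter would be the object returned by generate_process_directory above):

    main_file, makefile = ProcessExporterPythia8.generate_example_file_pythia8(
        path=pythia8_path,                     # hypothetical Pythia 8 installation
        model_path='Processes_sm',             # illustrative
        process_names=['Sigma_sm_gg_ttx'],     # illustrative process class name
        exporter=process_exporter,
        version='8.2')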
1877
1878 - def convert_model(self,*args,**opts):
1879 pass
1880 - def finalize(self, *args, **opts):
1881 pass
1882
1883 -def get_mg5_info_lines():
1884 """Return info lines for MG5, suitable to place at beginning of 1885 Fortran files""" 1886 1887 info = misc.get_pkg_info() 1888 info_lines = "" 1889 if info and info.has_key('version') and info.has_key('date'): 1890 info_lines = "# MadGraph5_aMC@NLO v. %s, %s\n" % \ 1891 (info['version'], info['date']) 1892 info_lines = info_lines + \ 1893 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 1894 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 1895 else: 1896 info_lines = "# MadGraph5_aMC@NLO\n" + \ 1897 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 1898 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 1899 1900 return info_lines
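With package information available, the returned header reads (version and date shown as placeholders):

    # MadGraph5_aMC@NLO v. X.Y.Z, YYYY-MM-DD
    # By the MadGraph5_aMC@NLO Development Team
    # Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch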
1901
1902 -def coeff(ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1903 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1904 1905 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1906 1907 if total_coeff == 1: 1908 if is_imaginary: 1909 return '+std::complex<double>(0,1)*' 1910 else: 1911 return '+' 1912 elif total_coeff == -1: 1913 if is_imaginary: 1914 return '-std::complex<double>(0,1)*' 1915 else: 1916 return '-' 1917 1918 res_str = '%+i.' % total_coeff.numerator 1919 1920 if total_coeff.denominator != 1: 1921 # total_coeff is not an integer: append the denominator 1922 res_str = res_str + '/%i.' % total_coeff.denominator 1923 1924 if is_imaginary: 1925 res_str = res_str + '*std::complex<double>(0,1)' 1926 1927 return res_str + '*'
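A few worked values (Nc_value left at its default of 3), following the branches above:

    # coeff(1, fractions.Fraction(1, 3), False, 1)   ->  '+'
    # coeff(1, fractions.Fraction(1, 3), False, 0)   ->  '+1./3.*'
    # coeff(-1, fractions.Fraction(1, 1), True, 0)   ->  '-std::complex<double>(0,1)*'
    # coeff(2, fractions.Fraction(1, 1), True, 0)    ->  '+2.*std::complex<double>(0,1)*'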
1928
1929 #=============================================================================== 1930 # UFOModelConverterCPP 1931 #=============================================================================== 1932 1933 -class UFOModelConverterCPP(object):
1934 """ A converter of the UFO-MG5 Model to the C++ format """ 1935 1936 # Static variables (for inheritance) 1937 output_name = 'C++ Standalone' 1938 namespace = 'MG5' 1939 1940 # Dictionary from Python type to C++ type 1941 type_dict = {"real": "double", 1942 "complex": "std::complex<double>"} 1943 1944 # Regular expressions for cleaning of lines from Aloha files 1945 compiler_option_re = re.compile('^#\w') 1946 namespace_re = re.compile('^using namespace') 1947 1948 slha_to_depend = {('SMINPUTS', (3,)): ('aS',), 1949 ('SMINPUTS', (1,)): ('aEM',)} 1950 1951 # Template files to use 1952 include_dir = '.' 1953 cc_file_dir = '.' 1954 param_template_h = 'cpp_model_parameters_h.inc' 1955 param_template_cc = 'cpp_model_parameters_cc.inc' 1956 aloha_template_h = 'cpp_hel_amps_h.inc' 1957 aloha_template_cc = 'cpp_hel_amps_cc.inc' 1958 1959 copy_include_files = [] 1960 copy_cc_files = [] 1961
1962 - def __init__(self, model, output_path, wanted_lorentz = [], 1963 wanted_couplings = [], replace_dict={}):
1964 """ initialization of the objects """ 1965 1966 self.model = model 1967 self.model_name = ProcessExporterCPP.get_model_name(model['name']) 1968 1969 self.dir_path = output_path 1970 self.default_replace_dict = dict(replace_dict) 1971 # List of needed ALOHA routines 1972 self.wanted_lorentz = wanted_lorentz 1973 1974 # For dependent couplings, only want to update the ones 1975 # actually used in each process. For other couplings and 1976 # parameters, just need a list of all. 1977 self.coups_dep = {} # name -> base_objects.ModelVariable 1978 self.coups_indep = [] # base_objects.ModelVariable 1979 self.params_dep = [] # base_objects.ModelVariable 1980 self.params_indep = [] # base_objects.ModelVariable 1981 self.p_to_cpp = parsers.UFOExpressionParserCPP() 1982 1983 # Prepare parameters and couplings for writeout in C++ 1984 self.prepare_parameters() 1985 self.prepare_couplings(wanted_couplings)
1986
1987 - def write_files(self):
1988 """Create all necessary files""" 1989 1990 # Write Helas Routines 1991 self.write_aloha_routines() 1992 1993 # Write parameter (and coupling) class files 1994 self.write_parameter_class_files()
1995 1996 # Routines for preparing parameters and couplings from the model 1997
1998 - def prepare_parameters(self):
1999 """Extract the parameters from the model, and store them in 2000 the two lists params_indep and params_dep""" 2001 2002 # Keep only dependences on alphaS, to save time in execution 2003 keys = self.model['parameters'].keys() 2004 keys.sort(key=len) 2005 params_ext = [] 2006 for key in keys: 2007 if key == ('external',): 2008 params_ext += [p for p in self.model['parameters'][key] if p.name] 2009 elif 'aS' in key: 2010 for p in self.model['parameters'][key]: 2011 self.params_dep.append(base_objects.ModelVariable(p.name, 2012 p.name + " = " + \ 2013 self.p_to_cpp.parse(p.expr) + ";", 2014 p.type, 2015 p.depend)) 2016 else: 2017 for p in self.model['parameters'][key]: 2018 if p.name == 'ZERO': 2019 continue 2020 self.params_indep.append(base_objects.ModelVariable(p.name, 2021 p.name + " = " + \ 2022 self.p_to_cpp.parse(p.expr) + ";", 2023 p.type, 2024 p.depend)) 2025 2026 # For external parameters, want to read off the SLHA block code 2027 while params_ext: 2028 param = params_ext.pop(0) 2029 # Read value from the slha variable 2030 expression = "" 2031 assert param.value.imag == 0 2032 if len(param.lhacode) == 1: 2033 expression = "%s = slha.get_block_entry(\"%s\", %d, %e);" % \ 2034 (param.name, param.lhablock.lower(), 2035 param.lhacode[0], param.value.real) 2036 elif len(param.lhacode) == 2: 2037 expression = "indices[0] = %d;\nindices[1] = %d;\n" % \ 2038 (param.lhacode[0], param.lhacode[1]) 2039 expression += "%s = slha.get_block_entry(\"%s\", indices, %e);" \ 2040 % (param.name, param.lhablock.lower(), param.value.real) 2041 else: 2042 raise MadGraph5Error("Only support for SLHA blocks with 1 or 2 indices") 2043 self.params_indep.insert(0, 2044 base_objects.ModelVariable(param.name, 2045 expression, 2046 'real'))
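The stored expressions for external parameters are C++ calls into the SLHA reader; with illustrative parameter names and values, a one-index and a two-index entry come out as:

    # mdl_MZ = slha.get_block_entry("mass", 23, 9.118800e+01);
    #
    # indices[0] = 1;
    # indices[1] = 2;
    # mdl_CKM1x2 = slha.get_block_entry("ckmblock", indices, 2.257300e-01);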
2047
2048 - def prepare_couplings(self, wanted_couplings = []):
2049 """Extract the couplings from the model, and store them in 2050 the two lists coups_indep and coups_dep""" 2051 2052 # Keep only dependences on alphaS, to save time in execution 2053 keys = self.model['couplings'].keys() 2054 keys.sort(key=len) 2055 for key, coup_list in self.model['couplings'].items(): 2056 if "aS" in key: 2057 for c in coup_list: 2058 if not wanted_couplings or c.name in wanted_couplings: 2059 self.coups_dep[c.name] = base_objects.ModelVariable(\ 2060 c.name, 2061 c.expr, 2062 c.type, 2063 c.depend) 2064 else: 2065 for c in coup_list: 2066 if not wanted_couplings or c.name in wanted_couplings: 2067 self.coups_indep.append(base_objects.ModelVariable(\ 2068 c.name, 2069 c.expr, 2070 c.type, 2071 c.depend)) 2072 2073 # Convert coupling expressions from Python to C++ 2074 for coup in self.coups_dep.values() + self.coups_indep: 2075 coup.expr = coup.name + " = " + self.p_to_cpp.parse(coup.expr) + ";"
2076 2077 # Routines for writing the parameter files 2078
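2079 - def write_parameter_class_files(self):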
2080 """Generate the parameters_model.h and parameters_model.cc 2081 files, which have the parameters and couplings for the model.""" 2082 2083 if not os.path.isdir(os.path.join(self.dir_path, self.include_dir)): 2084 os.makedirs(os.path.join(self.dir_path, self.include_dir)) 2085 if not os.path.isdir(os.path.join(self.dir_path, self.cc_file_dir)): 2086 os.makedirs(os.path.join(self.dir_path, self.cc_file_dir)) 2087 2088 parameter_h_file = os.path.join(self.dir_path, self.include_dir, 2089 'Parameters_%s.h' % self.model_name) 2090 parameter_cc_file = os.path.join(self.dir_path, self.cc_file_dir, 2091 'Parameters_%s.cc' % self.model_name) 2092 2093 file_h, file_cc = self.generate_parameters_class_files() 2094 2095 # Write the files 2096 writers.CPPWriter(parameter_h_file).writelines(file_h) 2097 writers.CPPWriter(parameter_cc_file).writelines(file_cc) 2098 2099 # Copy additional needed files 2100 for copy_file in self.copy_include_files: 2101 shutil.copy(os.path.join(_file_path, 'iolibs', 2102 'template_files',copy_file), 2103 os.path.join(self.dir_path, self.include_dir)) 2104 # Copy additional needed files 2105 for copy_file in self.copy_cc_files: 2106 shutil.copy(os.path.join(_file_path, 'iolibs', 2107 'template_files',copy_file), 2108 os.path.join(self.dir_path, self.cc_file_dir)) 2109 2110 logger.info("Created files %s and %s in directory" \ 2111 % (os.path.split(parameter_h_file)[-1], 2112 os.path.split(parameter_cc_file)[-1])) 2113 logger.info("%s and %s" % \ 2114 (os.path.split(parameter_h_file)[0], 2115 os.path.split(parameter_cc_file)[0]))
2116
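2117 - def generate_parameters_class_files(self):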
2118 """Create the content of the Parameters_model.h and .cc files""" 2119 2120 replace_dict = self.default_replace_dict 2121 2122 replace_dict['info_lines'] = get_mg5_info_lines() 2123 replace_dict['model_name'] = self.model_name 2124 2125 replace_dict['independent_parameters'] = \ 2126 "// Model parameters independent of aS\n" + \ 2127 self.write_parameters(self.params_indep) 2128 replace_dict['independent_couplings'] = \ 2129 "// Model parameters dependent on aS\n" + \ 2130 self.write_parameters(self.params_dep) 2131 replace_dict['dependent_parameters'] = \ 2132 "// Model couplings independent of aS\n" + \ 2133 self.write_parameters(self.coups_indep) 2134 replace_dict['dependent_couplings'] = \ 2135 "// Model couplings dependent on aS\n" + \ 2136 self.write_parameters(self.coups_dep.values()) 2137 2138 replace_dict['set_independent_parameters'] = \ 2139 self.write_set_parameters(self.params_indep) 2140 replace_dict['set_independent_couplings'] = \ 2141 self.write_set_parameters(self.coups_indep) 2142 replace_dict['set_dependent_parameters'] = \ 2143 self.write_set_parameters(self.params_dep) 2144 replace_dict['set_dependent_couplings'] = \ 2145 self.write_set_parameters(self.coups_dep.values()) 2146 2147 replace_dict['print_independent_parameters'] = \ 2148 self.write_print_parameters(self.params_indep) 2149 replace_dict['print_independent_couplings'] = \ 2150 self.write_print_parameters(self.coups_indep) 2151 replace_dict['print_dependent_parameters'] = \ 2152 self.write_print_parameters(self.params_dep) 2153 replace_dict['print_dependent_couplings'] = \ 2154 self.write_print_parameters(self.coups_dep.values()) 2155 2156 if 'include_prefix' not in replace_dict: 2157 replace_dict['include_prefix'] = '' 2158 2159 2160 file_h = self.read_template_file(self.param_template_h) % \ 2161 replace_dict 2162 file_cc = self.read_template_file(self.param_template_cc) % \ 2163 replace_dict 2164 2165 return file_h, file_cc
2166
2167 - def write_parameters(self, params):
2168 """Write out the definitions of parameters""" 2169 2170 # Create a dictionary from parameter type to list of parameter names 2171 type_param_dict = {} 2172 2173 for param in params: 2174 type_param_dict[param.type] = \ 2175 type_param_dict.setdefault(param.type, []) + [param.name] 2176 2177 # For each parameter type, write out the definition string 2178 # type parameters; 2179 res_strings = [] 2180 for key in type_param_dict: 2181 res_strings.append("%s %s;" % (self.type_dict[key], 2182 ",".join(type_param_dict[key]))) 2183 2184 return "\n".join(res_strings)
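For instance, two real parameters and one complex coupling (names illustrative) come out grouped by type:

    # double mdl_MZ,mdl_MW;
    # std::complex<double> GC_1;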
2185
2186 - def write_set_parameters(self, params):
2187 """Write out the lines setting the parameter values (name = expr;)""" 2188 2189 # For each parameter, write name = expr; 2190 2191 res_strings = [] 2192 for param in params: 2193 res_strings.append("%s" % param.expr) 2194 2195 # Correct width sign for Majorana particles (where the width 2196 # and mass need to have the same sign) 2197 for particle in self.model.get('particles'): 2198 if particle.is_fermion() and particle.get('self_antipart') and \ 2199 particle.get('width').lower() != 'zero': 2200 res_strings.append("if (%s < 0)" % particle.get('mass')) 2201 res_strings.append("%(width)s = -abs(%(width)s);" % \ 2202 {"width": particle.get('width')}) 2203 2204 return "\n".join(res_strings)
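The returned block is simply the stored expressions, followed by the width sign fix for Majorana fermions; with illustrative names:

    # aEW = 1./aEWM1;
    # if (mdl_Mneu1 < 0)
    # mdl_Wneu1 = -abs(mdl_Wneu1);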
2205
2206 - def write_print_parameters(self, params):
2207 """Write out the lines printing the parameter values""" 2208 2209 # For each parameter, write a cout statement with its name and value 2210 2211 res_strings = [] 2212 for param in params: 2213 res_strings.append("cout << setw(20) << \"%s \" << \"= \" << setiosflags(ios::scientific) << setw(10) << %s << endl;" % (param.name, param.name)) 2214 2215 return "\n".join(res_strings)
2216 2217 # Routines for writing the ALOHA files 2218
2219 - def write_aloha_routines(self):
2220 """Generate the hel_amps_model.h and hel_amps_model.cc files, which 2221 have the complete set of generalized Helas routines for the model""" 2222 2223 if not os.path.isdir(os.path.join(self.dir_path, self.include_dir)): 2224 os.makedirs(os.path.join(self.dir_path, self.include_dir)) 2225 if not os.path.isdir(os.path.join(self.dir_path, self.cc_file_dir)): 2226 os.makedirs(os.path.join(self.dir_path, self.cc_file_dir)) 2227 2228 model_h_file = os.path.join(self.dir_path, self.include_dir, 2229 'HelAmps_%s.h' % self.model_name) 2230 model_cc_file = os.path.join(self.dir_path, self.cc_file_dir, 2231 'HelAmps_%s.cc' % self.model_name) 2232 2233 replace_dict = {} 2234 2235 replace_dict['output_name'] = self.output_name 2236 replace_dict['info_lines'] = get_mg5_info_lines() 2237 replace_dict['namespace'] = self.namespace 2238 replace_dict['model_name'] = self.model_name 2239 2240 # Read in the template .h and .cc files, stripped of compiler 2241 # commands and namespaces 2242 template_h_files = self.read_aloha_template_files(ext = 'h') 2243 template_cc_files = self.read_aloha_template_files(ext = 'cc') 2244 2245 aloha_model = create_aloha.AbstractALOHAModel(self.model.get('name')) 2246 aloha_model.add_Lorentz_object(self.model.get('lorentz')) 2247 2248 if self.wanted_lorentz: 2249 aloha_model.compute_subset(self.wanted_lorentz) 2250 else: 2251 aloha_model.compute_all(save=False, custom_propa=True) 2252 2253 for abstracthelas in dict(aloha_model).values(): 2254 h_rout, cc_rout = abstracthelas.write(output_dir=None, language='CPP', 2255 mode='no_include') 2256 2257 template_h_files.append(h_rout) 2258 template_cc_files.append(cc_rout) 2259 2260 #aloha_writer = aloha_writers.ALOHAWriterForCPP(abstracthelas, 2261 # self.dir_path) 2262 #header = aloha_writer.define_header() 2263 #template_h_files.append(self.write_function_declaration(\ 2264 # aloha_writer, header)) 2265 #template_cc_files.append(self.write_function_definition(\ 2266 # aloha_writer, header)) 2267 2268 replace_dict['function_declarations'] = '\n'.join(template_h_files) 2269 replace_dict['function_definitions'] = '\n'.join(template_cc_files) 2270 2271 file_h = self.read_template_file(self.aloha_template_h) % replace_dict 2272 file_cc = self.read_template_file(self.aloha_template_cc) % replace_dict 2273 2274 # Write the files 2275 writers.CPPWriter(model_h_file).writelines(file_h) 2276 writers.CPPWriter(model_cc_file).writelines(file_cc) 2277 2278 logger.info("Created files %s and %s in directory" \ 2279 % (os.path.split(model_h_file)[-1], 2280 os.path.split(model_cc_file)[-1])) 2281 logger.info("%s and %s" % \ 2282 (os.path.split(model_h_file)[0], 2283 os.path.split(model_cc_file)[0]))
2284 2285
2286 - def read_aloha_template_files(self, ext):
2287 """Read all ALOHA template files with extension ext, strip them of 2288 compiler options and namespace options, and return in a list""" 2289 2290 template_files = [] 2291 for filename in misc.glob('*.%s' % ext, pjoin(MG5DIR, 'aloha','template_files')): 2292 file = open(filename, 'r') 2293 template_file_string = "" 2294 while file: 2295 line = file.readline() 2296 if len(line) == 0: break 2297 line = self.clean_line(line) 2298 if not line: 2299 continue 2300 template_file_string += line.strip() + '\n' 2301 template_files.append(template_file_string) 2302 2303 return template_files
2304 2305 # def write_function_declaration(self, aloha_writer, header): 2306 # """Write the function declaration for the ALOHA routine""" 2307 # 2308 # ret_lines = [] 2309 # for line in aloha_writer.write_h(header).split('\n'): 2310 # if self.compiler_option_re.match(line) or self.namespace_re.match(line): 2311 # # Strip out compiler flags and namespaces 2312 # continue 2313 # ret_lines.append(line) 2314 # return "\n".join(ret_lines) 2315 # 2316 # def write_function_definition(self, aloha_writer, header): 2317 # """Write the function definition for the ALOHA routine""" 2318 # 2319 # ret_lines = [] 2320 # for line in aloha_writer.write_cc(header).split('\n'): 2321 # if self.compiler_option_re.match(line) or self.namespace_re.match(line): 2322 # # Strip out compiler flags and namespaces 2323 # continue 2324 # ret_lines.append(line) 2325 # return "\n".join(ret_lines) 2326
2327 - def clean_line(self, line):
2328 """Strip a line of compiler options and namespace options.""" 2329 2330 if self.compiler_option_re.match(line) or self.namespace_re.match(line): 2331 return "" 2332 2333 return line
2334 2335 #=============================================================================== 2336 # Global helper methods 2337 #=============================================================================== 2338 @classmethod
2339 - def read_template_file(cls, filename, classpath=False):
2340 """Open a template file and return the contents.""" 2341 2342 return OneProcessExporterCPP.read_template_file(filename, classpath)
2343
2344 2345 #=============================================================================== 2346 # UFOModelConverterPythia8 2347 #=============================================================================== 2348 2349 -class UFOModelConverterPythia8(UFOModelConverterCPP):
2350 """ A converter of the UFO-MG5 Model to the Pythia 8 format """ 2351 2352 # Static variables (for inheritance) 2353 output_name = 'Pythia 8' 2354 namespace = 'Pythia8' 2355 2356 # Dictionaries for expression of MG5 SM parameters into Pythia 8 2357 slha_to_expr = {('SMINPUTS', (1,)): '1./csm->alphaEM(((pd->m0(23))*(pd->m0(23))))', 2358 ('SMINPUTS', (2,)): 'M_PI*csm->alphaEM(((pd->m0(23))*(pd->m0(23))))*((pd->m0(23))*(pd->m0(23)))/(sqrt(2.)*((pd->m0(24))*(pd->m0(24)))*(((pd->m0(23))*(pd->m0(23)))-((pd->m0(24))*(pd->m0(24)))))', 2359 ('SMINPUTS', (3,)): 'alpS', 2360 ('CKMBLOCK', (1,)): 'csm->VCKMgen(1,2)', 2361 } 2362 2363 # Template files to use 2364 param_template_h = 'pythia8_model_parameters_h.inc' 2365 param_template_cc = 'pythia8_model_parameters_cc.inc' 2366 template_paths = os.path.join(_file_path, 'iolibs', 'template_files', 'pythia8') 2367
2368 - def prepare_parameters(self):
2369 """Extract the model parameters from Pythia 8, and store them in 2370 the two lists params_indep and params_dep""" 2371 2372 # Keep only dependences on alphaS, to save time in execution 2373 keys = self.model['parameters'].keys() 2374 keys.sort(key=len) 2375 params_ext = [] 2376 for key in keys: 2377 if key == ('external',): 2378 params_ext += [p for p in self.model['parameters'][key] if p.name] 2379 elif 'aS' in key: 2380 for p in self.model['parameters'][key]: 2381 self.params_dep.append(base_objects.ModelVariable(p.name, 2382 p.name + " = " + \ 2383 self.p_to_cpp.parse(p.expr) + ';', 2384 p.type, 2385 p.depend)) 2386 else: 2387 for p in self.model['parameters'][key]: 2388 self.params_indep.append(base_objects.ModelVariable(p.name, 2389 p.name + " = " + \ 2390 self.p_to_cpp.parse(p.expr) + ';', 2391 p.type, 2392 p.depend)) 2393 2394 # For external parameters, want to use the internal Pythia 2395 # parameters for SM params and masses and widths. For other 2396 # parameters, want to read off the SLHA block code 2397 while params_ext: 2398 param = params_ext.pop(0) 2399 key = (param.lhablock, tuple(param.lhacode)) 2400 if 'aS' in self.slha_to_depend.setdefault(key, ()): 2401 # This value needs to be set event by event 2402 self.params_dep.insert(0, 2403 base_objects.ModelVariable(param.name, 2404 param.name + ' = ' + \ 2405 self.slha_to_expr[key] + ';', 2406 'real')) 2407 else: 2408 try: 2409 # This is an SM parameter defined above 2410 self.params_indep.insert(0, 2411 base_objects.ModelVariable(param.name, 2412 param.name + ' = ' + \ 2413 self.slha_to_expr[key] + ';', 2414 'real')) 2415 except Exception: 2416 # For Yukawa couplings, masses and widths, insert 2417 # the Pythia 8 value 2418 if param.lhablock == 'YUKAWA': 2419 self.slha_to_expr[key] = 'pd->mRun(%i, pd->m0(24))' \ 2420 % param.lhacode[0] 2421 if param.lhablock == 'MASS': 2422 self.slha_to_expr[key] = 'pd->m0(%i)' \ 2423 % param.lhacode[0] 2424 if param.lhablock == 'DECAY': 2425 self.slha_to_expr[key] = \ 2426 'pd->mWidth(%i)' % param.lhacode[0] 2427 if key in self.slha_to_expr: 2428 self.params_indep.insert(0,\ 2429 base_objects.ModelVariable(param.name, 2430 param.name + "=" + self.slha_to_expr[key] \ 2431 + ';', 2432 'real')) 2433 else: 2434 # This is a BSM parameter which is read from SLHA 2435 if len(param.lhacode) == 1: 2436 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %s)){\n" % \ 2437 (param.lhablock.lower(), 2438 param.lhacode[0], 2439 param.name) + \ 2440 ("cout << \"Warning, setting %s to %e\" << endl;\n" \ 2441 + "%s = %e;}") % (param.name, param.value.real, 2442 param.name, param.value.real) 2443 elif len(param.lhacode) == 2: 2444 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %d, %s)){\n" % \ 2445 (param.lhablock.lower(), 2446 param.lhacode[0], 2447 param.lhacode[1], 2448 param.name) + \ 2449 ("cout << \"Warning, setting %s to %e\" << endl;\n" \ 2450 + "%s = %e;}") % (param.name, param.value.real, 2451 param.name, param.value.real) 2452 elif len(param.lhacode) == 3: 2453 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %d, %d, %s)){\n" % \ 2454 (param.lhablock.lower(), 2455 param.lhacode[0], 2456 param.lhacode[1], 2457 param.lhacode[2], 2458 param.name) + \ 2459 ("cout << \"Warning, setting %s to %e\" << endl;\n" \ 2460 + "%s = %e;}") % (param.name, param.value.real, 2461 param.name, param.value.real) 2462 else: 2463 raise MadGraph5Error("Only support for SLHA blocks with 1 or 2 indices") 2464 self.params_indep.insert(0, 2465 base_objects.ModelVariable(param.name, 2466 expression, 
2467 'real'))
2468
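For a BSM parameter the generated Pythia 8 code falls back on the UFO default value when the SLHA entry is missing; an illustrative one-index case:

    # if(!slhaPtr->getEntry<double>("frblock", 1, mdl_cabi)){
    # cout << "Warning, setting mdl_cabi to 2.277360e-01" << endl;
    # mdl_cabi = 2.277360e-01;}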
2469 - def write_makefile(self):
2470 """Generate the Makefile, which creates library files.""" 2471 2472 makefilename = os.path.join(self.dir_path, self.cc_file_dir, 2473 'Makefile') 2474 2475 replace_dict = {} 2476 2477 replace_dict['info_lines'] = get_mg5_info_lines() 2478 replace_dict['model'] = self.model_name 2479 2480 if self.default_replace_dict['version'] == "8.2": 2481 path = 'pythia8.2_makefile.inc' 2482 else: 2483 path = 'pythia8_makefile.inc' 2484 makefile = self.read_template_file(path) % replace_dict 2485 2486 # Write the files 2487 open(makefilename, 'w').write(makefile) 2488 2489 logger.info("Created %s in directory %s" \ 2490 % (os.path.split(makefilename)[-1], 2491 os.path.split(makefilename)[0]))
2492
2493 - def write_param_card(self):
2494 """Generate the param_card for the model.""" 2495 2496 paramcardname = os.path.join(self.dir_path, self.cc_file_dir, 2497 'param_card_%s.dat' % self.model_name) 2498 # Write out param_card 2499 open(paramcardname, 'w').write(\ 2500 self.model.write_param_card()) 2501 2502 logger.info("Created %s in directory %s" \ 2503 % (os.path.split(paramcardname)[-1], 2504 os.path.split(paramcardname)[0]))
2505 2506 #=============================================================================== 2507 # Global helper methods 2508 #=============================================================================== 2509 @classmethod
2510 - def read_template_file(cls, *args, **opts):
2511 """Open a template file and return the contents.""" 2512 2513 return OneProcessExporterPythia8.read_template_file(*args, **opts)
2514
2515 -def ExportCPPFactory(cmd, group_subprocesses=False, cmd_options={}):
2516 """ Determine which Export class is required. cmd is the command 2517 interface containing all potentially useful information. 2518 """ 2519 2520 opt = dict(cmd.options) 2521 opt['output_options'] = cmd_options 2522 cformat = cmd._export_format 2523 2524 if cformat == 'pythia8': 2525 return ProcessExporterPythia8(cmd._export_dir, opt) 2526 elif cformat == 'standalone_cpp': 2527 return ProcessExporterCPP(cmd._export_dir, opt) 2528 elif cformat == 'matchbox_cpp': 2529 return ProcessExporterMatchbox(cmd._export_dir, opt) 2530 elif cformat == 'plugin': 2531 return cmd._export_plugin(cmd._export_dir, opt)
2532
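A hedged sketch of how the factory fits into an output run; cmd is the MG5 command interface, and the _curr_model attribute name is an assumption of this sketch:

    exporter = ExportCPPFactory(cmd)          # ProcessExporterCPP for 'standalone_cpp'
    exporter.copy_template(cmd._curr_model)   # assumed attribute holding the loaded model
    # ... one generate_subprocess_directory() call per matrix element ...
    exporter.convert_model(cmd._curr_model)
    exporter.finalize()                       # compiles the model library in src/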