Package aloha :: Module create_aloha
[hide private]
[frames] | [no frames]

Source Code for Module aloha.create_aloha

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2010 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from __future__ import division 
  16  import cmath 
  17  import copy 
  18  import cPickle 
  19  import glob 
  20  import logging 
  21  import numbers 
  22  import os 
  23  import re 
  24  import shutil 
  25  import sys 
  26  import time 
  27  from madgraph.interface.tutorial_text import output 
  28   
  29  root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0] 
  30  sys.path.append(root_path) 
  31  from aloha.aloha_object import * 
  32  import aloha 
  33  import aloha.aloha_writers as aloha_writers 
  34  import aloha.aloha_lib as aloha_lib 
  35  import aloha.aloha_object as aloha_object 
  36  import aloha.aloha_parsers as aloha_parsers 
  37  import aloha.aloha_fct as aloha_fct 
  38  try: 
  39      import madgraph.iolibs.files as files 
  40      import madgraph.various.misc as misc 
  41  except Exception: 
  42      import aloha.files as files 
  43      import aloha.misc as misc 
  44       
# Directory containing this module (used to locate template files).
aloha_path = os.path.dirname(os.path.realpath(__file__))
logger = logging.getLogger('ALOHA')

# Offset added to a fermion index when a charge-conjugation (C) matrix is
# inserted, so conjugated indices never collide with the plain ones.
_conjugate_gap = 50
# Multiplier used to build the two distinct Lorentz indices of a spin-2 leg.
_spin2_mult = 1000

pjoin = os.path.join  # shorthand used throughout the module

# Re-export of the ALOHA error class from the aloha package.
ALOHAERROR = aloha.ALOHAERROR
class AbstractRoutine(object):
    """Store the result of the computation of a Helicity Routine.

    Instances are stored in the AbstractALOHAModel dictionary and passed
    to the aloha_writers to be turned into Fortran/C++/Python code.
    """

    def __init__(self, expr, outgoing, spins, name, infostr, denom=None):
        """Store the information defining the routine.

        expr: analytical expression of the routine
        outgoing: index of the off-shell particle (0 for an amplitude)
        spins: list of 2s+1 values, one per external particle
        name: name of the lorentz structure
        infostr: human-readable description of the expression
        denom: optional denominator (set for custom propagators)
        """
        self.spins = spins
        self.expr = expr
        self.denominator = denom
        self.name = name
        self.outgoing = outgoing
        self.infostr = infostr
        self.symmetries = []   # outgoing indices whose routine equals this one
        self.combined = []     # lorentz structures combined with this one
        self.tag = []          # special tags (conjugation, loop, ...)
        self.contracted = {}   # shared TMP sub-expressions

    def add_symmetry(self, outgoing):
        """Declare that the routine for `outgoing` is identical to this one."""
        if not outgoing in self.symmetries:
            self.symmetries.append(outgoing)

    def add_combine(self, lor_list):
        """Add a combine rule (sum with the given other lorentz structures)."""
        if lor_list not in self.combined:
            self.combined.append(lor_list)

    def write(self, output_dir, language='Fortran', mode='self', combine=True, **opt):
        """Write the routine (and, if requested, its combinations) to disk.

        Returns the generated text; when multiple-precision is active a
        'MP' variant is appended via a recursive call.
        """
        writer = aloha_writers.WriterFactory(self, language, output_dir, self.tag)
        text = writer.write(mode=mode, **opt)
        if combine:
            for grouped in self.combined:
                if isinstance(text, tuple):
                    text = tuple([old.__add__(new) for old, new in zip(text,
                                 writer.write_combined(grouped, mode=mode + 'no_include', **opt))])
                else:
                    text += writer.write_combined(grouped, mode=mode + 'no_include', **opt)
        if aloha.mp_precision and 'MP' not in self.tag:
            self.tag.append('MP')
            text += self.write(output_dir, language, mode, **opt)
        return text

    def get_info(self, info):
        """Return some information on the routine (currently only "rank")."""
        if info == "rank":
            assert isinstance(self.expr, aloha_lib.SplitCoefficient)
            rank = 1
            for coeff in self.expr:
                rank = max(sum(coeff), rank)
            return rank - 1  # due to the coefficient associated to the wavefunction
        else:
            # bugfix: use the call form instead of the python2-only
            # "raise CLASS, msg" statement so the module stays importable
            # under python3
            raise ALOHAERROR('%s is not a valid information that can be computed' % info)
class AbstractRoutineBuilder(object):
    """ Launch the creation of the Helicity Routine"""

    # Cache of already-computed propagator expressions (class level so all
    # builder instances share the same library).
    prop_lib = {} # Store computation for the propagator
    # Number of routines created so far (for statistics/logging only).
    counter = 0   # counter for statistic only

    class AbstractALOHAError(Exception):
        """ An error class for ALOHA"""
125 - def __init__(self, lorentz, model=None):
126 """ initialize the run 127 lorentz: the lorentz information analyzed (UFO format) 128 language: define in which language we write the output 129 modes: 0 for all incoming particles 130 >0 defines the outgoing part (start to count at 1) 131 """ 132 133 self.spins = [s for s in lorentz.spins] 134 self.name = lorentz.name 135 self.conjg = [] 136 self.tag = [] 137 self.outgoing = None 138 self.lorentz_expr = lorentz.structure 139 self.routine_kernel = None 140 self.spin2_massless = False 141 self.spin32_massless = False 142 self.contracted = {} 143 self.fct = {} 144 self.model = model 145 self.denominator = None 146 # assert model 147 148 self.lastprint = 0 # to avoid that ALOHA makes too many printout 149 150 if hasattr(lorentz, 'formfactors') and lorentz.formfactors: 151 for formf in lorentz.formfactors: 152 pat = re.compile(r'\b%s\b' % formf.name) 153 self.lorentz_expr = pat.sub('(%s)' % formf.value, self.lorentz_expr)
154
155 - def compute_routine(self, mode, tag=[], factorize=True):
156 """compute the expression and return it""" 157 self.outgoing = mode 158 self.tag = tag 159 if __debug__: 160 if mode == 0: 161 assert not any(t.startswith('L') for t in tag) 162 self.expr = self.compute_aloha_high_kernel(mode, factorize) 163 return self.define_simple_output()
164
165 - def define_all_conjugate_builder(self, pair_list):
166 """ return the full set of AbstractRoutineBuilder linked to fermion 167 clash""" 168 169 solution = [] 170 171 for i, pair in enumerate(pair_list): 172 new_builder = self.define_conjugate_builder(pair) 173 solution.append(new_builder) 174 solution += new_builder.define_all_conjugate_builder(pair_list[i+1:]) 175 return solution
176
177 - def define_conjugate_builder(self, pairs=1):
178 """ return a AbstractRoutineBuilder for the conjugate operation. 179 If they are more than one pair of fermion. Then use pair to claim which 180 one is conjugated""" 181 182 new_builder = copy.copy(self) 183 new_builder.conjg = self.conjg[:] 184 try: 185 for index in pairs: 186 new_builder.apply_conjugation(index) 187 except TypeError: 188 new_builder.apply_conjugation(pairs) 189 return new_builder
190
191 - def apply_conjugation(self, pair=1):
192 """ apply conjugation on self object""" 193 194 nb_fermion = len([1 for s in self.spins if s % 2 == 0]) 195 if isinstance(pair, tuple): 196 if len(pair) ==1 : 197 pair = pair[0] 198 else: 199 raise Exception 200 201 202 if (pair > 1 or nb_fermion >2) and not self.conjg: 203 # self.conjg avoif multiple check 204 data = aloha_fct.get_fermion_flow(self.lorentz_expr, nb_fermion) 205 target = dict([(2*i+1,2*i+2) for i in range(nb_fermion//2)]) 206 if not data == target: 207 text = """Unable to deal with 4(or more) point interactions 208 in presence of majorana particle/flow violation""" 209 raise ALOHAERROR, text 210 211 old_id = 2 * pair - 1 212 new_id = _conjugate_gap + old_id 213 214 self.kernel_tag = set() 215 aloha_lib.KERNEL.use_tag = set() 216 if not self.routine_kernel or isinstance(self.routine_kernel, str): 217 self.routine_kernel = eval(self.parse_expression(self.lorentz_expr)) 218 self.kernel_tag = aloha_lib.KERNEL.use_tag 219 # We need to compute C Gamma^T C^-1 = C_ab G_cb (-1) C_cd 220 # = C_ac G_bc (-1) C_bd = C_ac G_bc C_db 221 self.routine_kernel = \ 222 C(new_id, old_id + 1) * self.routine_kernel * C(new_id + 1, old_id) 223 224 self.lorentz_expr = '('+self.lorentz_expr+') * C(%s,%s) * C(%s,%s)' % \ 225 (new_id, old_id + 1, new_id + 1, old_id ) 226 227 self.conjg.append(pair)
228 229
230 - def define_simple_output(self):
231 """ define a simple output for this AbstractRoutine """ 232 233 infostr = str(self.lorentz_expr) 234 235 output = AbstractRoutine(self.expr, self.outgoing, self.spins, self.name, \ 236 infostr, self.denominator) 237 output.contracted = dict([(name, aloha_lib.KERNEL.reduced_expr2[name]) 238 for name in aloha_lib.KERNEL.use_tag 239 if name.startswith('TMP')]) 240 241 output.fct = dict([(name, aloha_lib.KERNEL.reduced_expr2[name]) 242 for name in aloha_lib.KERNEL.use_tag 243 if name.startswith('FCT')]) 244 245 output.tag = [t for t in self.tag if not t.startswith('C')] 246 output.tag += ['C%s' % pair for pair in self.conjg] 247 return output
248
249 - def parse_expression(self, expr=None, need_P_sign=False):
250 """change the sign of P for outcoming fermion in order to 251 correct the mismatch convention between HELAS and FR""" 252 253 if not expr: 254 expr = self.lorentz_expr 255 256 if need_P_sign: 257 expr = re.sub(r'\b(P|PSlash)\(', r'-\1(', expr) 258 259 calc = aloha_parsers.ALOHAExpressionParser() 260 lorentz_expr = calc.parse(expr) 261 return lorentz_expr
262
263 - def compute_aloha_high_kernel(self, mode, factorize=True):
264 """compute the abstract routine associate to this mode """ 265 266 # reset tag for particles 267 aloha_lib.KERNEL.use_tag=set() 268 #multiply by the wave functions 269 nb_spinor = 0 270 outgoing = self.outgoing 271 if (outgoing + 1) // 2 in self.conjg: 272 #flip the outgoing tag if in conjugate 273 outgoing = outgoing + outgoing % 2 - (outgoing +1) % 2 274 275 if not self.routine_kernel: 276 AbstractRoutineBuilder.counter += 1 277 if self.tag == []: 278 logger.debug( 'aloha creates %s routines', self.name) 279 else: 280 logger.debug('aloha creates %s set of routines with options: %s' \ 281 % (self.name, ','.join(self.tag)) ) 282 try: 283 lorentz = self.parse_expression() 284 self.routine_kernel = lorentz 285 lorentz = eval(lorentz) 286 except NameError as error: 287 logger.error('unknow type in Lorentz Evaluation:%s'%str(error)) 288 raise ALOHAERROR, 'unknow type in Lorentz Evaluation: %s ' % str(error) 289 else: 290 self.kernel_tag = set(aloha_lib.KERNEL.use_tag) 291 elif isinstance(self.routine_kernel,str): 292 lorentz = eval(self.routine_kernel) 293 aloha_lib.KERNEL.use_tag = set(self.kernel_tag) 294 else: 295 lorentz = copy.copy(self.routine_kernel) 296 aloha_lib.KERNEL.use_tag = set(self.kernel_tag) 297 for (i, spin ) in enumerate(self.spins): 298 id = i + 1 299 #Check if this is the outgoing particle 300 if id == outgoing: 301 302 # check if we need a special propagator 303 propa = [t[1:] for t in self.tag if t.startswith('P')] 304 if propa == ['0']: 305 massless = True 306 self.denominator = None 307 elif propa == []: 308 massless = False 309 self.denominator = None 310 else: 311 lorentz *= complex(0,1) * self.get_custom_propa(propa[0], spin, id) 312 continue 313 314 315 316 if spin in [1,-1]: 317 lorentz *= complex(0,1) 318 elif spin == 2: 319 # shift and flip the tag if we multiply by C matrices 320 if (id + 1) // 2 in self.conjg: 321 id += _conjugate_gap + id % 2 - (id +1) % 2 322 if (id % 2): 323 #propagator outcoming 324 lorentz *= complex(0,1) * 
SpinorPropagatorout(id, 'I2', outgoing) 325 else: 326 # #propagator incoming 327 lorentz *= complex(0,1) * SpinorPropagatorin('I2', id, outgoing) 328 elif spin == 3 : 329 if massless or not aloha.unitary_gauge: 330 lorentz *= VectorPropagatorMassless(id, 'I2', id) 331 else: 332 lorentz *= VectorPropagator(id, 'I2', id) 333 elif spin == 4: 334 # shift and flip the tag if we multiply by C matrices 335 if (id + 1) // 2 in self.conjg: 336 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2 337 else: 338 spin_id = id 339 nb_spinor += 1 340 if not massless and (spin_id % 2): 341 lorentz *= complex(0,1) * Spin3halfPropagatorout(id, 'I2', spin_id,'I3', outgoing) 342 elif not massless and not (spin_id % 2): 343 lorentz *= complex(0,1) * Spin3halfPropagatorin('I2', id , 'I3', spin_id, outgoing) 344 elif spin_id %2: 345 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessOut(id, 'I2', spin_id,'I3', outgoing) 346 else : 347 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessIn('I2', id, 'I3', spin_id, outgoing) 348 349 elif spin == 5 : 350 #lorentz *= 1 # delayed evaluation (fastenize the code) 351 if massless: 352 lorentz *= complex(0,1) * Spin2masslessPropagator(_spin2_mult + id, \ 353 2 * _spin2_mult + id,'I2','I3') 354 else: 355 lorentz *= complex(0,1) * Spin2Propagator(_spin2_mult + id, \ 356 2 * _spin2_mult + id,'I2','I3', id) 357 else: 358 raise self.AbstractALOHAError( 359 'The spin value %s (2s+1) is not supported yet' % spin) 360 else: 361 # This is an incoming particle 362 if spin in [1,-1]: 363 lorentz *= Scalar(id) 364 elif spin == 2: 365 # shift the tag if we multiply by C matrices 366 if (id+1) // 2 in self.conjg: 367 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2 368 else: 369 spin_id = id 370 lorentz *= Spinor(spin_id, id) 371 elif spin == 3: 372 lorentz *= Vector(id, id) 373 elif spin == 4: 374 # shift the tag if we multiply by C matrices 375 if (id+1) // 2 in self.conjg: 376 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2 377 else: 378 spin_id 
= id 379 nb_spinor += 1 380 lorentz *= Spin3Half(id, spin_id, id) 381 elif spin == 5: 382 lorentz *= Spin2(1 * _spin2_mult + id, 2 * _spin2_mult + id, id) 383 else: 384 raise self.AbstractALOHAError( 385 'The spin value %s (2s+1) is not supported yet' % spin) 386 387 # If no particle OffShell 388 if not outgoing: 389 lorentz *= complex(0,-1) 390 # Propagator are taken care separately 391 392 lorentz = lorentz.simplify() 393 394 # Modify the expression in case of loop-pozzorini 395 if any((tag.startswith('L') for tag in self.tag if len(tag)>1)): 396 return self.compute_loop_coefficient(lorentz, outgoing) 397 398 lorentz = lorentz.expand() 399 lorentz = lorentz.simplify() 400 401 if factorize: 402 lorentz = lorentz.factorize() 403 404 lorentz.tag = set(aloha_lib.KERNEL.use_tag) 405 return lorentz
406 407 @staticmethod
408 - def mod_propagator_expression(tag, text):
409 """Change the index of the propagator to match the current need""" 410 411 data = re.split(r'(\b[a-zA-Z]\w*?)\(([\'\w,\s\"\+\-]*?)\)',text) 412 to_change = {} 413 for old, new in tag.items(): 414 if isinstance(new, str): 415 new='\'%s\'' % new 416 else: 417 new = str(new) 418 to_change[r'%s' % old] = new 419 pos=-2 420 while pos +3 < len(data): 421 pos = pos+3 422 ltype = data[pos] 423 if ltype != 'complex': 424 data[pos+1] = re.sub(r'\b(?<!-)(%s)\b' % '|'.join(to_change), 425 lambda x: to_change[x.group()], data[pos+1]) 426 data[pos+1] = '(%s)' % data[pos+1] 427 text=''.join(data) 428 return text
429
    def get_custom_propa(self, propa, spin, id):
        """Return the ALOHA object associated to the user-defined propagator.

        propa: name of the propagator inside self.model.propagators
        spin: 2s+1 value of the off-shell leg
        id: index of the off-shell leg

        Side effect: sets self.denominator when the UFO propagator defines
        a denominator.
        """

        propagator = getattr(self.model.propagators, propa)
        numerator = propagator.numerator
        denominator = propagator.denominator

        # Find how to make the replacement for the various tags that may
        # appear in the UFO propagator expression ('1', '2', '51', '52', 'id')
        needPflipping = False
        if spin in [1, -1]:
            # scalar: only the particle id is needed
            tag = {'id': id}
        elif spin == 2:
            # shift and flip the tag if we multiply by C matrices
            if (id + 1) // 2 in self.conjg:
                spin_id = id + _conjugate_gap + id % 2 - (id + 1) % 2
            else:
                spin_id = id
            if (spin_id % 2):
                # propagator outcoming: P sign must be flipped (HELAS vs FR)
                needPflipping = True
                tag = {'1': spin_id, '2': 'I2', 'id': id}
            else:
                tag = {'1': 'I2', '2': spin_id, 'id': id}
        elif spin == 3:
            tag = {'1': id, '2': 'I2', 'id': id}
        elif spin == 4:
            delta = lambda i, j: aloha_object.Identity(i, j)
            deltaL = lambda i, j: aloha_object.IdentityL(i, j)
            # shift and flip the tag if we multiply by C matrices
            if (id + 1) // 2 in self.conjg:
                spin_id = id + _conjugate_gap + id % 2 - (id + 1) % 2
            else:
                spin_id = id
            tag = {'1': 'pr1', '2': 'pr2', 'id': id}
            if spin_id % 2:
                needPflipping = True
                # propaR is needed to do the correct contraction since we need
                # to distinguish the spin index from the lorentz index
                propaR = deltaL('pr1', id) * deltaL('pr2', 'I2') * delta('pr1', spin_id) * delta('pr2', 'I3')
            else:
                propaR = deltaL('pr1', id) * deltaL('pr2', 'I2') * delta('pr2', spin_id) * delta('pr1', 'I3')
            #numerator += "*deltaL('pr_1',id) * deltaL('pr_2', 'I2') * delta('pr_1', spin_id) * delta('pr_2', 'I3')"
        elif spin == 5:
            tag = {'1': _spin2_mult + id, '2': 'I2',
                   '51': 2 * _spin2_mult + id, '52': 'I3', 'id': id}

        # substitute the tags in the UFO expressions, then parse them
        numerator = self.mod_propagator_expression(tag, numerator)
        if denominator:
            denominator = self.mod_propagator_expression(tag, denominator)
        numerator = self.parse_expression(numerator, needPflipping)

        if denominator:
            self.denominator = self.parse_expression(denominator, needPflipping)
            self.denominator = eval(self.denominator)
            if not isinstance(self.denominator, numbers.Number):
                self.denominator = self.denominator.simplify().expand().simplify().get((0,))

        if spin == 4:
            return eval(numerator) * propaR
        else:
            return eval(numerator)
    def compute_loop_coefficient(self, lorentz, outgoing):
        """Split the expression into loop-momentum coefficients
        (open-loop / loop-pozzorini mode).

        lorentz: the simplified expression of the routine
        outgoing: index of the off-shell leg

        Returns a SplitCoefficient-like mapping from momentum-power keys to
        factorized sub-expressions.
        """

        # index of the incoming open-loop leg (tag 'L<i>')
        l_in = [int(tag[1:]) for tag in self.tag if tag.startswith('L')][0]
        if (l_in + 1) // 2 in self.conjg:
            # flip the outgoing tag if in conjugate
            l_in = l_in + l_in % 2 - (l_in + 1) % 2
        assert l_in != outgoing, 'incoming Open Loop can not be the outcoming one'

        # modify the expression for the momenta
        # P_i -> P_i + P_L and P_o -> -P_o - P_L
        Pdep = [aloha_lib.KERNEL.get(P) for P in lorentz.get_all_var_names()
                if P.startswith('_P')]
        Pdep = set([P for P in Pdep if P.particle in [outgoing, l_in]])
        for P in Pdep:
            if P.particle == l_in:
                sign = 1
            else:
                sign = -1
            id = P.id
            lorentz_ind = P.lorentz_ind[0]
            P_Lid = aloha_object.P(lorentz_ind, 'L')
            P_obj = aloha_object.P(lorentz_ind, P.particle)
            new_expr = sign * (P_Lid + P_obj)
            lorentz = lorentz.replace(id, new_expr)

        # Compute the variables from which we need to split the expression:
        # the loop-momentum components and the loop-leg wavefunction entries
        var_veto = ['PL_0', 'PL_1', 'PL_2', 'PL_3']
        spin = aloha_writers.WriteALOHA.type_to_variable[abs(self.spins[l_in - 1])]
        size = aloha_writers.WriteALOHA.type_to_size[spin] - 1
        var_veto += ['%s%s_%s' % (spin, l_in, i) for i in range(1, size)]
        # compute their unique identifiant
        veto_ids = aloha_lib.KERNEL.get_ids(var_veto)

        lorentz = lorentz.expand(veto=veto_ids)
        lorentz = lorentz.simplify()
        coeff_expr = lorentz.split(veto_ids)

        # simplify/factorize each coefficient independently
        for key, expr in coeff_expr.items():
            expr = expr.simplify()
            coeff_expr[key] = expr.factorize()
        coeff_expr.tag = set(aloha_lib.KERNEL.use_tag)

        return coeff_expr
540 - def define_lorentz_expr(self, lorentz_expr):
541 """Define the expression""" 542 543 self.expr = lorentz_expr
544
545 - def define_routine_kernel(self, lorentz=None):
546 """Define the kernel at low level""" 547 548 if not lorentz: 549 logger.info('compute kernel %s' % self.counter) 550 AbstractRoutineBuilder.counter += 1 551 lorentz = eval(self.lorentz_expr) 552 553 if isinstance(lorentz, numbers.Number): 554 self.routine_kernel = lorentz 555 return lorentz 556 lorentz = lorentz.simplify() 557 lorentz = lorentz.expand() 558 lorentz = lorentz.simplify() 559 560 self.routine_kernel = lorentz 561 return lorentz
562 563 564 @staticmethod
565 - def get_routine_name(name, outgoing):
566 """return the name of the """ 567 568 name = '%s_%s' % (name, outgoing) 569 return name
570 571 @classmethod
572 - def load_library(cls, tag):
573 # load the library 574 if tag in cls.prop_lib: 575 return 576 else: 577 cls.prop_lib = create_prop_library(tag, cls.aloha_lib)
578
class CombineRoutineBuilder(AbstractRoutineBuilder):
    """A special builder for combined routines, used when those need to be
    written out explicitly."""

    def __init__(self, l_lorentz, model=None):
        """Initialize the run.

        l_lorentz: list of lorentz information analyzed (UFO format)
        model: optional UFO model
        """
        AbstractRoutineBuilder.__init__(self, l_lorentz[0], model)
        first = l_lorentz[0]
        self.spins = first.spins
        names = [lor.name for lor in l_lorentz]
        self.name = aloha_writers.combine_name(names[0], names[1:], None)
        self.conjg = []
        self.tag = []
        self.outgoing = None
        # sum of the individual structures, each weighted by its coupling
        self.lorentz_expr = ' + '.join(
            'Coup(%s) * (%s)' % (position + 1, lor.structure)
            for position, lor in enumerate(l_lorentz))
        self.routine_kernel = None
        self.contracted = {}
        self.fct = {}
class AbstractALOHAModel(dict):
    """ A class to build and store the full set of Abstract ALOHA Routine"""
    # The mapping goes from (lorentz_name, outgoing_index) to AbstractRoutine.

    # throttle for the number of printouts (shared across instances)
    lastprint = 0
612 - def __init__(self, model_name, write_dir=None, format='Fortran', 613 explicit_combine=False):
614 """ load the UFO model and init the dictionary """ 615 616 # Option 617 self.explicit_combine = explicit_combine 618 619 # Extract the model name if combined with restriction 620 model_name_pattern = re.compile("^(?P<name>.+)-(?P<rest>[\w\d_]+)$") 621 model_name_re = model_name_pattern.match(model_name) 622 if model_name_re: 623 name = model_name_re.group('name') 624 rest = model_name_re.group("rest") 625 if rest == 'full' or \ 626 os.path.isfile(os.path.join(root_path, "models", name, 627 "restrict_%s.dat" % rest)): 628 model_name = model_name_re.group("name") 629 630 # load the UFO model 631 try: 632 python_pos = model_name 633 __import__(python_pos) 634 except Exception: 635 python_pos = 'models.%s' % model_name 636 __import__(python_pos) 637 self.model = sys.modules[python_pos] 638 # find the position on the disk 639 self.model_pos = os.path.dirname(self.model.__file__) 640 641 # list the external routine 642 self.external_routines = [] 643 644 # init the dictionary 645 dict.__init__(self) 646 self.symmetries = {} 647 self.multiple_lor = {} 648 649 if write_dir: 650 self.main(write_dir,format=format)
651
652 - def main(self, output_dir, format='Fortran'):
653 """ Compute if not already compute. 654 Write file in models/MY_MODEL/MY_FORMAT. 655 copy the file to output_dir 656 """ 657 ext = {'Fortran':'f','Python':'py','CPP':'h'} 658 659 660 # Check if a pickle file exists 661 if not self.load(): 662 self.compute_all() 663 logger.info(' %s aloha routine' % len(self)) 664 665 # Check that output directory exists 666 if not output_dir: 667 output_dir = os.path.join(self.model_pos, format.lower()) 668 logger.debug('aloha output dir is %s' % output_dir) 669 if not os.path.exists(output_dir): 670 os.mkdir(output_dir) 671 672 # Check that all routine are generated at default places: 673 for (name, outgoing), abstract in self.items(): 674 routine_name = AbstractRoutineBuilder.get_routine_name(name, outgoing) 675 if not os.path.exists(os.path.join(output_dir, routine_name) + '.' + ext[format]): 676 abstract.write(output_dir, format) 677 else: 678 logger.info('File for %s already present, skip the writing of this file' % routine_name)
679 680
681 - def save(self, filepos=None):
682 """ save the current model in a pkl file """ 683 684 logger.info('save the aloha abstract routine in a pickle file') 685 if not filepos: 686 filepos = os.path.join(self.model_pos,'aloha.pkl') 687 688 fsock = open(filepos, 'w') 689 cPickle.dump(dict(self), fsock)
690
    def load(self, filepos=None):
        """Reload the routine dictionary from a pickle file.

        NOTE(review): the unconditional `return False` below disables the
        cache reload entirely — everything after it is unreachable. This
        looks like a deliberate way to force recomputation, but should be
        confirmed; otherwise the early return should be removed.
        """
        return False
        if not filepos:
            filepos = os.path.join(self.model_pos, 'aloha.pkl')
        if os.path.exists(filepos):
            fsock = open(filepos, 'r')
            self.update(cPickle.load(fsock))
            return True
        else:
            return False
703 - def get(self, lorentzname, outgoing):
704 """ return the AbstractRoutine with a given lorentz name, and for a given 705 outgoing particle """ 706 707 try: 708 return self[(lorentzname, outgoing)] 709 except Exception: 710 logger.warning('(%s, %s) is not a valid key' % 711 (lorentzname, outgoing) ) 712 return None
713
714 - def get_info(self, info, lorentzname, outgoing, tag, cached=False):
715 """return some information about the aloha routine 716 - "rank": return the rank of the loop function 717 If the cached option is set to true, then the result is stored and 718 recycled if possible. 719 """ 720 721 if not aloha.loop_mode and any(t.startswith('L') for t in tag): 722 aloha.loop_mode = True 723 724 725 returned_dict = {} 726 # Make sure the input argument is a list 727 if isinstance(info, str): 728 infos = [info] 729 else: 730 infos = info 731 732 # First deal with the caching of infos 733 if hasattr(self, 'cached_interaction_infos'): 734 # Now try to recover it 735 for info_key in infos: 736 try: 737 returned_dict[info] = self.cached_interaction_infos[\ 738 (lorentzname,outgoing,tuple(tag),info)] 739 except KeyError: 740 # Some information has never been computed before, so they 741 # will be computed later. 742 pass 743 elif cached: 744 self.cached_interaction_infos = {} 745 746 init = False 747 for info_key in infos: 748 if info_key in returned_dict: 749 continue 750 elif not init: 751 # need to create the aloha object 752 lorentz = eval('self.model.lorentz.%s' % lorentzname) 753 abstract = AbstractRoutineBuilder(lorentz) 754 routine = abstract.compute_routine(outgoing, tag, factorize=False) 755 init = True 756 757 assert 'routine' in locals() 758 returned_dict[info_key] = routine.get_info(info_key) 759 if cached: 760 # Cache the information computed 761 self.cached_interaction_infos[\ 762 (lorentzname,outgoing,tuple(tag),info_key)]=returned_dict[info_key] 763 764 if isinstance(info, str): 765 return returned_dict[info] 766 else: 767 return returned_dict
768
769 - def set(self, lorentzname, outgoing, abstract_routine):
770 """ add in the dictionary """ 771 772 self[(lorentzname, outgoing)] = abstract_routine
773
774 - def compute_all(self, save=True, wanted_lorentz = [], custom_propa=False):
775 """ define all the AbstractRoutine linked to a model """ 776 777 # Search identical particles in the vertices in order to avoid 778 #to compute identical contribution 779 self.look_for_symmetries() 780 conjugate_list = self.look_for_conjugate() 781 self.look_for_multiple_lorentz_interactions() 782 783 if not wanted_lorentz: 784 wanted_lorentz = [l.name for l in self.model.all_lorentz] 785 for lorentz in self.model.all_lorentz: 786 if not lorentz.name in wanted_lorentz: 787 # Only include the routines we ask for 788 continue 789 790 if -1 in lorentz.spins: 791 # No Ghost in ALOHA 792 continue 793 794 if lorentz.structure == 'external': 795 for i in range(len(lorentz.spins)): 796 self.external_routines.append('%s_%s' % (lorentz.name, i)) 797 continue 798 799 #standard routines 800 routines = [(i,[]) for i in range(len(lorentz.spins)+1)] 801 # search for special propagators 802 if custom_propa: 803 for vertex in self.model.all_vertices: 804 if lorentz in vertex.lorentz: 805 for i,part in enumerate(vertex.particles): 806 new_prop = False 807 if hasattr(part, 'propagator') and part.propagator: 808 new_prop = ['P%s' % part.propagator.name] 809 elif part.mass.name.lower() == 'zero': 810 new_prop = ['P0'] 811 if new_prop and (i+1, new_prop) not in routines: 812 routines.append((i+1, new_prop)) 813 814 builder = AbstractRoutineBuilder(lorentz, self.model) 815 self.compute_aloha(builder, routines=routines) 816 817 if lorentz.name in self.multiple_lor: 818 for m in self.multiple_lor[lorentz.name]: 819 for outgoing in range(len(lorentz.spins)+1): 820 try: 821 self[(lorentz.name, outgoing)].add_combine(m) 822 except Exception: 823 pass # this routine is a symmetric one, so it 824 # already has the combination. 
825 826 if lorentz.name in conjugate_list: 827 conjg_builder_list= builder.define_all_conjugate_builder(\ 828 conjugate_list[lorentz.name]) 829 for conjg_builder in conjg_builder_list: 830 # No duplication of conjugation: 831 assert conjg_builder_list.count(conjg_builder) == 1 832 self.compute_aloha(conjg_builder, lorentz.name) 833 if lorentz.name in self.multiple_lor: 834 for m in self.multiple_lor[lorentz.name]: 835 for outgoing in range(len(lorentz.spins)+1): 836 realname = conjg_builder.name + ''.join(['C%s' % pair for pair in conjg_builder.conjg]) 837 try: 838 self[(realname, outgoing)].add_combine(m) 839 except Exception,error: 840 self[(realname, self.symmetries[lorentz.name][outgoing])].add_combine(m) 841 842 if save: 843 self.save()
844
845 - def add_Lorentz_object(self, lorentzlist):
846 """add a series of Lorentz structure created dynamically""" 847 848 for lor in lorentzlist: 849 if not hasattr(self.model.lorentz, lor.name): 850 setattr(self.model.lorentz, lor.name, lor)
851
852 - def compute_subset(self, data):
853 """ create the requested ALOHA routine. 854 data should be a list of tuple (lorentz, tag, outgoing) 855 tag should be the list of special tag (like conjugation on pair) 856 to apply on the object """ 857 858 logger.info('aloha starts to compute helicity amplitudes') 859 start = time.time() 860 # Search identical particles in the vertices in order to avoid 861 #to compute identical contribution 862 self.look_for_symmetries() 863 # reorganize the data (in order to use optimization for a given lorentz 864 #structure 865 aloha.loop_mode = False 866 # self.explicit_combine = False 867 request = {} 868 869 for list_l_name, tag, outgoing in data: 870 #allow tag to have integer for retro-compatibility 871 all_tag = tag[:] 872 conjugate = [i for i in tag if isinstance(i, int)] 873 874 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')] 875 tag = tag + ['C%s'%i for i in conjugate] 876 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')] 877 878 conjugate = tuple([int(c[1:]) for c in tag if c.startswith('C')]) 879 loop = any((t.startswith('L') for t in tag)) 880 if loop: 881 aloha.loop_mode = True 882 self.explicit_combine = True 883 884 for l_name in list_l_name: 885 try: 886 request[l_name][conjugate].append((outgoing,tag)) 887 except Exception: 888 try: 889 request[l_name][conjugate] = [(outgoing,tag)] 890 except Exception: 891 request[l_name] = {conjugate: [(outgoing,tag)]} 892 893 # Loop on the structure to build exactly what is request 894 for l_name in request: 895 lorentz = eval('self.model.lorentz.%s' % l_name) 896 if lorentz.structure == 'external': 897 for tmp in request[l_name]: 898 for outgoing, tag in request[l_name][tmp]: 899 name = aloha_writers.get_routine_name(lorentz.name,outgoing=outgoing,tag=tag) 900 if name not in self.external_routines: 901 self.external_routines.append(name) 902 continue 903 904 builder = AbstractRoutineBuilder(lorentz, self.model) 905 906 907 for conjg in request[l_name]: 908 #ensure that 
routines are in rising order (for symetries) 909 def sorting(a,b): 910 if a[0] < b[0]: return -1 911 else: return 1
912 routines = request[l_name][conjg] 913 routines.sort(sorting) 914 if not conjg: 915 # No need to conjugate -> compute directly 916 self.compute_aloha(builder, routines=routines) 917 else: 918 # Define the high level conjugate routine 919 conjg_builder = builder.define_conjugate_builder(conjg) 920 # Compute routines 921 self.compute_aloha(conjg_builder, symmetry=lorentz.name, 922 routines=routines) 923 924 925 # Build mutiple lorentz call 926 for list_l_name, tag, outgoing in data: 927 if len(list_l_name) ==1: 928 continue 929 #allow tag to have integer for retrocompatibility 930 conjugate = [i for i in tag if isinstance(i, int)] 931 all_tag = tag[:] 932 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')] 933 tag = tag + ['C%s'%i for i in conjugate] 934 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')] 935 936 if not self.explicit_combine: 937 lorentzname = list_l_name[0] 938 lorentzname += ''.join(tag) 939 if self.has_key((lorentzname, outgoing)): 940 self[(lorentzname, outgoing)].add_combine(list_l_name[1:]) 941 else: 942 lorentz = eval('self.model.lorentz.%s' % list_l_name[0]) 943 assert lorentz.structure == 'external' 944 else: 945 l_lorentz = [] 946 for l_name in list_l_name: 947 l_lorentz.append(eval('self.model.lorentz.%s' % l_name)) 948 builder = CombineRoutineBuilder(l_lorentz) 949 950 for conjg in request[list_l_name[0]]: 951 #ensure that routines are in rising order (for symetries) 952 def sorting(a,b): 953 if a[0] < b[0]: return -1 954 else: return 1
955 routines = request[list_l_name[0]][conjg] 956 routines.sort(sorting) 957 if not conjg: 958 # No need to conjugate -> compute directly 959 self.compute_aloha(builder, routines=routines) 960 else: 961 # Define the high level conjugate routine 962 conjg_builder = builder.define_conjugate_builder(conjg) 963 # Compute routines 964 self.compute_aloha(conjg_builder, symmetry=lorentz.name, 965 routines=routines) 966 967 logger.info("aloha creates %s routines in %0.3f s", AbstractRoutineBuilder.counter, time.time()-start) 968
969 - def compute_aloha(self, builder, symmetry=None, routines=None, tag=[]):
970 """ define all the AbstractRoutine linked to a given lorentz structure 971 symmetry authorizes to use the symmetry of anoter lorentz structure. 972 routines to define only a subset of the routines.""" 973 974 name = builder.name 975 if not symmetry: 976 symmetry = name 977 if not routines: 978 if not tag: 979 tag = ['C%s' % i for i in builder.conjg] 980 else: 981 addon = ['C%s' % i for i in builder.conjg] 982 tag = [(i,addon +onetag) for i,onetag in tag] 983 routines = [ tuple([i,tag]) for i in range(len(builder.spins) + 1 )] 984 985 # Create the routines 986 for outgoing, tag in routines: 987 symmetric = self.has_symmetries(symmetry, outgoing, valid_output=routines) 988 realname = name + ''.join(tag) 989 if (realname, outgoing) in self: 990 continue # already computed 991 992 if symmetric: 993 self.get(realname, symmetric).add_symmetry(outgoing) 994 else: 995 wavefunction = builder.compute_routine(outgoing, tag) 996 #Store the information 997 self.set(realname, outgoing, wavefunction)
998 999
1000 - def compute_aloha_without_kernel(self, builder, symmetry=None, routines=None):
1001 """define all the AbstractRoutine linked to a given lorentz structure 1002 symmetry authorizes to use the symmetry of anoter lorentz structure. 1003 routines to define only a subset of the routines. 1004 Compare to compute_aloha, each routines are computed independently. 1005 """ 1006 1007 name = builder.name 1008 if not routines: 1009 routines = [ tuple([i,[]]) for i in range(len(builder.spins) + 1 )] 1010 1011 for outgoing, tag in routines: 1012 builder.routine_kernel = None 1013 wavefunction = builder.compute_routine(outgoing, tag) 1014 self.set(name, outgoing, wavefunction)
1015 1016
1017 - def write(self, output_dir, language):
1018 """ write the full set of Helicity Routine in output_dir""" 1019 for abstract_routine in self.values(): 1020 abstract_routine.write(output_dir, language) 1021 1022 for routine in self.external_routines: 1023 self.locate_external(routine, language, output_dir)
1024 1025 # if aloha_lib.KERNEL.unknow_fct: 1026 # if language == 'Fortran': 1027 # logger.warning('''Some function present in the lorentz structure are not 1028 # recognized. A Template file has been created: 1029 # %s 1030 # Please edit this file to include the associated definition.''' % \ 1031 # pjoin(output_dir, 'additional_aloha_function.f') ) 1032 # else: 1033 # logger.warning('''Some function present in the lorentz structure are 1034 # not recognized. Please edit the code to add the defnition of such function.''') 1035 # logger.info('list of missing fct: %s .' % \ 1036 # ','.join([a[0] for a in aloha_lib.KERNEL.unknow_fct])) 1037 # 1038 # for fct_name, nb_arg in aloha_lib.KERNEL.unknow_fct: 1039 # if language == 'Fortran': 1040 # aloha_writers.write_template_fct(fct_name, nb_arg, output_dir) 1041 1042 1043 1044 #self.write_aloha_file_inc(output_dir) 1045
1046 - def locate_external(self, name, language, output_dir=None):
1047 """search a valid external file and copy it to output_dir directory""" 1048 1049 language_to_ext = {'Python': 'py', 1050 'Fortran' : 'f', 1051 'CPP': 'C'} 1052 ext = language_to_ext[language] 1053 paths = [os.path.join(self.model_pos, language), self.model_pos, 1054 os.path.join(root_path, 'aloha', 'template_files', )] 1055 1056 ext_files = [] 1057 for path in paths: 1058 ext_files = misc.glob('%s.%s' % (name, ext), path) 1059 if ext_files: 1060 break 1061 else: 1062 1063 raise ALOHAERROR, 'No external routine \"%s.%s\" in directories\n %s' % \ 1064 (name, ext, '\n'.join(paths)) 1065 1066 if output_dir: 1067 for filepath in ext_files: 1068 1069 files.cp(filepath, output_dir) 1070 return ext_files
1071 1072 1073
1074 - def look_for_symmetries(self):
1075 """Search some symmetries in the vertices. 1076 We search if some identical particles are in a vertices in order 1077 to avoid to compute symmetrical contributions""" 1078 1079 for vertex in self.model.all_vertices: 1080 for i, part1 in enumerate(vertex.particles): 1081 for j in range(i-1,-1,-1): 1082 part2 = vertex.particles[j] 1083 if part1.pdg_code == part2.pdg_code and part1.color == 1: 1084 if part1.spin == 2 and (i % 2 != j % 2 ): 1085 continue 1086 for lorentz in vertex.lorentz: 1087 if self.symmetries.has_key(lorentz.name): 1088 if self.symmetries[lorentz.name].has_key(i+1): 1089 self.symmetries[lorentz.name][i+1] = max(self.symmetries[lorentz.name][i+1], j+1) 1090 else: 1091 self.symmetries[lorentz.name][i+1] = j+1 1092 else: 1093 self.symmetries[lorentz.name] = {i+1:j+1} 1094 break
1095
1096 - def look_for_multiple_lorentz_interactions(self):
1097 """Search the interaction associate with more than one lorentz structure. 1098 If those lorentz structure have the same order and the same color then 1099 associate a multiple lorentz routines to ALOHA """ 1100 1101 orders = {} 1102 for coup in self.model.all_couplings: 1103 orders[coup.name] = str(coup.order) 1104 1105 for vertex in self.model.all_vertices: 1106 if len(vertex.lorentz) == 1: 1107 continue 1108 #remove ghost 1109 #if -1 in vertex.lorentz[0].spins: 1110 # continue 1111 1112 # assign each order/color to a set of lorentz routine 1113 combine = {} 1114 for (id_col, id_lor), coups in vertex.couplings.items(): 1115 if not isinstance(coups, list): 1116 coups = [coups] 1117 for coup in coups: 1118 order = orders[coup.name] 1119 key = (id_col, order) 1120 if key in combine: 1121 combine[key].append(id_lor) 1122 else: 1123 combine[key] = [id_lor] 1124 1125 # Check if more than one routine are associated 1126 for list_lor in combine.values(): 1127 if len(list_lor) == 1: 1128 continue 1129 list_lor.sort() 1130 main = vertex.lorentz[list_lor[0]].name 1131 if main not in self.multiple_lor: 1132 self.multiple_lor[main] = [] 1133 1134 info = tuple([vertex.lorentz[id].name for id in list_lor[1:]]) 1135 if info not in self.multiple_lor[main]: 1136 self.multiple_lor[main].append(info)
1137 1138
1139 - def has_symmetries(self, l_name, outgoing, out=None, valid_output=None):
1140 """ This returns out if no symmetries are available, otherwise it finds 1141 the lowest equivalent outgoing by recursivally calling this function. 1142 auth is a list of authorize output, if define""" 1143 1144 try: 1145 equiv = self.symmetries[l_name][outgoing] 1146 except Exception: 1147 return out 1148 else: 1149 if not valid_output or equiv in valid_output: 1150 return self.has_symmetries(l_name, equiv, out=equiv, 1151 valid_output=valid_output) 1152 else: 1153 return self.has_symmetries(l_name, equiv, out=out, 1154 valid_output=valid_output)
1155
1156 - def look_for_conjugate(self):
1157 """ create a list for the routine needing to be conjugate """ 1158 1159 # Check if they are majorana in the model. 1160 need = False 1161 for particle in self.model.all_particles: 1162 if particle.spin == 2 and particle.selfconjugate: 1163 need = True 1164 break 1165 1166 if not need: 1167 for interaction in self.model.all_vertices: 1168 fermions = [p for p in interaction.particles if p.spin == 2] 1169 for i in range(0, len(fermions), 2): 1170 if fermions[i].pdg_code * fermions[i+1].pdg_code > 0: 1171 # This is a fermion flow violating interaction 1172 need = True 1173 break 1174 1175 # No majorana particles 1176 if not need: 1177 return {} 1178 1179 conjugate_request = {} 1180 # Check each vertex if they are fermion and/or majorana 1181 for vertex in self.model.all_vertices: 1182 for i in range(0, len(vertex.particles), 2): 1183 part1 = vertex.particles[i] 1184 if part1.spin !=2: 1185 # deal only with fermion 1186 break 1187 # check if this pair contains a majorana 1188 if part1.selfconjugate: 1189 continue 1190 part2 = vertex.particles[i + 1] 1191 if part2.selfconjugate: 1192 continue 1193 1194 # No majorana => add the associate lorentz structure 1195 for lorentz in vertex.lorentz: 1196 try: 1197 conjugate_request[lorentz.name].add(i//2+1) 1198 except Exception: 1199 conjugate_request[lorentz.name] = set([i//2+1]) 1200 1201 for elem in conjugate_request: 1202 conjugate_request[elem] = list(conjugate_request[elem]) 1203 1204 return conjugate_request
1205
def write_aloha_file_inc(aloha_dir, file_ext, comp_ext):
    """Find the list of helicity routines in aloha_dir and write
    'aloha_file.inc' listing those files with the compile extension.

    aloha_dir: directory containing the generated ALOHA sources.
    file_ext: source extension (e.g. '.f').
    comp_ext: compiled-object extension (e.g. '.o').
    """

    aloha_files = []

    # Identify the valid files: an ALOHA routine name ends in _<digit><ext>
    alohafile_pattern = re.compile(r'''_\d%s''' % file_ext)
    for filename in os.listdir(aloha_dir):
        if os.path.isfile(os.path.join(aloha_dir, filename)):
            if alohafile_pattern.search(filename):
                aloha_files.append(filename.replace(file_ext, comp_ext))

    if os.path.exists(os.path.join(aloha_dir, 'additional_aloha_function.f')):
        aloha_files.append('additional_aloha_function.o')

    text = "ALOHARoutine = "
    text += ' '.join(aloha_files)
    text += '\n'

    # open() in a context manager: the py2-only file() builtin is gone in
    # python3 and the original never closed the handle
    with open(os.path.join(aloha_dir, 'aloha_file.inc'), 'w') as fsock:
        fsock.write(text)
1230
def create_prop_library(tag, lib={}):
    # NOTE: lib={} is a deliberate mutable default acting as a persistent
    # cache across calls (the dict is filled and returned); do not "fix"
    # it to None without auditing the callers.

    def create(obj):
        """Simplify, expand, then re-simplify an aloha expression object."""
        obj= obj.simplify()
        obj = obj.expand()
        obj = obj.simplify()
        return obj

    # avoid to add tag in global
    old_tag = set(aloha_lib.KERNEL.use_tag)
    name, i = tag
    # build the requested spin-2 propagator and store it under (name, i)
    if name == "Spin2Prop":
        lib[('Spin2Prop',i)] = create( Spin2Propagator(_spin2_mult + i, \
                                            2 * _spin2_mult + i,'I2','I3', i) )
    elif name == "Spin2PropMassless":
        lib[('Spin2PropMassless',i)] = create( Spin2masslessPropagator(
                                _spin2_mult + i, 2 * _spin2_mult + i,'I2','I3'))

    # restore the tag set so the propagator construction leaves no trace
    aloha_lib.KERNEL.use_tag = old_tag
    return lib


if '__main__' == __name__:
    logging.basicConfig(level=0)
    #create_library()
    import profile
    #model

    start = time.time()
1263 - def main():
1264 alohagenerator = AbstractALOHAModel('sm') 1265 alohagenerator.compute_all(save=False) 1266 return alohagenerator
    def write(alohagenerator):
        """Write the generated helicity routines as Python code under /tmp/."""
        alohagenerator.write('/tmp/', 'Python')
1269 alohagenerator = main() 1270 logger.info('done in %s s' % (time.time()-start)) 1271 write(alohagenerator) 1272 #profile.run('main()') 1273 #profile.run('write(alohagenerator)') 1274 stop = time.time() 1275 logger.info('done in %s s' % (stop-start)) 1276