Package aloha :: Module create_aloha
[hide private]
[frames] | [no frames]

Source Code for Module aloha.create_aloha

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2010 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from __future__ import division 
  16  import cmath 
  17  import copy 
  18  import cPickle 
  19  import glob 
  20  import logging 
  21  import numbers 
  22  import os 
  23  import re 
  24  import shutil 
  25  import sys 
  26  import time 
  27  from madgraph.interface.tutorial_text import output 
  28   
# Make the repository root (the parent of this package directory) importable,
# so both the madgraph.* and aloha.* import paths used below can be resolved.
root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
sys.path.append(root_path)
  31  from aloha.aloha_object import * 
  32  import aloha 
  33  import aloha.aloha_writers as aloha_writers 
  34  import aloha.aloha_lib as aloha_lib 
  35  import aloha.aloha_object as aloha_object 
  36  import aloha.aloha_parsers as aloha_parsers 
  37  import aloha.aloha_fct as aloha_fct 
  38  try: 
  39      import madgraph.iolibs.files as files 
  40      import madgraph.various.misc as misc 
  41  except Exception: 
  42      import aloha.files as files 
  43      import aloha.misc as misc 
  44       
# Directory of this package on disk.
aloha_path = os.path.dirname(os.path.realpath(__file__))
logger = logging.getLogger('ALOHA')

# Offset added to a fermion index when a conjugation (C matrix) is applied,
# so that conjugated indices never collide with the original ones.
_conjugate_gap = 50
# Multiplier used to build the two Lorentz indices attached to a spin-2
# particle (first index: _spin2_mult + id, second: 2 * _spin2_mult + id).
_spin2_mult = 1000

# Shorthand for building file paths.
pjoin = os.path.join

# Module-level alias of the ALOHA error class.
ALOHAERROR = aloha.ALOHAERROR
class AbstractRoutine(object):
    """Store the result of the computation of one Helicity Routine.

    Instances are produced by AbstractRoutineBuilder and are later handed
    to the writers (aloha_writers) to generate the output code.
    """
    def __init__(self, expr, outgoing, spins, name, infostr, denom=None):
        """Store the information defining the routine.

        expr: the computed symbolic expression of the routine
        outgoing: index of the off-shell particle (0 for an amplitude)
        spins: list of 2s+1 spin values of the external particles
        name: name of the underlying lorentz structure
        infostr: human-readable string describing the expression
        denom: optional denominator (set for custom propagators)
        """

        self.spins = spins
        self.expr = expr
        self.denominator = denom
        self.name = name
        self.outgoing = outgoing
        self.infostr = infostr
        self.symmetries = []   # outgoing indices obtainable from this one by symmetry
        self.combined = []     # lorentz structures combined with this one
        self.tag = []          # special tags (conjugation 'Cn', 'MP', loop 'Ln', ...)
        self.contracted = {}   # contracted sub-expressions ('TMP*' variables)
72 73 74
75 - def add_symmetry(self, outgoing):
76 """ add an outgoing """ 77 78 if not outgoing in self.symmetries: 79 self.symmetries.append(outgoing)
80
81 - def add_combine(self, lor_list):
82 """add a combine rule """ 83 84 if lor_list not in self.combined: 85 self.combined.append(lor_list)
86
    def write(self, output_dir, language='Fortran', mode='self', combine=True,**opt):
        """ write the content of the object """
        # Delegate the actual code generation to the writer associated with
        # the requested target language.
        writer = aloha_writers.WriterFactory(self, language, output_dir, self.tag)
        text = writer.write(mode=mode, **opt)
        if combine:
            # Also emit the routines obtained by combining this lorentz
            # structure with others (multiple-lorentz vertices).
            for grouped in self.combined:
                if isinstance(text, tuple):
                    text = tuple([old.__add__(new) for old, new in zip(text,
                             writer.write_combined(grouped, mode=mode+'no_include', **opt))])
                else:
                    text += writer.write_combined(grouped, mode=mode+'no_include', **opt)
        # When multiple precision is requested, recurse exactly once with the
        # 'MP' tag added in order to also generate that version of the routine.
        if aloha.mp_precision and 'MP' not in self.tag:
            self.tag.append('MP')
            text += self.write(output_dir, language, mode, **opt)
        return text
102
    def get_info(self, info):
        """Return some information on the routine.

        Only info == "rank" is supported: the maximal power of the loop
        momentum appearing in the (split) loop expression.
        """
        if info == "rank":
            # Only meaningful for loop routines, where the expression has
            # been split in coefficients of the loop momentum.
            assert isinstance(self.expr, aloha_lib.SplitCoefficient)
            rank= 1
            for coeff in self.expr:
                rank = max(sum(coeff), rank)
            return rank -1 # due to the coefficient associate to the wavefunctions
        else:
            raise ALOHAERROR, '%s is not a valid information that can be computed' % info
114
class AbstractRoutineBuilder(object):
    """ Launch the creation of the Helicity Routine.

    Given one UFO lorentz structure, compute_routine() evaluates the
    associated analytical expression and returns it wrapped inside an
    AbstractRoutine ready to be written out.
    """

    prop_lib = {} # Store computation for the propagator (class-level cache)
    counter = 0   # counter for statistic only
    class AbstractALOHAError(Exception):
        """ An error class for ALOHA"""
124
    def __init__(self, lorentz, model=None):
        """ initialize the run
        lorentz: the lorentz information analyzed (UFO format)
        language: define in which language we write the output
        modes: 0 for all incoming particles
              >0 defines the outgoing part (start to count at 1)
        """

        self.spins = [s for s in lorentz.spins]
        self.name = lorentz.name
        self.conjg = []               # fermion pairs on which conjugation is applied
        self.tag = []
        self.outgoing = None          # set by compute_routine()
        self.lorentz_expr = lorentz.structure
        self.routine_kernel = None    # cached parsed/evaluated expression
        self.spin2_massless = False
        self.spin32_massless = False
        self.contracted = {}
        self.fct = {}
        self.model = model
        self.denominator = None
        # assert model

        self.lastprint = 0 # to avoid that ALOHA makes too many printout

        # Inline the definition of any form factor appearing in the lorentz
        # expression (plain textual substitution of its value).
        if hasattr(lorentz, 'formfactors') and lorentz.formfactors:
            for formf in lorentz.formfactors:
                pat = re.compile(r'\b%s\b' % formf.name)
                self.lorentz_expr = pat.sub('(%s)' % formf.value, self.lorentz_expr)
154
155 - def compute_routine(self, mode, tag=[], factorize=True):
156 """compute the expression and return it""" 157 self.outgoing = mode 158 self.tag = tag 159 if __debug__: 160 if mode == 0: 161 assert not any(t.startswith('L') for t in tag) 162 self.expr = self.compute_aloha_high_kernel(mode, factorize) 163 return self.define_simple_output()
164
165 - def define_all_conjugate_builder(self, pair_list):
166 """ return the full set of AbstractRoutineBuilder linked to fermion 167 clash""" 168 169 solution = [] 170 171 for i, pair in enumerate(pair_list): 172 new_builder = self.define_conjugate_builder(pair) 173 solution.append(new_builder) 174 solution += new_builder.define_all_conjugate_builder(pair_list[i+1:]) 175 return solution
176
177 - def define_conjugate_builder(self, pairs=1):
178 """ return a AbstractRoutineBuilder for the conjugate operation. 179 If they are more than one pair of fermion. Then use pair to claim which 180 one is conjugated""" 181 182 new_builder = copy.copy(self) 183 new_builder.conjg = self.conjg[:] 184 try: 185 for index in pairs: 186 new_builder.apply_conjugation(index) 187 except TypeError: 188 new_builder.apply_conjugation(pairs) 189 return new_builder
190
    def apply_conjugation(self, pair=1):
        """ apply conjugation on self object"""

        # Number of fermions = number of even (2s+1) spin entries.
        nb_fermion = len([1 for s in self.spins if s % 2 == 0])
        if isinstance(pair, tuple):
            if len(pair) ==1 :
                pair = pair[0]
            else:
                raise Exception

        if (pair > 1 or nb_fermion >2) and not self.conjg:
            # self.conjg avoif multiple check
            # For 4-point (or more) fermion interactions, only the standard
            # (1,2)(3,4)... fermion-flow pairing is supported.
            data = aloha_fct.get_fermion_flow(self.lorentz_expr, nb_fermion)
            target = dict([(2*i+1,2*i+2) for i in range(nb_fermion//2)])
            if not data == target:
                text = """Unable to deal with 4(or more) point interactions
                in presence of majorana particle/flow violation"""
                raise ALOHAERROR, text

        # Conjugated spinor indices are shifted by _conjugate_gap so they do
        # not collide with the original particle indices.
        old_id = 2 * pair - 1
        new_id = _conjugate_gap + old_id

        self.kernel_tag = set()
        aloha_lib.KERNEL.use_tag = set()
        if not self.routine_kernel or isinstance(self.routine_kernel, str):
            self.routine_kernel = eval(self.parse_expression(self.lorentz_expr))
            self.kernel_tag = aloha_lib.KERNEL.use_tag
        # We need to compute C Gamma^T C^-1 = C_ab G_cb (-1) C_cd
        #                                   = C_ac G_bc (-1) C_bd = C_ac G_bc C_db
        self.routine_kernel = \
                 C(new_id, old_id + 1) * self.routine_kernel * C(new_id + 1, old_id)

        self.lorentz_expr = '('+self.lorentz_expr+') * C(%s,%s) * C(%s,%s)' % \
                               (new_id, old_id + 1, new_id + 1, old_id )

        self.conjg.append(pair)
228 229
230 - def define_simple_output(self):
231 """ define a simple output for this AbstractRoutine """ 232 233 infostr = str(self.lorentz_expr) 234 235 output = AbstractRoutine(self.expr, self.outgoing, self.spins, self.name, \ 236 infostr, self.denominator) 237 output.contracted = dict([(name, aloha_lib.KERNEL.reduced_expr2[name]) 238 for name in aloha_lib.KERNEL.use_tag 239 if name.startswith('TMP')]) 240 241 output.fct = dict([(name, aloha_lib.KERNEL.reduced_expr2[name]) 242 for name in aloha_lib.KERNEL.use_tag 243 if name.startswith('FCT')]) 244 245 output.tag = [t for t in self.tag if not t.startswith('C')] 246 output.tag += ['C%s' % pair for pair in self.conjg] 247 return output
248
249 - def parse_expression(self, expr=None, need_P_sign=False):
250 """change the sign of P for outcoming fermion in order to 251 correct the mismatch convention between HELAS and FR""" 252 253 if not expr: 254 expr = self.lorentz_expr 255 256 if need_P_sign: 257 expr = re.sub(r'\b(P|PSlash)\(', r'-\1(', expr) 258 259 calc = aloha_parsers.ALOHAExpressionParser() 260 lorentz_expr = calc.parse(expr) 261 return lorentz_expr
262
    def compute_aloha_high_kernel(self, mode, factorize=True):
        """compute the abstract routine associate to this mode

        Multiply the (cached or freshly parsed) lorentz kernel by the
        wavefunction of every incoming leg and by the propagator of the
        off-shell leg, then simplify/expand/factorize the result.
        """

        # reset tag for particles
        aloha_lib.KERNEL.use_tag=set()
        #multiply by the wave functions
        nb_spinor = 0
        outgoing = self.outgoing
        if (outgoing + 1) // 2 in self.conjg:
            #flip the outgoing tag if in conjugate
            outgoing = outgoing + outgoing % 2 - (outgoing +1) % 2

        if not self.routine_kernel:
            AbstractRoutineBuilder.counter += 1
            if self.tag == []:
                logger.info('aloha creates %s routines' % self.name)
            # NOTE(review): the rate limiting below uses the *class* attribute
            # AbstractALOHAModel.lastprint (not self.lastprint) -- confirm
            # this cross-class coupling is intended.
            elif AbstractALOHAModel.lastprint < time.time() - 1:
                AbstractALOHAModel.lastprint = time.time()
                logger.info('aloha creates %s set of routines with options: %s' \
                            % (self.name, ','.join(self.tag)) )
            try:
                # The parsed expression is evaluated with the aloha_object
                # names imported at module level (star import above).
                lorentz = self.parse_expression()
                self.routine_kernel = lorentz
                lorentz = eval(lorentz)
            except NameError as error:
                logger.error('unknow type in Lorentz Evaluation:%s'%str(error))
                raise ALOHAERROR, 'unknow type in Lorentz Evaluation: %s ' % str(error)
            else:
                self.kernel_tag = set(aloha_lib.KERNEL.use_tag)
        elif isinstance(self.routine_kernel,str):
            lorentz = eval(self.routine_kernel)
            aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
        else:
            lorentz = copy.copy(self.routine_kernel)
            aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
        for (i, spin ) in enumerate(self.spins):
            id = i + 1
            #Check if this is the outgoing particle
            if id == outgoing:

                # check if we need a special propagator
                propa = [t[1:] for t in self.tag if t.startswith('P')]
                if propa == ['0']:
                    massless = True
                    self.denominator = None
                elif propa == []:
                    massless = False
                    self.denominator = None
                else:
                    # user-defined propagator: handled separately, skip the
                    # standard propagator attached below
                    lorentz *= complex(0,1) * self.get_custom_propa(propa[0], spin, id)
                    continue

                if spin in [1,-1]:
                    lorentz *= complex(0,1)
                elif spin == 2:
                    # shift and flip the tag if we multiply by C matrices
                    if (id + 1) // 2 in self.conjg:
                        id += _conjugate_gap + id % 2 - (id +1) % 2
                    if (id % 2):
                        #propagator outcoming
                        lorentz *= complex(0,1) * SpinorPropagatorout(id, 'I2', outgoing)
                    else:
                        # #propagator incoming
                        lorentz *= complex(0,1) * SpinorPropagatorin('I2', id, outgoing)
                elif spin == 3 :
                    if massless or not aloha.unitary_gauge:
                        lorentz *= VectorPropagatorMassless(id, 'I2', id)
                    else:
                        lorentz *= VectorPropagator(id, 'I2', id)
                elif spin == 4:
                    # shift and flip the tag if we multiply by C matrices
                    if (id + 1) // 2 in self.conjg:
                        spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
                    else:
                        spin_id = id
                    nb_spinor += 1
                    if not massless and (spin_id % 2):
                        lorentz *= complex(0,1) * Spin3halfPropagatorout(id, 'I2', spin_id,'I3', outgoing)
                    elif not massless and not (spin_id % 2):
                        lorentz *= complex(0,1) * Spin3halfPropagatorin('I2', id , 'I3', spin_id, outgoing)
                    elif spin_id %2:
                        lorentz *= complex(0,1) * Spin3halfPropagatorMasslessOut(id, 'I2', spin_id,'I3', outgoing)
                    else :
                        lorentz *= complex(0,1) * Spin3halfPropagatorMasslessIn('I2', id, 'I3', spin_id, outgoing)

                elif spin == 5 :
                    #lorentz *= 1 # delayed evaluation (fastenize the code)
                    if massless:
                        lorentz *= complex(0,1) * Spin2masslessPropagator(_spin2_mult + id, \
                                             2 * _spin2_mult + id,'I2','I3')
                    else:
                        lorentz *= complex(0,1) * Spin2Propagator(_spin2_mult + id, \
                                             2 * _spin2_mult + id,'I2','I3', id)
                else:
                    raise self.AbstractALOHAError(
                                'The spin value %s (2s+1) is not supported yet' % spin)
            else:
                # This is an incoming particle
                if spin in [1,-1]:
                    lorentz *= Scalar(id)
                elif spin == 2:
                    # shift the tag if we multiply by C matrices
                    if (id+1) // 2 in self.conjg:
                        spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
                    else:
                        spin_id = id
                    lorentz *= Spinor(spin_id, id)
                elif spin == 3:
                    lorentz *= Vector(id, id)
                elif spin == 4:
                    # shift the tag if we multiply by C matrices
                    if (id+1) // 2 in self.conjg:
                        spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
                    else:
                        spin_id = id
                    nb_spinor += 1
                    lorentz *= Spin3Half(id, spin_id, id)
                elif spin == 5:
                    lorentz *= Spin2(1 * _spin2_mult + id, 2 * _spin2_mult + id, id)
                else:
                    raise self.AbstractALOHAError(
                                'The spin value %s (2s+1) is not supported yet' % spin)

        # If no particle OffShell
        if not outgoing:
            lorentz *= complex(0,-1)
        # Propagator are taken care separately

        lorentz = lorentz.simplify()

        # Modify the expression in case of loop-pozzorini
        if any((tag.startswith('L') for tag in self.tag if len(tag)>1)):
            return self.compute_loop_coefficient(lorentz, outgoing)

        lorentz = lorentz.expand()
        lorentz = lorentz.simplify()

        if factorize:
            lorentz = lorentz.factorize()

        lorentz.tag = set(aloha_lib.KERNEL.use_tag)
        return lorentz
407 408 @staticmethod
409 - def mod_propagator_expression(tag, text):
410 """Change the index of the propagator to match the current need""" 411 412 data = re.split(r'(\b[a-zA-Z]\w*?)\(([\'\w,\s\"\+\-]*?)\)',text) 413 to_change = {} 414 for old, new in tag.items(): 415 if isinstance(new, str): 416 new='\'%s\'' % new 417 else: 418 new = str(new) 419 to_change[r'%s' % old] = new 420 pos=-2 421 while pos +3 < len(data): 422 pos = pos+3 423 ltype = data[pos] 424 if ltype != 'complex': 425 data[pos+1] = re.sub(r'\b(?<!-)(%s)\b' % '|'.join(to_change), 426 lambda x: to_change[x.group()], data[pos+1]) 427 data[pos+1] = '(%s)' % data[pos+1] 428 text=''.join(data) 429 return text
430
    def get_custom_propa(self, propa, spin, id):
        """Return the ALOHA object associated to the user define propagator.

        propa: name of the propagator defined in the model
        spin: 2s+1 value of the off-shell particle
        id: index of the off-shell particle
        """

        propagator = getattr(self.model.propagators, propa)
        numerator = propagator.numerator
        denominator = propagator.denominator

        # Find how to make the replacement for the various tag in the propagator expression
        needPflipping = False
        if spin in [1,-1]:
            tag = {'id': id}
        elif spin == 2:
            # shift and flip the tag if we multiply by C matrices
            if (id + 1) // 2 in self.conjg:
                spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
            else:
                spin_id = id
            if (spin_id % 2):
                #propagator outcoming
                needPflipping = True
                tag ={'1': spin_id, '2': 'I2', 'id': id}
            else:
                tag ={'1': 'I2', '2': spin_id, 'id': id}
        elif spin == 3 :
            tag ={'1': id, '2': 'I2', 'id': id}
        elif spin == 4:
            delta = lambda i,j: aloha_object.Identity(i,j)
            deltaL = lambda i,j: aloha_object.IdentityL(i,j)
            # shift and flip the tag if we multiply by C matrices
            if (id + 1) // 2 in self.conjg:
                spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
            else:
                spin_id = id
            tag = {'1': 'pr1', '2': 'pr2', 'id':id}
            if spin_id % 2:
                needPflipping = True
                # propaR is needed to do the correct contraction since we need to distinguish spin from lorentz index
                propaR = deltaL('pr1',id) * deltaL('pr2', 'I2') * delta('pr1', spin_id) * delta('pr2', 'I3')
            else:
                propaR = deltaL('pr1',id) * deltaL('pr2', 'I2') * delta('pr2', spin_id) * delta('pr1', 'I3')
            #numerator += "*deltaL('pr_1',id) * deltaL('pr_2', 'I2') * delta('pr_1', spin_id) * delta('pr_2', 'I3')"
        elif spin == 5 :
            tag = {'1': _spin2_mult + id, '2': 'I2',
                   '51': 2 * _spin2_mult + id, '52': 'I3', 'id':id}

        # Substitute the concrete indices into the textual expressions, then
        # parse and evaluate them into ALOHA objects.
        numerator = self.mod_propagator_expression(tag, numerator)
        if denominator:
            denominator = self.mod_propagator_expression(tag, denominator)
        numerator = self.parse_expression(numerator, needPflipping)

        if denominator:
            self.denominator = self.parse_expression(denominator, needPflipping)
            self.denominator = eval(self.denominator)
            if not isinstance(self.denominator, numbers.Number):
                self.denominator = self.denominator.simplify().expand().simplify().get((0,))

        if spin ==4:
            return eval(numerator) * propaR
        else:
            return eval(numerator)
491 492 493 494
    def compute_loop_coefficient(self, lorentz, outgoing):
        """Split *lorentz* into coefficients of the loop momentum for an
        open-loop ('L' tagged) routine and return the SplitCoefficient."""

        # Index of the incoming open-loop leg (exactly one 'Ln' tag).
        l_in = [int(tag[1:]) for tag in self.tag if tag.startswith('L')][0]
        if (l_in + 1) // 2 in self.conjg:
            #flip the outgoing tag if in conjugate
            l_in = l_in + l_in % 2 - (l_in +1) % 2
        assert l_in != outgoing, 'incoming Open Loop can not be the outcoming one'

        # modify the expression for the momenta
        # P_i -> P_i + P_L and P_o -> -P_o - P_L
        Pdep = [aloha_lib.KERNEL.get(P) for P in lorentz.get_all_var_names()
                if P.startswith('_P')]
        Pdep = set([P for P in Pdep if P.particle in [outgoing, l_in]])
        for P in Pdep:
            if P.particle == l_in:
                sign = 1
            else:
                sign = -1
            id = P.id
            lorentz_ind = P.lorentz_ind[0]
            P_Lid = aloha_object.P(lorentz_ind, 'L')
            P_obj = aloha_object.P(lorentz_ind, P.particle)
            new_expr = sign*(P_Lid + P_obj)
            lorentz = lorentz.replace(id, new_expr)

        # Compute the variable from which we need to split the expression
        var_veto = ['PL_0', 'PL_1', 'PL_2', 'PL_3']
        spin = aloha_writers.WriteALOHA.type_to_variable[abs(self.spins[l_in-1])]
        size = aloha_writers.WriteALOHA.type_to_size[spin]-1
        var_veto += ['%s%s_%s' % (spin,l_in,i) for i in range(1,size)]
        # compute their unique identifiant
        veto_ids = aloha_lib.KERNEL.get_ids(var_veto)

        lorentz = lorentz.expand(veto = veto_ids)
        lorentz = lorentz.simplify()
        coeff_expr = lorentz.split(veto_ids)

        # Simplify/factorize each coefficient independently.
        for key, expr in coeff_expr.items():
            expr = expr.simplify()
            coeff_expr[key] = expr.factorize()
        coeff_expr.tag = set(aloha_lib.KERNEL.use_tag)

        return coeff_expr
540
    def define_lorentz_expr(self, lorentz_expr):
        """Set the analytical expression attached to this builder."""

        self.expr = lorentz_expr
545
    def define_routine_kernel(self, lorentz=None):
        """Define the kernel at low level.

        If *lorentz* is not given, the stored textual expression is
        evaluated (with the aloha_object names imported at module level).
        """

        if not lorentz:
            logger.info('compute kernel %s' % self.counter)
            AbstractRoutineBuilder.counter += 1
            lorentz = eval(self.lorentz_expr)

        if isinstance(lorentz, numbers.Number):
            # A purely numerical kernel needs no simplification.
            self.routine_kernel = lorentz
            return lorentz
        lorentz = lorentz.simplify()
        lorentz = lorentz.expand()
        lorentz = lorentz.simplify()

        self.routine_kernel = lorentz
        return lorentz
563 564 565 @staticmethod
566 - def get_routine_name(name, outgoing):
567 """return the name of the """ 568 569 name = '%s_%s' % (name, outgoing) 570 return name
    @classmethod
    def load_library(cls, tag):
        """Populate the class-level propagator library for *tag* (cached)."""
        # load the library
        if tag in cls.prop_lib:
            return
        else:
            # NOTE(review): create_prop_library and cls.aloha_lib are not
            # defined in this part of the module -- confirm they exist
            # elsewhere before relying on this code path.
            cls.prop_lib = create_prop_library(tag, cls.aloha_lib)
579
class CombineRoutineBuilder(AbstractRoutineBuilder):
    """A special builder for combine routine if needed to write those
    explicitely (several lorentz structures summed with their couplings).
    """
585 - def __init__(self, l_lorentz, model=None):
586 """ initialize the run 587 l_lorentz: list of lorentz information analyzed (UFO format) 588 language: define in which language we write the output 589 modes: 0 for all incoming particles 590 >0 defines the outgoing part (start to count at 1) 591 """ 592 AbstractRoutineBuilder.__init__(self,l_lorentz[0], model) 593 lorentz = l_lorentz[0] 594 self.spins = lorentz.spins 595 l_name = [l.name for l in l_lorentz] 596 self.name = aloha_writers.combine_name(l_name[0], l_name[1:], None) 597 self.conjg = [] 598 self.tag = [] 599 self.outgoing = None 600 self.lorentz_expr = [] 601 for i, lor in enumerate(l_lorentz): 602 self.lorentz_expr.append( 'Coup(%s) * (%s)' % (i+1, lor.structure)) 603 self.lorentz_expr = ' + '.join(self.lorentz_expr) 604 self.routine_kernel = None 605 self.contracted = {} 606 self.fct = {}
607
class AbstractALOHAModel(dict):
    """ A class to build and store the full set of Abstract ALOHA Routine.

    Maps (lorentz_name, outgoing_index) -> AbstractRoutine.
    """

    lastprint = 0  # class-level timestamp of the last progress printout
    def __init__(self, model_name, write_dir=None, format='Fortran',
                 explicit_combine=False):
        """ load the UFO model and init the dictionary """

        # Option
        self.explicit_combine = explicit_combine

        # Extract the model name if combined with restriction
        # (e.g. 'sm-full' or 'sm-no_b_mass' -> 'sm').
        model_name_pattern = re.compile("^(?P<name>.+)-(?P<rest>[\w\d_]+)$")
        model_name_re = model_name_pattern.match(model_name)
        if model_name_re:
            name = model_name_re.group('name')
            rest = model_name_re.group("rest")
            if rest == 'full' or \
               os.path.isfile(os.path.join(root_path, "models", name,
                                           "restrict_%s.dat" % rest)):
                model_name = model_name_re.group("name")

        # load the UFO model: first as a top-level package, then under the
        # 'models' package.
        try:
            python_pos = model_name
            __import__(python_pos)
        except Exception:
            python_pos = 'models.%s' % model_name
            __import__(python_pos)
        self.model = sys.modules[python_pos]
        # find the position on the disk
        self.model_pos = os.path.dirname(self.model.__file__)

        # list the external routine
        self.external_routines = []

        # init the dictionary
        dict.__init__(self)
        self.symmetries = {}
        self.multiple_lor = {}

        if write_dir:
            self.main(write_dir,format=format)
652
    def main(self, output_dir, format='Fortran'):
        """ Compute if not already compute.
            Write file in models/MY_MODEL/MY_FORMAT.
            copy the file to output_dir
        """
        # Extension used by each supported output language.
        ext = {'Fortran':'f','Python':'py','CPP':'h'}

        # Check if a pickle file exists
        if not self.load():
            self.compute_all()
        logger.info(' %s aloha routine' % len(self))

        # Check that output directory exists
        if not output_dir:
            output_dir = os.path.join(self.model_pos, format.lower())
            logger.debug('aloha output dir is %s' % output_dir)
        if not os.path.exists(output_dir):
            os.mkdir(output_dir)

        # Check that all routine are generated at default places:
        for (name, outgoing), abstract in self.items():
            routine_name = AbstractRoutineBuilder.get_routine_name(name, outgoing)
            if not os.path.exists(os.path.join(output_dir, routine_name) + '.' + ext[format]):
                abstract.write(output_dir, format)
            else:
                logger.info('File for %s already present, skip the writing of this file' % routine_name)
680 681
682 - def save(self, filepos=None):
683 """ save the current model in a pkl file """ 684 685 logger.info('save the aloha abstract routine in a pickle file') 686 if not filepos: 687 filepos = os.path.join(self.model_pos,'aloha.pkl') 688 689 fsock = open(filepos, 'w') 690 cPickle.dump(dict(self), fsock)
691
    def load(self, filepos=None):
        """ reload the pickle file

        Returns True when routines were recovered from the cache, False
        otherwise.
        """
        # NOTE(review): this unconditional return deliberately(?) disables
        # reloading from the pickle cache -- every call reports that nothing
        # was loaded and the code below is unreachable. Confirm before
        # removing it.
        return False
        if not filepos:
            filepos = os.path.join(self.model_pos,'aloha.pkl')
        if os.path.exists(filepos):
            fsock = open(filepos, 'r')
            self.update(cPickle.load(fsock))
            return True
        else:
            return False
703
704 - def get(self, lorentzname, outgoing):
705 """ return the AbstractRoutine with a given lorentz name, and for a given 706 outgoing particle """ 707 708 try: 709 return self[(lorentzname, outgoing)] 710 except Exception: 711 logger.warning('(%s, %s) is not a valid key' % 712 (lorentzname, outgoing) ) 713 return None
714
715 - def get_info(self, info, lorentzname, outgoing, tag, cached=False):
716 """return some information about the aloha routine 717 - "rank": return the rank of the loop function 718 If the cached option is set to true, then the result is stored and 719 recycled if possible. 720 """ 721 722 if not aloha.loop_mode and any(t.startswith('L') for t in tag): 723 aloha.loop_mode = True 724 725 726 returned_dict = {} 727 # Make sure the input argument is a list 728 if isinstance(info, str): 729 infos = [info] 730 else: 731 infos = info 732 733 # First deal with the caching of infos 734 if hasattr(self, 'cached_interaction_infos'): 735 # Now try to recover it 736 for info_key in infos: 737 try: 738 returned_dict[info] = self.cached_interaction_infos[\ 739 (lorentzname,outgoing,tuple(tag),info)] 740 except KeyError: 741 # Some information has never been computed before, so they 742 # will be computed later. 743 pass 744 elif cached: 745 self.cached_interaction_infos = {} 746 747 init = False 748 for info_key in infos: 749 if info_key in returned_dict: 750 continue 751 elif not init: 752 # need to create the aloha object 753 lorentz = eval('self.model.lorentz.%s' % lorentzname) 754 abstract = AbstractRoutineBuilder(lorentz) 755 routine = abstract.compute_routine(outgoing, tag, factorize=False) 756 init = True 757 758 assert 'routine' in locals() 759 returned_dict[info_key] = routine.get_info(info_key) 760 if cached: 761 # Cache the information computed 762 self.cached_interaction_infos[\ 763 (lorentzname,outgoing,tuple(tag),info_key)]=returned_dict[info_key] 764 765 if isinstance(info, str): 766 return returned_dict[info] 767 else: 768 return returned_dict
769
770 - def set(self, lorentzname, outgoing, abstract_routine):
771 """ add in the dictionary """ 772 773 self[(lorentzname, outgoing)] = abstract_routine
774
    def compute_all(self, save=True, wanted_lorentz = [], custom_propa=False):
        """ define all the AbstractRoutine linked to a model

        save: write the result in a pickle file at the end
        wanted_lorentz: restrict the computation to these lorentz names
        custom_propa: also build routines for user-defined propagators
        """

        # Search identical particles in the vertices in order to avoid
        #to compute identical contribution
        self.look_for_symmetries()
        conjugate_list = self.look_for_conjugate()
        self.look_for_multiple_lorentz_interactions()

        if not wanted_lorentz:
            wanted_lorentz = [l.name for l in self.model.all_lorentz]
        for lorentz in self.model.all_lorentz:
            if not lorentz.name in wanted_lorentz:
                # Only include the routines we ask for
                continue

            if -1 in lorentz.spins:
                # No Ghost in ALOHA
                continue

            if lorentz.structure == 'external':
                # Externally provided routines: only record their names.
                for i in range(len(lorentz.spins)):
                    self.external_routines.append('%s_%s' % (lorentz.name, i))
                continue

            #standard routines
            routines = [(i,[]) for i in range(len(lorentz.spins)+1)]
            # search for special propagators
            if custom_propa:
                for vertex in self.model.all_vertices:
                    if lorentz in vertex.lorentz:
                        for i,part in enumerate(vertex.particles):
                            new_prop = False
                            if hasattr(part, 'propagator') and part.propagator:
                                new_prop = ['P%s' % part.propagator.name]
                            elif part.mass.name.lower() == 'zero':
                                new_prop = ['P0']
                            if new_prop and (i+1, new_prop) not in routines:
                                routines.append((i+1, new_prop))

            builder = AbstractRoutineBuilder(lorentz, self.model)
            self.compute_aloha(builder, routines=routines)

            if lorentz.name in self.multiple_lor:
                for m in self.multiple_lor[lorentz.name]:
                    for outgoing in range(len(lorentz.spins)+1):
                        try:
                            self[(lorentz.name, outgoing)].add_combine(m)
                        except Exception:
                            pass # this routine is a symmetric one, so it
                                 # already has the combination.

            if lorentz.name in conjugate_list:
                # Also build every fermion-flow conjugated variant.
                conjg_builder_list= builder.define_all_conjugate_builder(\
                                                   conjugate_list[lorentz.name])
                for conjg_builder in conjg_builder_list:
                    # No duplication of conjugation:
                    assert conjg_builder_list.count(conjg_builder) == 1
                    self.compute_aloha(conjg_builder, lorentz.name)
                    if lorentz.name in self.multiple_lor:
                        for m in self.multiple_lor[lorentz.name]:
                            for outgoing in range(len(lorentz.spins)+1):
                                realname = conjg_builder.name + ''.join(['C%s' % pair for pair in conjg_builder.conjg])
                                try:
                                    self[(realname, outgoing)].add_combine(m)
                                except Exception,error:
                                    self[(realname, self.symmetries[lorentz.name][outgoing])].add_combine(m)

        if save:
            self.save()
845
846 - def add_Lorentz_object(self, lorentzlist):
847 """add a series of Lorentz structure created dynamically""" 848 849 for lor in lorentzlist: 850 if not hasattr(self.model.lorentz, lor.name): 851 setattr(self.model.lorentz, lor.name, lor)
852
    def compute_subset(self, data):
        """ create the requested ALOHA routine.
        data should be a list of tuple (lorentz, tag, outgoing)
        tag should be the list of special tag (like conjugation on pair)
        to apply on the object """

        # Search identical particles in the vertices in order to avoid
        #to compute identical contribution
        self.look_for_symmetries()
        # reorganize the data (in order to use optimization for a given lorentz
        #structure
        aloha.loop_mode = False
        # self.explicit_combine = False
        request = {}

        for list_l_name, tag, outgoing in data:
            #allow tag to have integer for retro-compatibility
            all_tag = tag[:]
            conjugate = [i for i in tag if isinstance(i, int)]

            # Normalise the tag ordering: plain tags, then conjugation
            # tags, then propagator ('P*') tags.
            tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
            tag = tag + ['C%s'%i for i in conjugate]
            tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]

            conjugate = tuple([int(c[1:]) for c in tag if c.startswith('C')])
            loop = any((t.startswith('L') for t in tag))
            if loop:
                aloha.loop_mode = True
                self.explicit_combine = True

            # request: l_name -> {conjugate_pairs: [(outgoing, tag), ...]}
            for l_name in list_l_name:
                try:
                    request[l_name][conjugate].append((outgoing,tag))
                except Exception:
                    try:
                        request[l_name][conjugate] = [(outgoing,tag)]
                    except Exception:
                        request[l_name] = {conjugate: [(outgoing,tag)]}

        # Loop on the structure to build exactly what is request
        for l_name in request:
            lorentz = eval('self.model.lorentz.%s' % l_name)
            if lorentz.structure == 'external':
                for tmp in request[l_name]:
                    for outgoing, tag in request[l_name][tmp]:
                        name = aloha_writers.get_routine_name(lorentz.name,outgoing=outgoing,tag=tag)
                        if name not in self.external_routines:
                            self.external_routines.append(name)
                continue

            builder = AbstractRoutineBuilder(lorentz, self.model)

            for conjg in request[l_name]:
                #ensure that routines are in rising order (for symetries)
                # NOTE: Python 2 style cmp-function sort.
                def sorting(a,b):
                    if a[0] < b[0]: return -1
                    else: return 1
                routines = request[l_name][conjg]
                routines.sort(sorting)
                if not conjg:
                    # No need to conjugate -> compute directly
                    self.compute_aloha(builder, routines=routines)
                else:
                    # Define the high level conjugate routine
                    conjg_builder = builder.define_conjugate_builder(conjg)
                    # Compute routines
                    self.compute_aloha(conjg_builder, symmetry=lorentz.name,
                                       routines=routines)

        # Build mutiple lorentz call
        for list_l_name, tag, outgoing in data:
            if len(list_l_name) ==1:
                continue
            #allow tag to have integer for retrocompatibility
            conjugate = [i for i in tag if isinstance(i, int)]
            all_tag = tag[:]
            tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
            tag = tag + ['C%s'%i for i in conjugate]
            tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]

            if not self.explicit_combine:
                # Attach the combination rule to the already computed routine.
                lorentzname = list_l_name[0]
                lorentzname += ''.join(tag)
                if self.has_key((lorentzname, outgoing)):
                    self[(lorentzname, outgoing)].add_combine(list_l_name[1:])
                else:
                    lorentz = eval('self.model.lorentz.%s' % list_l_name[0])
                    assert lorentz.structure == 'external'
            else:
                # Build an explicit combined routine.
                l_lorentz = []
                for l_name in list_l_name:
                    l_lorentz.append(eval('self.model.lorentz.%s' % l_name))
                builder = CombineRoutineBuilder(l_lorentz)

                for conjg in request[list_l_name[0]]:
                    #ensure that routines are in rising order (for symetries)
                    def sorting(a,b):
                        if a[0] < b[0]: return -1
                        else: return 1
                    routines = request[list_l_name[0]][conjg]
                    routines.sort(sorting)
                    if not conjg:
                        # No need to conjugate -> compute directly
                        self.compute_aloha(builder, routines=routines)
                    else:
                        # Define the high level conjugate routine
                        conjg_builder = builder.define_conjugate_builder(conjg)
                        # Compute routines
                        self.compute_aloha(conjg_builder, symmetry=lorentz.name,
                                           routines=routines)
968 - def compute_aloha(self, builder, symmetry=None, routines=None, tag=[]):
969 """ define all the AbstractRoutine linked to a given lorentz structure 970 symmetry authorizes to use the symmetry of anoter lorentz structure. 971 routines to define only a subset of the routines.""" 972 973 name = builder.name 974 if not symmetry: 975 symmetry = name 976 if not routines: 977 if not tag: 978 tag = ['C%s' % i for i in builder.conjg] 979 else: 980 addon = ['C%s' % i for i in builder.conjg] 981 tag = [(i,addon +onetag) for i,onetag in tag] 982 routines = [ tuple([i,tag]) for i in range(len(builder.spins) + 1 )] 983 984 # Create the routines 985 for outgoing, tag in routines: 986 symmetric = self.has_symmetries(symmetry, outgoing, valid_output=routines) 987 realname = name + ''.join(tag) 988 if (realname, outgoing) in self: 989 continue # already computed 990 991 if symmetric: 992 self.get(realname, symmetric).add_symmetry(outgoing) 993 else: 994 wavefunction = builder.compute_routine(outgoing, tag) 995 #Store the information 996 self.set(realname, outgoing, wavefunction)
997 998
999 - def compute_aloha_without_kernel(self, builder, symmetry=None, routines=None):
1000 """define all the AbstractRoutine linked to a given lorentz structure 1001 symmetry authorizes to use the symmetry of anoter lorentz structure. 1002 routines to define only a subset of the routines. 1003 Compare to compute_aloha, each routines are computed independently. 1004 """ 1005 1006 name = builder.name 1007 if not routines: 1008 routines = [ tuple([i,[]]) for i in range(len(builder.spins) + 1 )] 1009 1010 for outgoing, tag in routines: 1011 builder.routine_kernel = None 1012 wavefunction = builder.compute_routine(outgoing, tag) 1013 self.set(name, outgoing, wavefunction)
1014 1015
1016 - def write(self, output_dir, language):
1017 """ write the full set of Helicity Routine in output_dir""" 1018 for abstract_routine in self.values(): 1019 abstract_routine.write(output_dir, language) 1020 1021 for routine in self.external_routines: 1022 self.locate_external(routine, language, output_dir)
1023 1024 # if aloha_lib.KERNEL.unknow_fct: 1025 # if language == 'Fortran': 1026 # logger.warning('''Some function present in the lorentz structure are not 1027 # recognized. A Template file has been created: 1028 # %s 1029 # Please edit this file to include the associated definition.''' % \ 1030 # pjoin(output_dir, 'additional_aloha_function.f') ) 1031 # else: 1032 # logger.warning('''Some function present in the lorentz structure are 1033 # not recognized. Please edit the code to add the defnition of such function.''') 1034 # logger.info('list of missing fct: %s .' % \ 1035 # ','.join([a[0] for a in aloha_lib.KERNEL.unknow_fct])) 1036 # 1037 # for fct_name, nb_arg in aloha_lib.KERNEL.unknow_fct: 1038 # if language == 'Fortran': 1039 # aloha_writers.write_template_fct(fct_name, nb_arg, output_dir) 1040 1041 1042 1043 #self.write_aloha_file_inc(output_dir) 1044
    def locate_external(self, name, language, output_dir=None):
        """Search a valid external (hand-written) routine file and, when
        output_dir is given, copy it there.

        Returns the list of matching file paths; raises ALOHAERROR when no
        file is found in any search directory."""

        # map the output language to the source-file extension used on disk
        language_to_ext = {'Python': 'py',
                           'Fortran' : 'f',
                           'CPP': 'C'}
        ext = language_to_ext[language]
        # search order: model/<language>/, then the model directory itself,
        # then the ALOHA template directory shipped with the code
        paths = [os.path.join(self.model_pos, language), self.model_pos,
                 os.path.join(root_path, 'aloha', 'template_files', )]

        ext_files = []
        for path in paths:
            ext_files = misc.glob('%s.%s' % (name, ext), path)
            if ext_files:
                break
        else:
            # for/else: no directory provided the file -> fatal error
            # (python2-only raise syntax, kept as-is for this py2 module)
            raise ALOHAERROR, 'No external routine \"%s.%s\" in directories\n %s' % \
                  (name, ext, '\n'.join(paths))

        if output_dir:
            # copy every match so the output directory is self-contained
            for filepath in ext_files:
                files.cp(filepath, output_dir)
        return ext_files
1070 1071 1072
1073 - def look_for_symmetries(self):
1074 """Search some symmetries in the vertices. 1075 We search if some identical particles are in a vertices in order 1076 to avoid to compute symmetrical contributions""" 1077 1078 for vertex in self.model.all_vertices: 1079 for i, part1 in enumerate(vertex.particles): 1080 for j in range(i-1,-1,-1): 1081 part2 = vertex.particles[j] 1082 if part1.pdg_code == part2.pdg_code and part1.color == 1: 1083 if part1.spin == 2 and (i % 2 != j % 2 ): 1084 continue 1085 for lorentz in vertex.lorentz: 1086 if self.symmetries.has_key(lorentz.name): 1087 if self.symmetries[lorentz.name].has_key(i+1): 1088 self.symmetries[lorentz.name][i+1] = max(self.symmetries[lorentz.name][i+1], j+1) 1089 else: 1090 self.symmetries[lorentz.name][i+1] = j+1 1091 else: 1092 self.symmetries[lorentz.name] = {i+1:j+1} 1093 break
1094
1095 - def look_for_multiple_lorentz_interactions(self):
1096 """Search the interaction associate with more than one lorentz structure. 1097 If those lorentz structure have the same order and the same color then 1098 associate a multiple lorentz routines to ALOHA """ 1099 1100 orders = {} 1101 for coup in self.model.all_couplings: 1102 orders[coup.name] = str(coup.order) 1103 1104 for vertex in self.model.all_vertices: 1105 if len(vertex.lorentz) == 1: 1106 continue 1107 #remove ghost 1108 #if -1 in vertex.lorentz[0].spins: 1109 # continue 1110 1111 # assign each order/color to a set of lorentz routine 1112 combine = {} 1113 for (id_col, id_lor), coups in vertex.couplings.items(): 1114 if not isinstance(coups, list): 1115 coups = [coups] 1116 for coup in coups: 1117 order = orders[coup.name] 1118 key = (id_col, order) 1119 if key in combine: 1120 combine[key].append(id_lor) 1121 else: 1122 combine[key] = [id_lor] 1123 1124 # Check if more than one routine are associated 1125 for list_lor in combine.values(): 1126 if len(list_lor) == 1: 1127 continue 1128 list_lor.sort() 1129 main = vertex.lorentz[list_lor[0]].name 1130 if main not in self.multiple_lor: 1131 self.multiple_lor[main] = [] 1132 1133 info = tuple([vertex.lorentz[id].name for id in list_lor[1:]]) 1134 if info not in self.multiple_lor[main]: 1135 self.multiple_lor[main].append(info)
1136 1137
1138 - def has_symmetries(self, l_name, outgoing, out=None, valid_output=None):
1139 """ This returns out if no symmetries are available, otherwise it finds 1140 the lowest equivalent outgoing by recursivally calling this function. 1141 auth is a list of authorize output, if define""" 1142 1143 try: 1144 equiv = self.symmetries[l_name][outgoing] 1145 except Exception: 1146 return out 1147 else: 1148 if not valid_output or equiv in valid_output: 1149 return self.has_symmetries(l_name, equiv, out=equiv, 1150 valid_output=valid_output) 1151 else: 1152 return self.has_symmetries(l_name, equiv, out=out, 1153 valid_output=valid_output)
1154
1155 - def look_for_conjugate(self):
1156 """ create a list for the routine needing to be conjugate """ 1157 1158 # Check if they are majorana in the model. 1159 need = False 1160 for particle in self.model.all_particles: 1161 if particle.spin == 2 and particle.selfconjugate: 1162 need = True 1163 break 1164 1165 if not need: 1166 for interaction in self.model.all_vertices: 1167 fermions = [p for p in interaction.particles if p.spin == 2] 1168 for i in range(0, len(fermions), 2): 1169 if fermions[i].pdg_code * fermions[i+1].pdg_code > 0: 1170 # This is a fermion flow violating interaction 1171 need = True 1172 break 1173 1174 # No majorana particles 1175 if not need: 1176 return {} 1177 1178 conjugate_request = {} 1179 # Check each vertex if they are fermion and/or majorana 1180 for vertex in self.model.all_vertices: 1181 for i in range(0, len(vertex.particles), 2): 1182 part1 = vertex.particles[i] 1183 if part1.spin !=2: 1184 # deal only with fermion 1185 break 1186 # check if this pair contains a majorana 1187 if part1.selfconjugate: 1188 continue 1189 part2 = vertex.particles[i + 1] 1190 if part2.selfconjugate: 1191 continue 1192 1193 # No majorana => add the associate lorentz structure 1194 for lorentz in vertex.lorentz: 1195 try: 1196 conjugate_request[lorentz.name].add(i//2+1) 1197 except Exception: 1198 conjugate_request[lorentz.name] = set([i//2+1]) 1199 1200 for elem in conjugate_request: 1201 conjugate_request[elem] = list(conjugate_request[elem]) 1202 1203 return conjugate_request
1204
def write_aloha_file_inc(aloha_dir, file_ext, comp_ext):
    """Write 'aloha_file.inc' listing the compiled helicity routines.

    Scans *aloha_dir* for generated helicity-routine sources (their names
    contain '_<digit><file_ext>') and writes a makefile include assigning
    the matching object names (source extension replaced by *comp_ext*)
    to the ALOHARoutine variable.
    """
    aloha_files = []

    # Identify the valid files.  The extension is escaped so its leading
    # dot is matched literally (the old pattern let '.' match any char).
    alohafile_pattern = re.compile(r'_\d%s' % re.escape(file_ext))
    for filename in os.listdir(aloha_dir):
        if os.path.isfile(os.path.join(aloha_dir, filename)):
            if alohafile_pattern.search(filename):
                aloha_files.append(filename.replace(file_ext, comp_ext))

    # user-provided helper functions are compiled alongside the routines
    if os.path.exists(os.path.join(aloha_dir, 'additional_aloha_function.f')):
        aloha_files.append('additional_aloha_function.o')

    text = "ALOHARoutine = "
    text += ' '.join(aloha_files)
    text += '\n'

    # use open() in a context manager: the py2-only file() builtin was
    # removed in py3 and the previous one-liner never closed the handle
    with open(os.path.join(aloha_dir, 'aloha_file.inc'), 'w') as fsock:
        fsock.write(text)
1229
def create_prop_library(tag, lib={}):
    # Build (and cache) expanded spin-2 propagator expressions.
    # `tag` is a tuple (name, i): name selects the propagator type
    # ("Spin2Prop" or "Spin2PropMassless"), i is the index offset.
    # NOTE(review): the mutable default `lib={}` looks like an intentional
    # cross-call cache (every default call extends and returns the same
    # dict) -- confirm before changing it.

    def create(obj):
        """Simplify, expand and re-simplify an aloha expression."""
        obj= obj.simplify()
        obj = obj.expand()
        obj = obj.simplify()
        return obj

    # avoid to add tag in global
    # (snapshot the kernel's tag set and restore it afterwards)
    old_tag = set(aloha_lib.KERNEL.use_tag)
    name, i = tag
    if name == "Spin2Prop":
        # massive spin-2 propagator; _spin2_mult offsets keep indices unique
        lib[('Spin2Prop',i)] = create( Spin2Propagator(_spin2_mult + i, \
                                            2 * _spin2_mult + i,'I2','I3', i) )
    elif name == "Spin2PropMassless":
        # massless variant: no mass index argument
        lib[('Spin2PropMassless',i)] = create( Spin2masslessPropagator(
                            _spin2_mult + i, 2 * _spin2_mult + i,'I2','I3'))

    aloha_lib.KERNEL.use_tag = old_tag
    return lib


if '__main__' == __name__:
    # Standalone debug/benchmark entry point: generate every ALOHA routine
    # of the 'sm' model and write them out in Python, timing both steps.
    logging.basicConfig(level=0)
    #create_library()
    import profile
    #model

    start = time.time()
    def main():
        # generate all routines for the Standard Model
        alohagenerator = AbstractALOHAModel('sm')
        alohagenerator.compute_all(save=False)
        return alohagenerator
    def write(alohagenerator):
        # dump the generated routines as Python modules under /tmp/
        alohagenerator.write('/tmp/', 'Python')
    alohagenerator = main()
    logger.info('done in %s s' % (time.time()-start))
    write(alohagenerator)
    #profile.run('main()')
    #profile.run('write(alohagenerator)')
    stop = time.time()
    logger.info('done in %s s' % (stop-start))