1
2
3
4
5
6
7
8
9
10
11
12
13
14
""" Set of tools to modify a given UFO model
(mainly by adding/suppressing interactions, and by allowing the different
parts of the model to be modified as text). Consistency checks of the model
are performed. This produces a new, valid UFO model as output.
"""
20 import copy
21 import glob
22 import logging
23 import os
24 import re
25 import sys
26
27 import madgraph.core.base_objects as base_objects
28 import madgraph.iolibs.files as files
29 import madgraph.various.misc as misc
30 import models as ufomodels
31 import models.import_ufo as import_ufo
32 import models.check_param_card as check_param_card
33 from madgraph import MG5DIR
34
35 pjoin =os.path.join
36 logger = logging.getLogger('madgraph.model')
37
39
40
42
43 text = obj.__repr__()
44 if text.startswith('_'):
45 text = '%s%s' % (str(obj.__class__.__name__)[0].upper(), text)
46 return text
47
49 """ The class storing the current status of the model """
50
def __init__(self, modelpath, addon='__1'):
    """Load the model from a valid UFO directory (otherwise keep everything
    as empty).

    modelpath: path of the UFO model directory to load.
    addon: suffix appended to objects that need renaming when an add-on
           model is merged in later.

    Raises USRMODERROR when the model does not follow the UFO conventions
    required by the add-model feature.
    """
    self.modelpath = modelpath
    model = ufomodels.load_model(modelpath)

    if not hasattr(model, 'all_orders'):
        raise USRMODERROR('Base Model doesn\'t follows UFO convention (no couplings_order information)\n' +
              'MG5 is able to load such model but NOT to the add model feature.')
    if isinstance(model.all_particles[0].mass, basestring):
        raise USRMODERROR('Base Model doesn\'t follows UFO convention (Mass/Width of particles are string name, not object)\n' +
              'MG5 is able to load such model but NOT to the add model feature.')

    # Copy the particles so that later modifications do not mutate the
    # objects owned by the loaded UFO module; keep the ids of the original
    # objects so the vertices can be re-pointed to the copies below.
    old_particles = [id(p) for p in model.all_particles]
    self.particles = [copy.copy(p) for p in model.all_particles]
    if any(hasattr(p, 'loop_particles') for p in self.particles):
        raise USRMODERROR('Base Model doesn\'t follows UFO convention ')
    self.vertices = list(model.all_vertices)

    # Re-point every vertex to the copied particle instances.
    for v in self.vertices:
        new_p = []
        for p in v.particles:
            try:
                new_p.append(self.particles[old_particles.index(id(p))])
            except ValueError:
                # particle object not tracked by id (e.g. a distinct
                # instance): fall back to a name/pdg_code lookup.
                p3 = [p2 for p2 in self.particles if p2.name == p.name and p2.pdg_code == p.pdg_code]
                new_p.append(p3[0])
        v.particles = new_p

    self.couplings = list(model.all_couplings)
    self.lorentz = list(model.all_lorentz)
    self.parameters = list(model.all_parameters)
    self.Parameter = self.parameters[0].__class__
    self.orders = list(model.all_orders)

    self.functions = list(model.all_functions)
    self.new_external = []

    # Optional UFO components -- not present in every model.
    if hasattr(model, 'all_propagators'):
        self.propagators = list(model.all_propagators)
    else:
        self.propagators = []

    if hasattr(model, 'all_CTvertices'):
        self.CTvertices = list(model.all_CTvertices)
    else:
        self.CTvertices = []

    if hasattr(model, 'all_CTparameters'):
        self.CTparameters = list(model.all_CTparameters)
    else:
        self.CTparameters = []

    # Old versions of object_library.py store the lorentz expression under
    # the attribute name 'expression' instead of 'expr'; keep a translation
    # table so that generated files use the right attribute name.
    if 'self.expr = expression' in open(pjoin(self.modelpath, 'object_library.py')).read():
        self.translate = {'expr': 'expression'}
    else:
        self.translate = {}

    # mapping: name in the add-on model -> renamed name in the merged model
    self.old_new = {}
    self.addon = addon

    # fast pdg_code -> particle lookup
    self.particle_dict = {}
    for particle in self.particles:
        self.particle_dict[particle.pdg_code] = particle

    # every model directory merged so far (used when copying extra files)
    self.all_path = [self.modelpath]
123
124 - def write(self, outputdir):
145
146
def mod_file(self, inputpath, outputpath):
    """Copy inputpath to outputpath while renaming every identifier
    registered in self.translate / self.old_new, and fixing the case of
    particle references (P.xxx) to match the current particle list."""

    to_change = {}
    to_change.update(self.translate)
    to_change.update(self.old_new)

    # Only build the substitution pattern when there is something to
    # substitute: an empty alternation (r'\b()\b') matches at every word
    # boundary and the replacement callback would then look up
    # to_change[''] and raise a KeyError.
    if to_change:
        pattern = re.compile(r'\b(%s)\b' % ('|'.join(to_change)))
    else:
        pattern = None

    # canonical particle names as they appear after 'P.' in the output
    all_particles_name = [self.format_param(P)[2:] for P in self.particles]
    all_lower = [p.lower() for p in all_particles_name]
    pat2 = re.compile(r'\bP\.(\w+)\b')

    with open(inputpath) as fin, open(outputpath, 'w') as fsock:
        for line in fin:
            if pattern:
                line = pattern.sub(lambda mo: to_change[mo.group()], line)
            part_in_line = set(pat2.findall(line))

            # map mis-cased particle names to their canonical spelling
            to_replace = {}
            for p in part_in_line:
                if p in all_particles_name:
                    continue
                else:
                    ind = all_lower.index(p.lower())
                    to_replace[p] = all_particles_name[ind]
            if to_replace:
                pat3 = re.compile(r'\bP\.(%s)\b' % '|'.join(p for p in to_replace))
                line = pat3.sub(lambda mo: 'P.%s' % to_replace[mo.group(1)], line)
            fsock.write(line)
180
181
183 """ propagate model restriction of the original model. """
184
185 restrict_list = [l for l in os.listdir(self.modelpath) if l.startswith('restrict_')]
186 if not self.new_external:
187
188 for p in restrict_list:
189 files.cp(pjoin(self.modelpath, p), outputdir)
190
191 else:
192
193 for p in restrict_list:
194 param_card = check_param_card.ParamCard(pjoin(self.modelpath, p))
195 for parameter in self.new_external:
196 block = parameter.lhablock
197 lhaid = parameter.lhacode
198 value = parameter.value
199 if value == 0:
200 value = 1e-99
201 elif value == 1:
202 value = 9.999999e-1
203 try:
204 param_card.add_param(block.lower(), lhaid, value, 'from addon')
205 except check_param_card.InvalidParamCard:
206 logger.warning("%s will not acting for %s %s" % (p, block, lhaid))
207 param_card[block.lower()].get(lhaid).value = value
208
209 param_card.write(pjoin(outputdir, p), precision=7)
210
243
244
245
def create_data_text(self, obj):
    """ create the data associate to the object"""
    # Renders a UFO object as a python source statement of the form
    #     NAME = Class(arg1 = ...,
    #                  arg2 = ...)
    # Continuation lines are aligned on the opening parenthesis via the
    # nb_space accumulator below.

    nb_space = 0
    # required constructor arguments, if the object declares them
    if hasattr(obj, 'require_args_all'):
        args = obj.require_args_all
    elif hasattr(obj, 'require_args'):
        args = obj.require_args
    else:
        args = []
    if args:
        text = """%s = %s(""" % (repr(obj), obj.__class__.__name__)
    else:
        text = """%s = %s(""" % (obj.name, obj.__class__.__name__)

    for data in args:
        if data in self.translate:
            data = self.translate[data]
        # first written argument: remember the alignment column (len of the
        # "NAME = Class(" prefix); afterwards add_space stays 0 so nb_space
        # is only set once
        if not nb_space:
            add_space = len(text)
        else:
            add_space = 0

        # composite argument specs (comma separated) are skipped here
        if ',' in data:
            continue

        try:
            expr = getattr(obj, data)
        except:
            # optional attributes that old object_library versions may not
            # define; create them as None so the output stays loadable
            if data in ['counterterm', 'propagator', 'loop_particles']:
                expr = None
                setattr(obj, data, None)
            else:
                raise
        name = str(data)
        if name in self.translate:
            name = self.translate[name]

        text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
        nb_space += add_space

    # write the remaining (keyword-style) attributes not covered by args
    if hasattr(obj, 'get_all'):
        other_attr = [name for name in obj.get_all().keys()
                      if name not in args]
    else:
        # NOTE: relies on .keys() returning a list (python2 semantics),
        # since other_attr is sorted in place below
        other_attr = obj.__dict__.keys()

    # 'nature' on CTParameter is an obsolete attribute of buggy
    # object_library versions; drop it with a loud warning
    if str(obj.__class__.__name__) == 'CTParameter' and 'nature' in other_attr:
        logger.critical('UFO model is outdated (including some bugs). Please update object_library.py to latest version')
        other_attr.remove('nature')

    other_attr.sort()
    # partial_widths is generated data and must not be written out
    if other_attr == ['GhostNumber', 'LeptonNumber', 'Y', 'partial_widths', 'selfconjugate']:
        other_attr = ['GhostNumber', 'LeptonNumber', 'Y', 'selfconjugate']

    for data in other_attr:
        name = str(data)
        if name in ['partial_widths', 'loop_particles']:
            continue
        if name in self.translate:
            name = self.translate[name]
        if not nb_space:
            add_space = len(text)
        else:
            add_space = 0
        text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
        nb_space += add_space

    # drop the trailing ',\n' and close the constructor call
    text = text[:-2] + ')\n\n'

    return text
322
def create_file_content(self, datalist):
    """Return the source text for every object of datalist, one
    declaration after the other separated by a blank line."""
    chunks = []
    for entry in datalist:
        chunks.append(self.create_data_text(entry))
    return '\n'.join(chunks)
326
327
def write_particles(self, outputdir):
    """Write the particles.py file of the merged model in outputdir."""
    text = """
# This file was automatically created by The UFO_usermod

from __future__ import division
from object_library import all_particles, Particle
import parameters as Param

"""
    text += self.create_file_content(self.particles)
    # use write(), not writelines(): text is a single string, and
    # writelines would iterate it character by character
    with open(os.path.join(outputdir, 'particles.py'), 'w') as ff:
        ff.write(text)
    return
343
345 """ """
346 text = """
347 # This file was automatically created by The UFO_usermod
348
349 from object_library import all_vertices, Vertex
350 import particles as P
351 import couplings as C
352 import lorentz as L
353
354 """
355 text += self.create_file_content(self.vertices)
356 ff = open(os.path.join(outputdir, 'vertices.py'), 'w')
357 ff.writelines(text)
358 ff.close()
359 return
360
362 """ """
363
364 if not self.CTvertices:
365 return
366
367 text = """
368 # This file was automatically created by The UFO_usermod
369
370 from object_library import all_vertices, all_CTvertices, Vertex, CTVertex
371 import particles as P
372 import couplings as C
373 import lorentz as L
374
375 """
376 text += self.create_file_content(self.CTvertices)
377 ff = open(os.path.join(outputdir, 'CT_vertices.py'), 'w')
378 ff.writelines(text)
379 ff.close()
380 return
381
382
384 """ """
385 text = """
386 # This file was automatically created by The UFO_usermod
387
388 from object_library import all_couplings, Coupling
389 """
390 text += self.create_file_content(self.couplings)
391 ff = open(os.path.join(outputdir, 'couplings.py'), 'w')
392 ff.writelines(text)
393 ff.close()
394 return
395
397 """ """
398 text = """
399 # This file was automatically created by The UFO_usermod
400
401 from object_library import all_lorentz, Lorentz
402 """
403
404 text += self.create_file_content(self.lorentz)
405 ff = open(os.path.join(outputdir, 'lorentz.py'), 'w')
406 ff.writelines(text)
407 ff.close()
408 return
409
411 """ """
412 text = """
413 # This file was automatically created by The UFO_usermod
414
415 from object_library import all_parameters, Parameter
416 """
417
418 text += self.create_file_content(self.parameters)
419 ff = open(os.path.join(outputdir, 'parameters.py'), 'w')
420 ff.writelines(text)
421 ff.close()
422 return
423
425 """ """
426 if not self.CTparameters:
427 return
428
429 text = """
430 # This file was automatically created by The UFO_usermod
431
432 from object_library import all_CTparameters, CTParameter
433
434 from function_library import complexconjugate, re, im, csc, sec, acsc, asec, cot
435 """
436
437 text += self.create_file_content(self.CTparameters)
438 ff = open(os.path.join(outputdir, 'CT_parameters.py'), 'w')
439 ff.writelines(text)
440 ff.close()
441 return
442
443
445 """ """
446 text = """
447 # This file was automatically created by The UFO_usermod
448
449 from object_library import all_orders, CouplingOrder
450 """
451
452 text += self.create_file_content(self.orders)
453 ff = open(os.path.join(outputdir, 'coupling_orders.py'), 'w')
454 ff.writelines(text)
455 ff.close()
456 return
457
459 """ """
460 text = """
461 # This file was automatically created by The UFO_usermod
462
463 import cmath
464 from object_library import all_functions, Function
465
466 """
467
468 text += self.create_file_content(self.functions)
469 ff = open(os.path.join(outputdir, 'function_library.py'), 'w')
470 ff.writelines(text)
471 ff.close()
472 return
473
475 """ """
476
477 text = """
478 # This file was automatically created by The UFO_usermod
479 from object_library import all_propagators, Propagator
480 """
481
482 text += self.create_file_content(self.propagators)
483 ff = open(os.path.join(outputdir, 'propagators.py'), 'w')
484 ff.writelines(text)
485 ff.close()
486 return
487
489 """Copy/merge the routines written in Fortran/C++/pyhton"""
490
491
492 re_fct = re.compile('''^\s{7,70}[\w\s]*function (\w*)\(''',re.M+re.I)
493 present_fct = set()
494 for dirpath in self.all_path:
495 if os.path.exists(pjoin(dirpath, 'Fortran', 'functions.f')):
496 text = open(pjoin(dirpath, 'Fortran', 'functions.f')).read()
497 new_fct = re_fct.findall(text)
498 nb_old = len(present_fct)
499 nb_added = len(new_fct)
500 new_fct = set([f.lower() for f in new_fct])
501 present_fct.update(new_fct)
502 if len(present_fct) < nb_old + nb_added:
503 logger.critical('''Some Functions in functions.f are define in more than one model.
504 This require AT LEAST manual modification of the resulting file. But more likely the
505 model need to be consider as un-physical! Use it very carefully.''')
506
507 if not os.path.exists(pjoin(outputdir, 'Fortran')):
508 os.mkdir(pjoin(outputdir, 'Fortran'))
509 fsock = open(pjoin(outputdir, 'Fortran','functions.f'),'a')
510 fsock.write(text)
511 fsock.close()
512
513
514
515 for dirpath in self.all_path:
516 for subdir in ['Fortran', 'CPP', 'Python']:
517 if os.path.exists(pjoin(dirpath, subdir)):
518 for filepath in os.listdir(pjoin(dirpath, subdir)):
519 if filepath == 'functions.f':
520 continue
521 if '.' not in filepath:
522 continue
523 logger.warning('Manual HELAS routine associated to the model. Those are not modified automaticaly!! So you need to manually checked them')
524 nb = 0
525 name, extension = filepath.rsplit('.', 1)
526
527 while 1:
528 filename = '%s%s%s' %(name, '.moved' * nb, extension)
529 if os.path.exists(pjoin(outputdir, subdir, filename)):
530 nb+=1
531 else:
532 break
533 if not os.path.exists(pjoin(outputdir, subdir)):
534 os.mkdir(pjoin(outputdir, subdir))
535 files.cp(pjoin(dirpath, subdir, filepath), pjoin(outputdir, subdir, filename))
536
def get_particle(self, name):
    """Return the particle of the model with the given name.

    Raises USRMODERROR if no particle with that name exists.
    """
    for part in self.particles:
        if part.name == name:
            return part

    # exception-instance syntax: identical behaviour in python2, and
    # forward-compatible (the comma form is a syntax error in python3)
    raise USRMODERROR('no particle %s in the model' % name)
544
552
def add_particle(self, particle, identify=None):
    """Add a particle in a consistent way.

    particle: the particle object coming from the add-on model.
    identify: optional name of the base-model particle this one must be
              identified with.

    If an equivalent particle already exists, the new one is recorded via
    particle.replace and the mass/width consistency check is returned;
    otherwise the particle is appended (renamed with self.addon on a pure
    name clash).
    """
    name = particle.name
    if identify:
        name = identify
    old_part = next((p for p in self.particles if p.name == name), None)
    if not old_part:
        # fall back on a case-insensitive match, valid only if unique
        first = True
        for p in self.particles:
            if p.name.lower() == name.lower():
                if not first:
                    raise Exception("particle name '%s' matches several particles case-insensitively" % name)
                else:
                    first = False
                    old_part = p

    if old_part:
        if old_part.pdg_code == particle.pdg_code:
            # same particle: keep the base-model object
            particle.replace = old_part
            return self.check_mass_width_of_particle(old_part, particle)
        elif identify:
            if particle.spin != old_part.spin:
                raise USRMODERROR("identify particles should have the same spin")
            elif particle.color != old_part.color:
                raise USRMODERROR("identify particles should have the same color")
            particle.replace = old_part
            return self.check_mass_width_of_particle(old_part, particle)
        else:
            # same name but different pdg code: genuine name clash
            logger.warning('The particle name \'%s\' is present in both model with different pdg code' % name)
            logger.warning('The particle coming from the plug-in model will be rename to \'%s%s\'' % (name, self.addon))
            particle.name = '%s%s' % (name, self.addon)
            self.particles.append(particle)
            return
    elif identify:
        raise USRMODERROR("Particle %s is not in the model" % identify)

    pdg = particle.pdg_code
    if pdg in self.particle_dict:
        # same pdg code under another name: identify with the base particle
        particle.replace = self.particle_dict[pdg]
        return self.check_mass_width_of_particle(self.particle_dict[pdg], particle)
    else:
        # brand new particle: make sure no stale replace marker survives
        if hasattr(particle, 'replace'):
            del particle.replace
        self.particles.append(particle)
602
def check_mass_width_of_particle(self, p_base, p_plugin):
    """Check that the base-model and plug-in definitions of a particle use
    compatible mass/width parameters.

    A 'zero' mass/width on the base side is overwritten by the plug-in
    value; a 'zero' on the plug-in side is ignored. Any other mismatch
    (not explained by the self.old_new renaming table) raises USRMODERROR.
    """
    if p_base.mass.name != p_plugin.mass.name:
        # the plug-in mass may simply have been renamed during the merge
        if p_plugin.mass.name in self.old_new:
            if self.old_new[p_plugin.mass.name] != p_base.mass.name:
                raise USRMODERROR('Some inconsistency in the mass assignment in the model: equivalent of %s is %s != %s ' % (p_plugin.mass.name, self.old_new[p_plugin.mass.name], p_base.mass.name))
        elif p_base.mass.name.lower() == 'zero':
            p_base.mass = p_plugin.mass
        elif p_plugin.mass.name.lower() == 'zero':
            pass
        else:
            # debug output before failing
            misc.sprint(p_base.mass.value, p_plugin.mass.value, dir(p_base.mass))
            misc.sprint(p_base.mass.nature, p_plugin.mass.nature)
            misc.sprint(self.old_new)
            raise USRMODERROR('Some inconsistency in the mass assignment in the model\n' +
                ' Mass: %s and %s\n' % (p_base.mass.name, p_plugin.mass.name) +
                ' conflict name %s\n' % self.old_new +
                ' pdg_code: %s %s' % (p_base.pdg_code, p_plugin.pdg_code))

    if p_base.width.name != p_plugin.width.name:
        # same logic for the width (error messages previously said "mass"
        # here by copy-paste; fixed to say "width")
        if p_plugin.width.name in self.old_new:
            if self.old_new[p_plugin.width.name] != p_base.width.name:
                raise USRMODERROR('Some inconsistency in the width assignment in the model')
        elif p_base.width.name.lower() == 'zero':
            p_base.width = p_plugin.width
        elif p_plugin.width.name.lower() == 'zero':
            pass
        else:
            raise USRMODERROR('Some inconsistency in the width assignment in the model')

    return
636
638 """adding a param_card parameter inside the current model.
639 if the parameter block/lhcode already exists then just do nothing
640 (but if the name are different then keep the info for future translation)
641 If the name already exists in the model. raise an exception.
642 """
643
644 name = parameter.name
645
646 old_param = next((p for p in self.parameters if p.name==name), None)
647 if old_param:
648 if old_param.lhablock == parameter.lhablock and \
649 old_param.lhacode == parameter.lhacode:
650 return
651 else:
652 logger.info('The two model defines the parameter \'%s\'\n' % parameter.name +
653 ' the original model for %s :%s\n' %(old_param.lhablock, old_param.lhacode)+
654 ' the plugin for %s :%s\n' %(parameter.lhablock,parameter.lhacode)+
655 ' We will rename the one from the plugin to %s%s' % (parameter.name, self.addon))
656 if old_param.nature == 'internal':
657 logger.warning('''The parameter %s is actually an internal parameter of the base model.
658 his value is given by %s.
659 If those two parameters are expected to be identical, you need to provide the value in the param_card according to this formula.
660 ''')
661
662 self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
663 parameter.name = '%s%s' % (parameter.name, self.addon)
664
665
666
667
668 lhacode = parameter.lhacode
669 if parameter.lhablock.lower() in ['mass', 'decay']:
670 if int(parameter.lhacode[0]) in identify_pid:
671 lhacode = [identify_pid[int(parameter.lhacode[0])]]
672
673 old_param = next((p for p in self.parameters if p.lhacode==lhacode \
674 and p.lhablock==parameter.lhablock), None)
675 if old_param:
676 logger.info('The two model defines the block \'%s\' with id \'%s\' with different parameter name \'%s\', \'%s\'\n'\
677 % (old_param.lhablock, old_param.lhacode, parameter.name, old_param.name) + \
678 ' We will merge those two parameters in a single one')
679 if parameter.name in self.old_new.values():
680 key = [k for k in self.old_new if self.old_new[k] == parameter.name][0]
681 self.old_new[key] = old_param.name
682 self.old_new[parameter.name] = old_param.name
683 else:
684 self.old_new[parameter.name] = old_param.name
685
686
687 elif parameter.lhablock.lower() in ['mass', 'decay'] and int(parameter.lhacode[0]) in identify_pid:
688
689
690 orig_particle = self.particle_dict[lhacode[0]]
691 if parameter.lhablock.lower() == 'mass':
692 old_param = orig_particle.mass
693 else:
694 old_param = orig_particle.width
695 if old_param.name.lower() == 'zero':
696
697 self.parameters.append(parameter)
698 self.new_external.append(parameter)
699 else:
700 logger.info('The two model defines the parameter for block \'%s\' with id \'%s\' with different parameter name \'%s\', \'%s\'\n'\
701 % (parameter.lhablock.lower(), lhacode[0], parameter.name, old_param.name) + \
702 ' We will merge those two parameters in a single one')
703 if parameter.name in self.old_new.values():
704 key = [k for k in self.old_new if self.old_new[k] == parameter.name][0]
705 self.old_new[key] = old_param.name
706 self.old_new[parameter.name] = old_param.name
707 else:
708 self.old_new[parameter.name] = old_param.name
709
710 else:
711
712 self.parameters.append(parameter)
713 self.new_external.append(parameter)
714
716 """ add a parameter of type internal """
717
718 name = parameter.name
719
720 old_param = next((p for p in self.parameters if p.name==name), None)
721 if old_param:
722 if old_param.value == parameter.value:
723 return
724 else:
725 if self.old_new:
726 pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
727 def replace(matchobj):
728 return self.old_new[matchobj.group(0)]
729 parameter.value = pattern.sub(replace, parameter.value)
730 self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
731
732 parameter.name = '%s%s' % (parameter.name, self.addon)
733 self.parameters.append(parameter)
734 return
735
736
737 if self.old_new:
738 pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
739 def replace(matchobj):
740 return self.old_new[matchobj.group(0)]
741 parameter.value = pattern.sub(replace, parameter.value)
742
743 self.parameters.append(parameter)
744
745
746
747
749 """add one coupling"""
750
751
752 name = coupling.name
753 same_name = next((p for p in self.couplings if p.name==name), None)
754 if same_name:
755 coupling.name = '%s%s' % (coupling.name, self.addon)
756
757 if self.old_new:
758 pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
759 def replace(matchobj):
760 return self.old_new[matchobj.group(0)]
761 coupling.value = pattern.sub(replace, coupling.value)
762
763 old_coupling = next((p for p in self.couplings if p.value==coupling.value), None)
764
765 if old_coupling:
766 coupling.replace = old_coupling
767 else:
768 self.couplings.append(coupling)
769
771 """adding a new coupling order inside the model"""
772
773 name = coupling_order.name
774 same_name = next((p for p in self.orders if p.name==name), None)
775 if same_name:
776 if coupling_order.hierarchy != same_name.hierarchy:
777 logger.warning('%s has different hierarchy use the minimal value (%s, %s) => %s' \
778 % (name, same_name.hierarchy, coupling_order.hierarchy,
779 min(same_name.hierarchy, coupling_order.hierarchy)))
780 same_name.hierarchy = min(same_name.hierarchy, coupling_order.hierarchy)
781 if coupling_order.expansion_order != same_name.expansion_order:
782 logger.warning('%s has different expansion_order use the minimal value (%s, %s) => %s' \
783 % (name, coupling_order.expansion_order, same_name.expansion_order,
784 min(same_name.expansion_order, coupling_order.expansion_order)))
785 same_name.expansion_order = min(same_name.expansion_order, coupling_order.expansion_order)
786 if hasattr(same_name, 'perturbative_expansion') and same_name.perturbative_expansion:
787 logger.info('%s will be forbidden to run at NLO' % same_name.name)
788 same_name.perturbative_expansion = 0
789
790
791 else:
792 self.orders.append(coupling_order)
793
795 """add one coupling"""
796
797
798 name = lorentz.name
799 same_name = next((p for p in self.lorentz if p.name==name), None)
800 if same_name:
801 lorentz.name = '%s%s' % (lorentz.name, self.addon)
802
803 if self.old_new:
804 pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
805 def replace(matchobj):
806 return self.old_new[matchobj.group(0)]
807 lorentz.structure = pattern.sub(replace, lorentz.structure)
808
809 old_lor = next((p for p in self.lorentz
810 if p.structure==lorentz.structure and p.spins == lorentz.spins),
811 None)
812
813 if old_lor:
814 lorentz.replace = old_lor
815 else:
816 self.lorentz.append(lorentz)
817
819 """Add one interaction to the model. This is UNCONDITIONAL!
820 if the same interaction is in the model this means that the interaction
821 will appear twice. This is now weaken if both interaction are exactly identical!
822 (EXACT same color/lorentz/coupling expression)
823 """
824
825 interaction = interaction.__class__(**interaction.__dict__)
826 model.all_vertices.pop(-1)
827
828
829 name = interaction.name
830 same_name = next((p for p in self.vertices if p.name==name), None)
831 if same_name:
832 interaction.name = '%s%s' % (interaction.name, self.addon)
833
834
835 particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
836 interaction.particles = particles
837
838 lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
839 interaction.lorentz = lorentz
840
841
842 couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
843 for key, c in interaction.couplings.items()]
844 interaction.couplings = dict(couplings)
845
846
847
848
849 get_pdg = lambda vertex: sorted([p.pdg_code for p in vertex.particles])
850 id_part = get_pdg(interaction)
851 iden_vertex = [v for v in self.vertices if get_pdg(v) == id_part]
852 iden = False
853 nb_coupling = len(interaction.couplings)
854 keys = interaction.couplings.keys()
855
856 get_lor_and_color = lambda i: (interaction.lorentz[keys[i][1]].structure,
857 interaction.color[keys[i][0]])
858 for v in iden_vertex:
859 if len(v.couplings) != nb_coupling:
860 continue
861 found = []
862 for ((i,j), coup) in v.couplings.items():
863 new_lorentz = v.lorentz[j].structure
864 new_color = v.color[i]
865 k=0
866 same = [k for k in range(nb_coupling) if k not in found and
867 get_lor_and_color(k) == (new_lorentz, new_color)]
868 if not same:
869 break
870 else:
871 for k in same:
872 if interaction.couplings[keys[k]] == coup:
873 found.append(k)
874 break
875 else:
876
877 for k in same:
878 if interaction.couplings[keys[k]].order == coup.order:
879 found.append(k)
880 warning = """Did NOT add interaction %s since same particles/lorentz/color/coupling order
881 BUT did not manage to ensure that the coupling is the same. couplings expression:
882 base model: %s
883 addon model: %s
884 """ % (id_part, coup.value, interaction.couplings[keys[k]].value)
885 logger.warning(warning)
886 found.append(k)
887 break
888 else:
889 pass
890
891 else:
892
893 return
894
895 logger.info('Adding interaction for the following particles: %s' % id_part)
896
897
898
899
900 self.vertices.append(interaction)
901
903 """Add one interaction to the model. This is UNCONDITIONAL!
904 if the same interaction is in the model this means that the interaction
905 will appear twice."""
906
907
908 name = interaction.name
909 same_name = next((p for p in self.vertices if p.name==name), None)
910 if same_name:
911 interaction.name = '%s%s' % (interaction.name, self.addon)
912
913
914 particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
915 interaction.particles = particles
916
917
918 lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
919 interaction.lorentz = lorentz
920
921
922 couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
923 for key, c in interaction.couplings.items()]
924 interaction.couplings = dict(couplings)
925
926
927
928 loop_particles=[ [p.replace if hasattr(p, 'replace') else p for p in plist]
929 for plist in interaction.loop_particles]
930 interaction.loop_particles = loop_particles
931 self.CTvertices.append(interaction)
932
933
def add_model(self, model=None, path=None, identify_particles=None):
    """Add another model in the current one.

    model: an already-loaded UFO model object (or None).
    path: path of the UFO model to load when model is None.
    identify_particles: optional dict {addon_name: base_name} of particles
        to identify across the two models.

    Raises USRMODERROR when the add-on model does not follow the UFO
    conventions or when an identification cannot be resolved.
    """
    self.new_external = []
    if path:
        model = ufomodels.load_model(path)

    if not model:
        raise USRMODERROR('Need a valid Model')
    else:
        path = model.__path__[0]

    if not hasattr(model, 'all_orders'):
        raise USRMODERROR('Add-on Model doesn\'t follows UFO convention (no couplings_order information)\n' +
              'MG5 is able to load such model but NOT to the add model feature.')
    if isinstance(model.all_particles[0].mass, basestring):
        raise USRMODERROR('Add-on Model doesn\'t follows UFO convention (Mass/Width of particles are string name, not object)\n' +
              'MG5 is able to load such model but NOT to the add model feature.')

    for order in model.all_orders:
        if hasattr(order, 'perturbative_expansion') and order.perturbative_expansion:
            raise USRMODERROR('Add-on model can not be loop model.')

    for order in model.all_orders:
        self.add_coupling_order(order)

    # map: pdg code in the add-on model -> pdg code in the base model
    identify_pid = {}
    if identify_particles:
        # NOTE: items() returns a list copy in python2, so extending
        # identify_particles inside the loop (for anti-particles) is safe
        for new, old in identify_particles.items():
            new_part = next((p for p in model.all_particles if p.name == new), None)
            old_part = next((p for p in self.particles if p.name == old), None)

            # fall back on a case-insensitive match, valid only if unique
            if not new_part:
                first = True
                for p in model.all_particles:
                    if p.name.lower() == new.lower():
                        if not first:
                            raise Exception("particle name '%s' matches several particles case-insensitively" % new)
                        else:
                            first = False
                            new_part = p
            if not old_part:
                first = True
                for p in self.particles:
                    if p.name.lower() == old.lower():
                        if not first:
                            raise Exception("particle name '%s' matches several particles case-insensitively" % old)
                        else:
                            first = False
                            old_part = p
            if not old_part:
                # last resort: resolve via the default particle names
                defaultname = base_objects.Model.load_default_name()
                for pdg, value in defaultname.items():
                    if value == old:
                        old_part = self.particle_dict[pdg]
                        identify_particles[new] = old_part.name
                        break

            # validate BEFORE dereferencing: the original code filled
            # identify_pid first and so died with an AttributeError
            # instead of the intended USRMODERROR when a lookup failed
            if new_part is None:
                raise USRMODERROR("particle %s not in added model" % new)
            if old_part is None:
                raise USRMODERROR("particle %s not in original model" % old)
            identify_pid[new_part.pdg_code] = old_part.pdg_code

            if new_part.antiname not in identify_particles:
                # propagate the identification to the anti-particles
                new_anti = new_part.antiname
                old_anti = old_part.antiname
                if old_anti == old:
                    raise USRMODERROR("failed identification (one particle is self-conjugate and not the other)")
                logger.info("adding identification for anti-particle: %s=%s" % (new_anti, old_anti))
                identify_particles[new_anti] = old_anti

    # merge every component of the add-on model into the current one
    for parameter in model.all_parameters:
        self.add_parameter(parameter, identify_pid)
    for coupling in model.all_couplings:
        self.add_coupling(coupling)
    for lorentz in model.all_lorentz:
        self.add_lorentz(lorentz)
    for particle in model.all_particles:
        if particle.name in identify_particles:
            self.add_particle(particle, identify=identify_particles[particle.name])
        else:
            self.add_particle(particle)
    for vertex in model.all_vertices:
        self.add_interaction(vertex, model)

    self.all_path.append(path)

    return
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064