"""Set of tools to modify a given UFO model, mainly by adding or suppressing
interactions, while also allowing the different parts of the model to be
edited as text. Consistency checks of the model are performed, and the
result is a new, valid UFO model.
"""
import copy
import glob
import logging
import os
import re
import sys

import madgraph.core.base_objects as base_objects
import madgraph.iolibs.files as files
import madgraph.various.misc as misc
import models as ufomodels
import models.import_ufo as import_ufo
import models.check_param_card as check_param_card
from madgraph import MG5DIR

pjoin = os.path.join
logger = logging.getLogger('madgraph.model')

class USRMODERROR(Exception): pass


def repr(obj):
    """Return the object's representation; names starting with an underscore
    get the class initial prepended. (Note: this shadows the built-in repr
    inside this module.)"""
    text = obj.__repr__()
    if text.startswith('_'):
        text = '%s%s' % (str(obj.__class__.__name__)[0].upper(), text)
    return text


class UFOModel(object):
    """ The class storing the current status of the model """

    def __init__(self, modelpath, addon='__1'):
        """load the model from a valid UFO directory (otherwise keep everything
        as empty)."""

        self.modelpath = modelpath
        model = ufomodels.load_model(modelpath)

        if not hasattr(model, 'all_orders'):
            raise USRMODERROR('Base model doesn\'t follow the UFO convention (no couplings_order information)\n' +
                              'MG5 is able to load such a model but NOT to use it with the add-model feature.')
        if isinstance(model.all_particles[0].mass, basestring):
            raise USRMODERROR('Base model doesn\'t follow the UFO convention (Mass/Width of particles are string names, not objects)\n' +
                              'MG5 is able to load such a model but NOT to use it with the add-model feature.')

        old_particles = [id(p) for p in model.all_particles]
        self.particles = [copy.copy(p) for p in model.all_particles]
        if any(hasattr(p, 'loop_particles') for p in self.particles):
            raise USRMODERROR('Base model doesn\'t follow the UFO convention (loop_particles attribute found)')
        self.vertices = list(model.all_vertices)

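        # The particles were shallow-copied above, so the vertices still point to
        # the particle objects of the original model; remap them onto our copies
        # so that later modifications (renaming, mass/width changes) propagate.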
        for v in self.vertices:
            new_p = []
            for p in v.particles:
                try:
                    new_p.append(self.particles[old_particles.index(id(p))])
                except ValueError:
                    p3 = [p2 for p2 in self.particles if p2.name == p.name and p2.pdg_code == p.pdg_code]
                    new_p.append(p3[0])
            v.particles = new_p

        self.couplings = list(model.all_couplings)
        self.lorentz = list(model.all_lorentz)
        self.parameters = list(model.all_parameters)
        self.Parameter = self.parameters[0].__class__
        self.orders = list(model.all_orders)

        self.functions = list(model.all_functions)
        self.new_external = []

        if hasattr(model, 'all_propagators'):
            self.propagators = list(model.all_propagators)
        else:
            self.propagators = []

        if hasattr(model, 'all_CTvertices'):
            self.CTvertices = list(model.all_CTvertices)
        else:
            self.CTvertices = []

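        # Older versions of object_library store the coupling expression under
        # the attribute 'expr' instead of 'expression'; remember the
        # attribute-name translation so that the written files stay consistent.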
        if 'self.expr = expression' in open(pjoin(self.modelpath, 'object_library.py')).read():
            self.translate = {'expr': 'expression'}
        else:
            self.translate = {}

        self.old_new = {}
        self.addon = addon

        self.particle_dict = {}
        for particle in self.particles:
            self.particle_dict[particle.pdg_code] = particle

        self.all_path = [self.modelpath]

    def write(self, outputdir):
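        """Write the merged model as a new UFO directory.

        NOTE: reconstructed sketch -- the original body is not reproduced here;
        the lines below simply orchestrate the write_* helpers defined further
        down and may differ in detail from the upstream implementation.
        """
        if not os.path.exists(outputdir):
            os.mkdir(outputdir)
        # copy the static support files of the base model (assumed names)
        for name in ['__init__.py', 'object_library.py', 'write_param_card.py']:
            if os.path.exists(pjoin(self.modelpath, name)):
                files.cp(pjoin(self.modelpath, name), outputdir)
        # write the merged content
        self.write_particles(outputdir)
        self.write_vertices(outputdir)
        self.write_ctvertices(outputdir)
        self.write_couplings(outputdir)
        self.write_lorentz(outputdir)
        self.write_parameters(outputdir)
        self.write_orders(outputdir)
        self.write_functions(outputdir)
        self.write_propagators(outputdir)
        # propagate restriction cards and hand-written helper routines
        self.write_restrict_card(outputdir)
        self.write_external_files(outputdir)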

    def mod_file(self, inputpath, outputpath):
        """Copy one text file of the model, rewriting every symbol that was
        renamed during the merge and fixing the case of particle names."""

        fsock = open(outputpath, 'w')

        to_change = {}
        to_change.update(self.translate)
        to_change.update(self.old_new)
        for particle in self.particles:
            if hasattr(particle, 'replace') and particle.replace:
                misc.sprint(particle.get('name'), particle.replace.get('name'))

        # avoid an empty alternation (which would match the empty string)
        pattern = re.compile(r'\b(%s)\b' % '|'.join(to_change)) if to_change else None

        all_particles_name = [self.format_param(P)[2:] for P in self.particles]
        all_lower = [p.lower() for p in all_particles_name]
        pat2 = re.compile(r'\bP\.(\w+)\b')

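        # The add-on files may refer to particles with a different capitalisation
        # than the merged particles.py; rewrite any P.<name> reference whose case
        # does not match the name actually defined in the merged model.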
        for line in open(inputpath):
            if pattern:
                line = pattern.sub(lambda mo: to_change[mo.group()], line)
            part_in_line = set(pat2.findall(line))

            to_replace = {}
            for p in part_in_line:
                if p in all_particles_name:
                    continue
                else:
                    ind = all_lower.index(p.lower())
                    to_replace[p] = all_particles_name[ind]
            if to_replace:
                pat3 = re.compile(r'\bP\.(%s)\b' % '|'.join(p for p in to_replace))
                line = pat3.sub(lambda mo: 'P.%s' % to_replace[mo.groups(0)[0]], line)
            fsock.write(line)

    def write_restrict_card(self, outputdir):
        """ propagate the model restrictions of the original model. """

        restrict_list = [l for l in os.listdir(self.modelpath) if l.startswith('restrict_')]
        if not self.new_external:
            # no new external parameter: the restriction cards can be copied as is
            for p in restrict_list:
                files.cp(pjoin(self.modelpath, p), outputdir)
        else:
            # add the new external parameters to each restriction card
            for p in restrict_list:
                param_card = check_param_card.ParamCard(pjoin(self.modelpath, p))
                for parameter in self.new_external:
                    block = parameter.lhablock
                    lhaid = parameter.lhacode
                    value = parameter.value
                    # avoid values that the restriction procedure treats as special
                    if value == 0:
                        value = 1e-99
                    elif value == 1:
                        value = 9.999999e-1
                    try:
                        param_card.add_param(block.lower(), lhaid, value, 'from addon')
                    except check_param_card.InvalidParamCard:
                        logger.warning("%s will not act on %s %s" % (p, block, lhaid))
                        param_card[block.lower()].get(lhaid).value = value

                param_card.write(pjoin(outputdir, p), precision=7)

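    # NOTE: reconstructed sketch -- the original format_param body is not shown
    # here. The rest of this class only relies on the behaviour sketched below:
    # UFO objects are rendered as 'Param.X' / 'P.x' / 'C.x' / 'L.x', containers
    # recursively, strings quoted and numbers verbatim.
    def format_param(self, param):
        """Return the python source representation of one attribute value."""

        if param is None:
            return 'None'
        elif isinstance(param, basestring):
            return "'%s'" % param
        elif isinstance(param, (int, float, complex)):
            return str(param)
        elif isinstance(param, (list, tuple)):
            out = ', '.join(self.format_param(p) for p in param)
            return '[%s]' % out if isinstance(param, list) else '(%s)' % out
        elif isinstance(param, dict):
            return '{%s}' % ', '.join('%s: %s' % (self.format_param(k), self.format_param(v))
                                      for k, v in param.items())
        else:
            # UFO object: prefix with the alias used in the generated files
            prefix = {'Parameter': 'Param', 'Particle': 'P',
                      'Coupling': 'C', 'Lorentz': 'L'}.get(param.__class__.__name__)
            if prefix is None:
                raise USRMODERROR('Unknown type %s for %s' % (type(param), param))
            return '%s.%s' % (prefix, repr(param))
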
    def create_data_text(self, obj):
        """ create the declaration text associated with the object """

        nb_space = 0
        if hasattr(obj, 'require_args_all'):
            args = obj.require_args_all
        elif hasattr(obj, 'require_args'):
            args = obj.require_args
        else:
            args = []
        if args:
            text = """%s = %s(""" % (repr(obj), obj.__class__.__name__)
        else:
            text = """%s = %s(""" % (obj.name, obj.__class__.__name__)

        for data in args:
            if data in self.translate:
                data = self.translate[data]
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0

            try:
                expr = getattr(obj, data)
            except AttributeError:
                # optional attributes may be missing on objects from older models
                if data in ['counterterm', 'propagator', 'loop_particles']:
                    expr = None
                    setattr(obj, data, None)
                else:
                    raise
            name = str(data)
            if name in self.translate:
                name = self.translate[name]

            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        if hasattr(obj, 'get_all'):
            other_attr = [name for name in obj.get_all().keys()
                          if name not in args]
        else:
            other_attr = obj.__dict__.keys()

        other_attr.sort()
        if other_attr == ['GhostNumber', 'LeptonNumber', 'Y', 'partial_widths', 'selfconjugate']:
            other_attr = ['GhostNumber', 'LeptonNumber', 'Y', 'selfconjugate']

        for data in other_attr:
            name = str(data)
            if name in ['partial_widths', 'loop_particles']:
                continue
            if name in self.translate:
                name = self.translate[name]
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0
            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        text = text[:-2] + ')\n\n'

        return text

    def create_file_content(self, datalist):
        """ return the text declaring all the objects of the list """
        return '\n'.join([self.create_data_text(obj) for obj in datalist])

    def write_particles(self, outputdir):
        """ write the particles.py file """
        text = """
# This file was automatically created by The UFO_usermod

from __future__ import division
from object_library import all_particles, Particle
import parameters as Param

"""
        text += self.create_file_content(self.particles)
        ff = open(os.path.join(outputdir, 'particles.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_vertices(self, outputdir):
        """ write the vertices.py file """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_vertices, Vertex
import particles as P
import couplings as C
import lorentz as L

"""
        text += self.create_file_content(self.vertices)
        ff = open(os.path.join(outputdir, 'vertices.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_ctvertices(self, outputdir):
        """ write the CT_vertices.py file (loop models only) """

        if not self.CTvertices:
            return

        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_vertices, all_CTvertices, Vertex, CTVertex
import particles as P
import couplings as C
import lorentz as L

"""
        text += self.create_file_content(self.CTvertices)
        ff = open(os.path.join(outputdir, 'CT_vertices.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_couplings(self, outputdir):
        """ write the couplings.py file """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_couplings, Coupling
"""
        text += self.create_file_content(self.couplings)
        ff = open(os.path.join(outputdir, 'couplings.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_lorentz(self, outputdir):
        """ write the lorentz.py file """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_lorentz, Lorentz
"""

        text += self.create_file_content(self.lorentz)
        ff = open(os.path.join(outputdir, 'lorentz.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_parameters(self, outputdir):
        """ write the parameters.py file """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_parameters, Parameter
"""

        text += self.create_file_content(self.parameters)
        ff = open(os.path.join(outputdir, 'parameters.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_orders(self, outputdir):
        """ write the coupling_orders.py file """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_orders, CouplingOrder
"""

        text += self.create_file_content(self.orders)
        ff = open(os.path.join(outputdir, 'coupling_orders.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_functions(self, outputdir):
        """ write the function_library.py file """
        text = """
# This file was automatically created by The UFO_usermod

import cmath
from object_library import all_functions, Function

"""

        text += self.create_file_content(self.functions)
        ff = open(os.path.join(outputdir, 'function_library.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_propagators(self, outputdir):
        """ write the propagators.py file """

        text = """
# This file was automatically created by The UFO_usermod
from object_library import all_propagators, Propagator
"""

        text += self.create_file_content(self.propagators)
        ff = open(os.path.join(outputdir, 'propagators.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_external_files(self, outputdir):
        """Copy/merge the routines written in Fortran/C++/Python."""

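        # Collect the names of the Fortran functions provided by every model and
        # warn if the same function is defined more than once: the concatenated
        # functions.f would then need manual editing.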
        re_fct = re.compile(r'''^\s{7,70}[\w\s]*function (\w*)\(''', re.M | re.I)
        present_fct = set()
        for dirpath in self.all_path:
            if os.path.exists(pjoin(dirpath, 'Fortran', 'functions.f')):
                text = open(pjoin(dirpath, 'Fortran', 'functions.f')).read()
                new_fct = re_fct.findall(text)
                nb_old = len(present_fct)
                nb_added = len(new_fct)
                new_fct = set([f.lower() for f in new_fct])
                present_fct.update(new_fct)
                if len(present_fct) < nb_old + nb_added:
                    logger.critical('''Some functions in functions.f are defined in more than one model.
    This requires AT LEAST a manual modification of the resulting file, and more likely the
    resulting model has to be considered as unphysical! Use it very carefully.''')

                if not os.path.exists(pjoin(outputdir, 'Fortran')):
                    os.mkdir(pjoin(outputdir, 'Fortran'))
                fsock = open(pjoin(outputdir, 'Fortran', 'functions.f'), 'a')
                fsock.write(text)
                fsock.close()

        # Any other hand-written helper file (HELAS routines, ...) is copied as is.
        for dirpath in self.all_path:
            for subdir in ['Fortran', 'CPP', 'Python']:
                if os.path.exists(pjoin(dirpath, subdir)):
                    for filepath in os.listdir(pjoin(dirpath, subdir)):
                        if filepath == 'functions.f':
                            continue
                        if '.' not in filepath:
                            continue
                        logger.warning('Manual HELAS routines are associated with the model. Those are NOT modified automatically, so you need to check them by hand.')
                        nb = 0
                        name, extension = filepath.rsplit('.', 1)

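                        # if a file with the same name was already copied from a
                        # previous model, keep both by appending '.moved' to the name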
                        while 1:
                            filename = '%s%s.%s' % (name, '.moved' * nb, extension)
                            if os.path.exists(pjoin(outputdir, subdir, filename)):
                                nb += 1
                            else:
                                break
                        if not os.path.exists(pjoin(outputdir, subdir)):
                            os.mkdir(pjoin(outputdir, subdir))
                        files.cp(pjoin(dirpath, subdir, filepath), pjoin(outputdir, subdir, filename))

    def get_particle(self, name):
        """ return the particle with the given name """
        for part in self.particles:
            if part.name == name:
                return part

        raise USRMODERROR('no particle %s in the model' % name)

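    # NOTE: reconstructed dispatcher -- the original lines are not reproduced
    # here. add_model() feeds every parameter of the add-on model through
    # add_parameter(), so external and internal parameters are routed to the
    # two dedicated helpers defined below.
    def add_parameter(self, parameter, identify_pid={}):
        """add one parameter of the add-on model (external or internal)"""
        if parameter.nature == 'external':
            self.add_external_parameter(parameter, identify_pid)
        else:
            self.add_internal_parameter(parameter)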

    def add_particle(self, particle, identify=None):
        """Add a particle in a consistent way"""

        name = particle.name
        if identify:
            name = identify
        old_part = next((p for p in self.particles if p.name == name), None)
        if not old_part:
            # fall back on a case-insensitive match, which must be unique
            first = True
            for p in self.particles:
                if p.name.lower() == name.lower():
                    if not first:
                        raise Exception('the particle name %s is ambiguous (several case-insensitive matches)' % name)
                    else:
                        first = False
                        old_part = p

        if old_part:
            if old_part.pdg_code == particle.pdg_code:
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            elif identify:
                if particle.spin != old_part.spin:
                    raise USRMODERROR("identified particles should have the same spin")
                elif particle.color != old_part.color:
                    raise USRMODERROR("identified particles should have the same color")
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            else:
                logger.warning('The particle name \'%s\' is present in both models with different pdg codes' % name)
                logger.warning('The particle coming from the plug-in model will be renamed to \'%s%s\'' % (name, self.addon))
                particle.name = '%s%s' % (name, self.addon)
                self.particles.append(particle)
                return
        elif identify:
            raise USRMODERROR("Particle %s is not in the model" % identify)

        pdg = particle.pdg_code
        if pdg in self.particle_dict:
            particle.replace = self.particle_dict[pdg]
            return self.check_mass_width_of_particle(self.particle_dict[pdg], particle)
        else:
            if hasattr(particle, 'replace'):
                del particle.replace
            self.particles.append(particle)

    def check_mass_width_of_particle(self, p_base, p_plugin):
        """check that the mass and width of two identified particles refer to the
        same external parameter; when one of the two is 'zero', keep the other one."""

        if p_base.mass.name != p_plugin.mass.name:
            if p_plugin.mass.name in self.old_new:
                if self.old_new[p_plugin.mass.name] != p_base.mass.name:
                    raise USRMODERROR('Some inconsistency in the mass assignment in the model: equivalent of %s is %s != %s ' % (p_plugin.mass.name, self.old_new[p_plugin.mass.name], p_base.mass.name))
            elif p_base.mass.name.lower() == 'zero':
                p_base.mass = p_plugin.mass
            elif p_plugin.mass.name.lower() == 'zero':
                pass
            else:
                raise USRMODERROR('Some inconsistency in the mass assignment in the model\n' +
                                  ' Mass: %s and %s\n' % (p_base.mass.name, p_plugin.mass.name) +
                                  ' conflict name %s\n' % self.old_new +
                                  ' pdg_code: %s %s' % (p_base.pdg_code, p_plugin.pdg_code))

        if p_base.width.name != p_plugin.width.name:
            if p_plugin.width.name in self.old_new:
                if self.old_new[p_plugin.width.name] != p_base.width.name:
                    raise USRMODERROR('Some inconsistency in the width assignment in the model')
            elif p_base.width.name.lower() == 'zero':
                p_base.width = p_plugin.width
            elif p_plugin.width.name.lower() == 'zero':
                pass
            else:
                raise USRMODERROR('Some inconsistency in the width assignment in the model')

        return


    def add_external_parameter(self, parameter, identify_pid={}):
        """add a param_card parameter to the current model.
        If the parameter block/lhacode already exists, do nothing
        (but if the names differ, keep the information for future translation).
        If the name already exists in the model, raise an exception.
        """

        name = parameter.name

        old_param = next((p for p in self.parameters if p.name == name), None)
        if old_param:
            if old_param.lhablock == parameter.lhablock and \
                                     old_param.lhacode == parameter.lhacode:
                return
            else:
                logger.info('The two models define the parameter \'%s\'\n' % parameter.name +
                            '    the original model for %s : %s\n' % (old_param.lhablock, old_param.lhacode) +
                            '    the plug-in model for %s : %s\n' % (parameter.lhablock, parameter.lhacode) +
                            '    The one from the plug-in will be renamed to %s%s' % (parameter.name, self.addon))
                if old_param.nature == 'internal':
                    logger.warning('''The parameter %s is actually an internal parameter of the base model.
    Its value is given by %s.
    If those two parameters are expected to be identical, you need to provide the value in the param_card according to this formula.
    ''' % (name, old_param.value))

                self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
                parameter.name = '%s%s' % (parameter.name, self.addon)

        # check whether the block/lhacode combination is already present (the pdg
        # codes of identified particles are remapped first for mass/decay blocks)
        lhacode = parameter.lhacode
        if parameter.lhablock.lower() in ['mass', 'decay']:
            if int(parameter.lhacode[0]) in identify_pid:
                lhacode = [identify_pid[int(parameter.lhacode[0])]]

        old_param = next((p for p in self.parameters if p.lhacode == lhacode
                          and p.lhablock == parameter.lhablock), None)
        if old_param:
            logger.info('The two models define the block \'%s\' with id \'%s\' with different parameter names \'%s\', \'%s\'\n'
                        % (old_param.lhablock, old_param.lhacode, parameter.name, old_param.name) +
                        '     Those two parameters will be merged into a single one')
            if parameter.name in self.old_new.values():
                key = [k for k in self.old_new if self.old_new[k] == parameter.name][0]
                self.old_new[key] = old_param.name
                self.old_new[parameter.name] = old_param.name
            else:
                self.old_new[parameter.name] = old_param.name
        else:
            self.parameters.append(parameter)
            self.new_external.append(parameter)

    def add_internal_parameter(self, parameter):
        """ add a parameter of type internal """

        name = parameter.name

        old_param = next((p for p in self.parameters if p.name == name), None)
        if old_param:
            if old_param.value == parameter.value:
                return
            else:
                if self.old_new:
                    pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
                    def replace(matchobj):
                        return self.old_new[matchobj.group(0)]
                    parameter.value = pattern.sub(replace, parameter.value)
                self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
                parameter.name = '%s%s' % (parameter.name, self.addon)
                self.parameters.append(parameter)
                return

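        # genuinely new internal parameter: rewrite its analytic expression with
        # any symbol that was renamed while merging, then store it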
        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            parameter.value = pattern.sub(replace, parameter.value)

        self.parameters.append(parameter)

    def add_coupling(self, coupling):
        """add one coupling, re-using an existing one when the expression is identical"""

        name = coupling.name
        same_name = next((p for p in self.couplings if p.name == name), None)
        if same_name:
            coupling.name = '%s%s' % (coupling.name, self.addon)

        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            coupling.value = pattern.sub(replace, coupling.value)

        old_coupling = next((p for p in self.couplings if p.value == coupling.value), None)

        if old_coupling:
            coupling.replace = old_coupling
        else:
            self.couplings.append(coupling)

    def add_coupling_order(self, coupling_order):
        """add a new coupling order to the model"""

        name = coupling_order.name
        same_name = next((p for p in self.orders if p.name == name), None)
        if same_name:
            if coupling_order.hierarchy != same_name.hierarchy:
                logger.warning('%s has a different hierarchy in the two models; using the minimal value (%s, %s) => %s'
                               % (name, same_name.hierarchy, coupling_order.hierarchy,
                                  min(same_name.hierarchy, coupling_order.hierarchy)))
                same_name.hierarchy = min(same_name.hierarchy, coupling_order.hierarchy)
            if coupling_order.expansion_order != same_name.expansion_order:
                logger.warning('%s has a different expansion_order in the two models; using the minimal value (%s, %s) => %s'
                               % (name, coupling_order.expansion_order, same_name.expansion_order,
                                  min(same_name.expansion_order, coupling_order.expansion_order)))
                same_name.expansion_order = min(same_name.expansion_order, coupling_order.expansion_order)
            if hasattr(same_name, 'perturbative_expansion') and same_name.perturbative_expansion:
                logger.info('%s will be forbidden to run at NLO' % same_name.name)
                same_name.perturbative_expansion = 0
        else:
            self.orders.append(coupling_order)

    def add_lorentz(self, lorentz):
        """add one lorentz structure, re-using an existing identical one when possible"""

        name = lorentz.name
        same_name = next((p for p in self.lorentz if p.name == name), None)
        if same_name:
            lorentz.name = '%s%s' % (lorentz.name, self.addon)

        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            lorentz.structure = pattern.sub(replace, lorentz.structure)

        old_lor = next((p for p in self.lorentz
                        if p.structure == lorentz.structure and p.spins == lorentz.spins),
                       None)

        if old_lor:
            lorentz.replace = old_lor
        else:
            self.lorentz.append(lorentz)

    def add_interaction(self, interaction, model):
        """Add one interaction to the model. This is UNCONDITIONAL: if the same
        interaction is already present, it will appear twice, unless both
        interactions are exactly identical (EXACT same color/lorentz/coupling
        expression), in which case the new one is dropped.
        """

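        # Instantiating the vertex again gives us a private copy to modify, but it
        # also appends the copy to the add-on model's all_vertices list; pop it
        # back out so that model is left untouched.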
        interaction = interaction.__class__(**interaction.__dict__)
        model.all_vertices.pop(-1)

        name = interaction.name
        same_name = next((p for p in self.vertices if p.name == name), None)
        if same_name:
            interaction.name = '%s%s' % (interaction.name, self.addon)

        # point the copy to the merged objects (particles/lorentz/couplings that
        # were identified with objects of the base model carry a 'replace' attribute)
        particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
        interaction.particles = particles

        lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
        interaction.lorentz = lorentz

        couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
                     for key, c in interaction.couplings.items()]
        interaction.couplings = dict(couplings)

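        # Look for an already existing vertex with the same set of external pdg
        # codes; the new vertex is dropped only if every one of its couplings can
        # be matched (same lorentz structure, same color and same coupling) to a
        # coupling of that vertex.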
        get_pdg = lambda vertex: sorted([p.pdg_code for p in vertex.particles])
        id_part = get_pdg(interaction)
        iden_vertex = [v for v in self.vertices if get_pdg(v) == id_part]
        nb_coupling = len(interaction.couplings)
        keys = interaction.couplings.keys()

        get_lor_and_color = lambda i: (interaction.lorentz[keys[i][1]].structure,
                                       interaction.color[keys[i][0]])
        for v in iden_vertex:
            if len(v.couplings) != nb_coupling:
                continue
            found = []
            for ((i, j), coup) in v.couplings.items():
                new_lorentz = v.lorentz[j].structure
                new_color = v.color[i]
                same = [k for k in range(nb_coupling) if k not in found and
                        get_lor_and_color(k) == (new_lorentz, new_color)]
                if not same:
                    break
                for k in same:
                    if interaction.couplings[keys[k]] == coup:
                        found.append(k)
                        break
                else:
                    # no strictly identical coupling: accept one with the same
                    # coupling order but warn that equality was not established
                    for k in same:
                        if interaction.couplings[keys[k]].order == coup.order:
                            logger.warning("""Did NOT add the interaction %s: same particles/lorentz/color/coupling order,
    but the equality of the couplings could not be established. Coupling expressions:
    base model: %s
    addon model: %s""" % (id_part, coup.value, interaction.couplings[keys[k]].value))
                            found.append(k)
                            break
            else:
                # every coupling of the candidate vertex was matched: the
                # interaction is already present, do not add it again
                return

        logger.info('Adding an interaction for the following particles: %s' % id_part)
        self.vertices.append(interaction)

    def add_CTinteraction(self, interaction):
        """Add one counterterm interaction to the model. This is UNCONDITIONAL:
        if the same interaction is already in the model it will appear twice."""

        name = interaction.name
        same_name = next((p for p in self.vertices if p.name == name), None)
        if same_name:
            interaction.name = '%s%s' % (interaction.name, self.addon)

        particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
        interaction.particles = particles

        lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
        interaction.lorentz = lorentz

        couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
                     for key, c in interaction.couplings.items()]
        interaction.couplings = dict(couplings)

        loop_particles = [[p.replace if hasattr(p, 'replace') else p for p in plist]
                          for plist in interaction.loop_particles]
        interaction.loop_particles = loop_particles
        self.CTvertices.append(interaction)

    def add_model(self, model=None, path=None, identify_particles=None):
        """add another model to the current one"""

        self.new_external = []
        if path:
            model = ufomodels.load_model(path)

        if not model:
            raise USRMODERROR('Need a valid Model')
        else:
            path = model.__path__[0]

        if not hasattr(model, 'all_orders'):
            raise USRMODERROR('Add-on model doesn\'t follow the UFO convention (no couplings_order information)\n' +
                              'MG5 is able to load such a model but NOT to use it with the add-model feature.')
        if isinstance(model.all_particles[0].mass, basestring):
            raise USRMODERROR('Add-on model doesn\'t follow the UFO convention (Mass/Width of particles are string names, not objects)\n' +
                              'MG5 is able to load such a model but NOT to use it with the add-model feature.')

        for order in model.all_orders:
            if hasattr(order, 'perturbative_expansion') and order.perturbative_expansion:
                raise USRMODERROR('An add-on model can not be a loop model.')

        for order in model.all_orders:
            self.add_coupling_order(order)

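        # Map the particles that the user asked to identify with each other
        # (add-on name -> base-model name); build the corresponding pdg-code map,
        # which is needed to merge the mass/decay blocks of the param_card.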
        identify_pid = {}
        if identify_particles:
            for new, old in identify_particles.items():
                new_part = next((p for p in model.all_particles if p.name == new), None)
                old_part = next((p for p in self.particles if p.name == old), None)

                # fall back on case-insensitive (but unambiguous) name matching
                if not new_part:
                    first = True
                    for p in model.all_particles:
                        if p.name.lower() == new.lower():
                            if not first:
                                raise Exception('the particle name %s is ambiguous in the add-on model' % new)
                            else:
                                first = False
                                new_part = p
                if not old_part:
                    first = True
                    for p in self.particles:
                        if p.name.lower() == old.lower():
                            if not first:
                                raise Exception('the particle name %s is ambiguous in the base model' % old)
                            else:
                                first = False
                                old_part = p
                if not old_part:
                    # the name might be a MadGraph default name rather than the
                    # name used inside the base UFO model
                    defaultname = base_objects.Model.load_default_name()
                    for pdg, value in defaultname.items():
                        if value == old:
                            old_part = self.particle_dict[pdg]
                            identify_particles[new] = old_part.name
                            break

                if new_part is None:
                    raise USRMODERROR("particle %s not in the add-on model" % new)
                if old_part is None:
                    raise USRMODERROR("particle %s not in the original model" % old)
                identify_pid[new_part.pdg_code] = old_part.pdg_code

                if new_part.antiname not in identify_particles:
                    new_anti = new_part.antiname
                    old_anti = old_part.antiname
                    if old_anti == old:
                        raise USRMODERROR("failed identification (one particle is self-conjugate and the other is not)")
                    logger.info("adding identification for the anti-particle: %s=%s" % (new_anti, old_anti))
                    identify_particles[new_anti] = old_anti

        for parameter in model.all_parameters:
            self.add_parameter(parameter, identify_pid)
        for coupling in model.all_couplings:
            self.add_coupling(coupling)
        for lorentz in model.all_lorentz:
            self.add_lorentz(lorentz)
        for particle in model.all_particles:
            if particle.name in identify_particles:
                self.add_particle(particle, identify=identify_particles[particle.name])
            else:
                self.add_particle(particle)
        for vertex in model.all_vertices:
            self.add_interaction(vertex, model)

        self.all_path.append(path)

        return