""" Set of tools to modify a given UFO model
(mainly by adding or suppressing interactions) and to edit, as text, the
different parts of the model. Consistency checks of the model are performed.
The output is a new, valid UFO model.
"""

import glob
import logging
import os
import re
import sys

import madgraph.core.base_objects as base_objects
import madgraph.iolibs.files as files
import madgraph.various.misc as misc
import models as ufomodels
import models.import_ufo as import_ufo
import models.check_param_card as check_param_card
from madgraph import MG5DIR

pjoin = os.path.join
logger = logging.getLogger('madgraph.model')


class USRMODERROR(Exception): pass  # definition elided in this excerpt; minimal form assumed


def repr(obj):
    # The 'def' line of this helper was elided in the excerpt; the signature is
    # reconstructed from its body and from the call repr(obj) in create_data_text.
    text = obj.__repr__()
    if text.startswith('_'):
        text = '%s%s' % (str(obj.__class__.__name__)[0].upper(), text)
    return text
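
# Typical use of this module (a minimal sketch; the directory names are
# placeholders and the class name follows the definition reconstructed below):
#
#     base = UFOModel('path/to/base_model')       # load the base UFO model
#     base.add_model(path='path/to/addon_model')  # merge an add-on UFO model into it
#     base.write('path/to/combined_model')        # write the merged model as a new UFO directory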


class UFOModel(object):  # class statement reconstructed; only its docstring appears in this excerpt
    """ The class storing the current status of the model """

    def __init__(self, modelpath, addon='__1'):
        """Load the model from a valid UFO directory (otherwise keep everything
        empty)."""
        self.modelpath = modelpath
        model = ufomodels.load_model(modelpath)

        if not hasattr(model, 'all_orders'):
            raise USRMODERROR, 'Base Model doesn\'t follow the UFO convention (no couplings_order information)\n' +\
                               'MG5 is able to load such a model but NOT to use it with the add model feature.'
        if isinstance(model.all_particles[0].mass, basestring):
            raise USRMODERROR, 'Base Model doesn\'t follow the UFO convention (Mass/Width of particles are string names, not objects)\n' +\
                               'MG5 is able to load such a model but NOT to use it with the add model feature.'

        self.particles = model.all_particles
        if any(hasattr(p, 'loop_particles') for p in self.particles):
            raise USRMODERROR, 'Base Model doesn\'t follow the UFO convention'
        self.vertices = model.all_vertices
        self.couplings = model.all_couplings
        self.lorentz = model.all_lorentz
        self.parameters = model.all_parameters
        self.Parameter = self.parameters[0].__class__
        self.orders = model.all_orders

        self.functions = model.all_functions
        self.new_external = []

        if hasattr(model, 'all_propagators'):
            self.propagators = model.all_propagators
        else:
            self.propagators = []

        if hasattr(model, 'all_CTvertices'):
            self.CTvertices = model.all_CTvertices
        else:
            self.CTvertices = []

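        # Some versions of the UFO object_library store an expression attribute as
        # 'expr', others as 'expression' (an assumption based on the check below);
        # record which convention the base model uses so the attribute name can be
        # translated when objects and files are rewritten.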
        if 'self.expr = expression' in open(pjoin(self.modelpath, 'object_library.py')).read():
            self.translate = {'expr': 'expression'}
        else:
            self.translate = {}

        self.old_new = {}
        self.addon = addon

        self.particle_dict = {}
        for particle in self.particles:
            self.particle_dict[particle.pdg_code] = particle

        self.all_path = [self.modelpath]

    def write(self, outputdir):

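    # mod_file rewrites a source file line by line, renaming every identifier listed
    # in self.translate or self.old_new (whole-word matches only), so that files
    # copied from the add-on model refer to the merged parameter/coupling names.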
    def mod_file(self, inputpath, outputpath):

        fsock = open(outputpath, 'w')

        to_change = {}
        to_change.update(self.translate)
        to_change.update(self.old_new)
        for particle in self.particles:
            if hasattr(particle, 'replace') and particle.replace:
                misc.sprint(particle.get('name'), particle.replace.get('name'))

        pattern = re.compile(r'\b(%s)\b' % ('|'.join(to_change)))
        for line in open(inputpath):
            line = pattern.sub(lambda mo: to_change[mo.group()], line)
            fsock.write(line)


        """ propagate the model restrictions of the original model. """

        restrict_list = [l for l in os.listdir(self.modelpath) if l.startswith('restrict_')]
        if not self.new_external:

            for p in restrict_list:
                files.cp(pjoin(self.modelpath, p), outputdir)

        else:

            for p in restrict_list:
                param_card = check_param_card.ParamCard(pjoin(self.modelpath, p))
                for parameter in self.new_external:
                    block = parameter.lhablock
                    lhaid = parameter.lhacode
                    value = parameter.value
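                    # Restriction cards treat values that are exactly 0 or 1 as
                    # candidates for simplification, so the new parameters are
                    # written with values nudged slightly away from 0 and 1.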
                    if value == 0:
                        value = 1e-99
                    elif value == 1:
                        value = 9.999999e-1
                    try:
                        param_card.add_param(block.lower(), lhaid, value, 'from addon')
                    except check_param_card.InvalidParamCard:
                        logger.warning("%s will not act on %s %s" % (p, block, lhaid))
                        param_card[block.lower()].get(lhaid).value = value

                param_card.write(pjoin(outputdir, p), precision=7)


    def create_data_text(self, obj):
        """ create the data associated to the object """

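        # The object is serialised back to Python source, schematically:
        #     u = Particle(pdg_code = 2,
        #                  name = 'u',
        #                  ...)
        # required arguments come first, then any remaining attributes, with the
        # continuation lines aligned on the opening parenthesis.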
        nb_space = 0
        if hasattr(obj, 'require_args_all'):
            args = obj.require_args_all
        elif hasattr(obj, 'require_args'):
            args = obj.require_args
        else:
            args = []
        if args:
            text = """%s = %s(""" % (repr(obj), obj.__class__.__name__)
        else:
            text = """%s = %s(""" % (obj.name, obj.__class__.__name__)

        for data in args:
            if data in self.translate:
                data = self.translate[data]
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0

            try:
                expr = getattr(obj, data)
            except Exception:
                if data in ['counterterm', 'propagator', 'loop_particles']:
                    expr = None
                    setattr(obj, data, None)
                else:
                    raise
            name = str(data)
            if name in self.translate:
                name = self.translate[name]

            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        if hasattr(obj, 'get_all'):
            other_attr = [name for name in obj.get_all().keys()
                          if name not in args]
        else:
            other_attr = obj.__dict__.keys()

        for data in other_attr:
            name = str(data)
            if name in ['partial_widths', 'loop_particles']:
                continue
            if name in self.translate:
                name = self.translate[name]
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0
            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        text = text[:-2] + ')\n\n'

        return text

    def create_file_content(self, datalist):
        """ """
        return '\n'.join([self.create_data_text(obj) for obj in datalist])


    def write_particles(self, outputdir):
        """ """
        text = """
# This file was automatically created by The UFO_usermod

from __future__ import division
from object_library import all_particles, Particle
import parameters as Param

"""
        text += self.create_file_content(self.particles)
        ff = open(os.path.join(outputdir, 'particles.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

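    # The writers below follow the same pattern as write_particles: emit a short
    # header for the generated file, then dump the corresponding object list through
    # create_file_content.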
        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_vertices, Vertex
import particles as P
import couplings as C
import lorentz as L

"""
        text += self.create_file_content(self.vertices)
        ff = open(os.path.join(outputdir, 'vertices.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

        """ """

        if not self.CTvertices:
            return

        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_vertices, all_CTvertices, Vertex, CTVertex
import particles as P
import couplings as C
import lorentz as L

"""
        text += self.create_file_content(self.CTvertices)
        ff = open(os.path.join(outputdir, 'CT_vertices.py'), 'w')
        ff.writelines(text)
        ff.close()
        return


        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_couplings, Coupling
"""
        text += self.create_file_content(self.couplings)
        ff = open(os.path.join(outputdir, 'couplings.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_lorentz, Lorentz
"""

        text += self.create_file_content(self.lorentz)
        ff = open(os.path.join(outputdir, 'lorentz.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_parameters, Parameter
"""

        text += self.create_file_content(self.parameters)
        ff = open(os.path.join(outputdir, 'parameters.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_orders, CouplingOrder
"""

        text += self.create_file_content(self.orders)
        ff = open(os.path.join(outputdir, 'coupling_orders.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

        """ """
        text = """
# This file was automatically created by The UFO_usermod

import cmath
from object_library import all_functions, Function

"""

        text += self.create_file_content(self.functions)
        ff = open(os.path.join(outputdir, 'function_library.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

        """ """

        text = """
# This file was automatically created by The UFO_usermod
from object_library import all_propagators, Propagator
"""

        text += self.create_file_content(self.propagators)
        ff = open(os.path.join(outputdir, 'propagators.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

        """Copy/merge the routines written in Fortran/C++/Python."""

        re_fct = re.compile(r'''^\s{7,70}[\w\s]*function (\w*)\(''', re.M + re.I)
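        # The pattern above matches fixed-form Fortran 'function NAME(' declarations
        # (statement text starting after the label columns), capturing the function
        # name; it is used to detect clashes between the models' functions.f files.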
        present_fct = set()
        for dirpath in self.all_path:
            if os.path.exists(pjoin(dirpath, 'Fortran', 'functions.f')):
                text = open(pjoin(dirpath, 'Fortran', 'functions.f')).read()
                new_fct = re_fct.findall(text)
                nb_old = len(present_fct)
                nb_added = len(new_fct)
                new_fct = set([f.lower() for f in new_fct])
                present_fct.update(new_fct)
                if len(present_fct) < nb_old + nb_added:
                    logger.critical('''Some functions in functions.f are defined in more than one model.
 This requires AT LEAST a manual edit of the resulting file, and more likely the
 resulting model has to be considered as un-physical! Use it very carefully.''')

                if not os.path.exists(pjoin(outputdir, 'Fortran')):
                    os.mkdir(pjoin(outputdir, 'Fortran'))
                fsock = open(pjoin(outputdir, 'Fortran', 'functions.f'), 'a')
                fsock.write(text)
                fsock.close()


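        # Any other hand-written source file (e.g. custom HELAS routines) is copied
        # over untouched; when a name clash with an already copied file occurs, a
        # '.moved' suffix is inserted before the extension so nothing is overwritten.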
        for dirpath in self.all_path:
            for subdir in ['Fortran', 'CPP', 'Python']:
                if os.path.exists(pjoin(dirpath, subdir)):
                    for filepath in os.listdir(pjoin(dirpath, subdir)):
                        if filepath == 'functions.f':
                            continue
                        if '.' not in filepath:
                            continue
                        logger.warning('Manual HELAS routines are associated to the model. Those are NOT modified automatically, so you need to check them manually.')
                        nb = 0
                        name, extension = filepath.rsplit('.', 1)

                        while 1:
                            filename = '%s%s.%s' % (name, '.moved' * nb, extension)
                            if os.path.exists(pjoin(outputdir, subdir, filename)):
                                nb += 1
                            else:
                                break
                        if not os.path.exists(pjoin(outputdir, subdir)):
                            os.mkdir(pjoin(outputdir, subdir))
                        files.cp(pjoin(dirpath, subdir, filepath), pjoin(outputdir, subdir, filename))

    def get_particle(self, name):
        """ """
        for part in self.particles:
            if part.name == name:
                return part

        raise USRMODERROR, 'no particle %s in the model' % name


    def add_particle(self, particle, identify=None):
        """Add a particle in a consistent way"""

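        # Matching strategy: first look for a particle of the same name in the base
        # model (exact, then case-insensitive); failing that, match by pdg code. A
        # match marks the add-on particle as a 'replace' of the base one, while a
        # name clash with a different pdg code renames the add-on particle with the
        # addon suffix.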
        name = particle.name
        if identify:
            name = identify
        old_part = next((p for p in self.particles if p.name == name), None)
        if not old_part:
            first = True
            for p in self.particles:
                if p.name.lower() == name.lower():
                    if not first:
                        raise Exception
                    else:
                        first = False
                        old_part = p

        if old_part:
            if old_part.pdg_code == particle.pdg_code:
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            elif identify:
                if particle.spin != old_part.spin:
                    raise USRMODERROR, "identified particles should have the same spin"
                elif particle.color != old_part.color:
                    raise USRMODERROR, "identified particles should have the same color"
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            else:
                logger.warning('The particle name \'%s\' is present in both models with a different pdg code' % name)
                logger.warning('The particle coming from the plug-in model will be renamed to \'%s%s\'' % (name, self.addon))
                particle.name = '%s%s' % (name, self.addon)
                self.particles.append(particle)
                return
        elif identify:
            raise USRMODERROR, "Particle %s is not in the model" % identify

        pdg = particle.pdg_code
        if pdg in self.particle_dict:
            particle.replace = self.particle_dict[pdg]
            return self.check_mass_width_of_particle(self.particle_dict[pdg], particle)
        else:
            if hasattr(particle, 'replace'):
                del particle.replace
            self.particles.append(particle)

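    # check_mass_width_of_particle reconciles the mass/width parameters of a base
    # particle and the matching add-on particle: differing names are accepted only
    # if they are already mapped onto each other in self.old_new, or if one of the
    # two is the 'ZERO' parameter (in which case the non-zero one is kept).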

    def check_mass_width_of_particle(self, p_base, p_plugin):

        if p_base.mass.name != p_plugin.mass.name:
            if p_plugin.mass.name in self.old_new:
                if self.old_new[p_plugin.mass.name] != p_base.mass.name:
                    raise USRMODERROR, 'Some inconsistency in the mass assignment in the model: equivalent of %s is %s != %s ' % (p_plugin.mass.name, self.old_new[p_plugin.mass.name], p_base.mass.name)
            elif p_base.mass.name.lower() == 'zero':
                p_base.mass = p_plugin.mass
            elif p_plugin.mass.name.lower() == 'zero':
                pass
            else:
                raise USRMODERROR, 'Some inconsistency in the mass assignment in the model\n' + \
                    '    Mass: %s and %s\n' % (p_base.mass.name, p_plugin.mass.name) + \
                    '    conflict name %s\n' % self.old_new + \
                    '    pdg_code: %s %s' % (p_base.pdg_code, p_plugin.pdg_code)

        if p_base.width.name != p_plugin.width.name:
            if p_plugin.width.name in self.old_new:
                if self.old_new[p_plugin.width.name] != p_base.width.name:
                    raise USRMODERROR, 'Some inconsistency in the width assignment in the model'
            elif p_base.width.name.lower() == 'zero':
                p_base.width = p_plugin.width
            elif p_plugin.width.name.lower() == 'zero':
                pass
            else:
                raise USRMODERROR, 'Some inconsistency in the width assignment in the model'

        return

        """adding a param_card parameter inside the current model.
        If the parameter block/lhacode already exists then just do nothing
        (but if the names differ, keep the info for future translation).
        If the name already exists in the model, raise an exception.
        """

        name = parameter.name

        old_param = next((p for p in self.parameters if p.name == name), None)
        if old_param:
            if old_param.lhablock == parameter.lhablock and \
               old_param.lhacode == parameter.lhacode:
                return
            else:
                logger.info('The two models define the parameter \'%s\'\n' % parameter.name +
                            '    the original model for %s : %s\n' % (old_param.lhablock, old_param.lhacode) +
                            '    the plugin model for %s : %s\n' % (parameter.lhablock, parameter.lhacode) +
                            '    We will rename the one from the plugin to %s%s' % (parameter.name, self.addon))
                if old_param.nature == 'internal':
                    logger.warning('''The parameter %s is actually an internal parameter of the base model.
    Its value is given by %s.
    If those two parameters are expected to be identical, you need to provide the value in the param_card according to this formula.
    ''' % (parameter.name, old_param.value))

                self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
                parameter.name = '%s%s' % (parameter.name, self.addon)

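        # For the MASS and DECAY blocks the LHA code is a pdg id: remap it through
        # identify_pid so that a parameter attached to an identified add-on particle
        # is looked up under the corresponding base-model particle.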
        lhacode = parameter.lhacode
        if parameter.lhablock.lower() in ['mass', 'decay']:
            if int(parameter.lhacode[0]) in identify_pid:
                lhacode = [identify_pid[int(parameter.lhacode[0])]]

        old_param = next((p for p in self.parameters if p.lhacode == lhacode \
                          and p.lhablock == parameter.lhablock), None)
        if old_param:
            logger.info('The two models define the block \'%s\' with id \'%s\' with different parameter names \'%s\', \'%s\'\n' \
                        % (old_param.lhablock, old_param.lhacode, parameter.name, old_param.name) + \
                        '    We will merge those two parameters into a single one')
            if parameter.name in self.old_new.values():
                key = [k for k in self.old_new if self.old_new[k] == parameter.name][0]
                self.old_new[key] = old_param.name
                self.old_new[parameter.name] = old_param.name
            else:
                self.old_new[parameter.name] = old_param.name

        else:
            self.parameters.append(parameter)
            self.new_external.append(parameter)

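    # Whenever an expression from the add-on model is taken over (internal parameter
    # values, couplings, Lorentz structures), every identifier renamed or merged so
    # far is rewritten through self.old_new with a word-boundary regex, as below.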
        """ add a parameter of type internal """

        name = parameter.name

        old_param = next((p for p in self.parameters if p.name == name), None)
        if old_param:
            if old_param.value == parameter.value:
                return
            else:
                if self.old_new:
                    pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
                    def replace(matchobj):
                        return self.old_new[matchobj.group(0)]
                    parameter.value = pattern.sub(replace, parameter.value)
                self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
                parameter.name = '%s%s' % (parameter.name, self.addon)
                self.parameters.append(parameter)
                return

        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            parameter.value = pattern.sub(replace, parameter.value)

        self.parameters.append(parameter)



        """add one coupling"""

        name = coupling.name
        same_name = next((p for p in self.couplings if p.name == name), None)
        if same_name:
            coupling.name = '%s%s' % (coupling.name, self.addon)

        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            coupling.value = pattern.sub(replace, coupling.value)

        old_coupling = next((p for p in self.couplings if p.value == coupling.value), None)

        if old_coupling:
            coupling.replace = old_coupling
        else:
            self.couplings.append(coupling)

        """adding a new coupling order inside the model"""

        name = coupling_order.name
        same_name = next((p for p in self.orders if p.name == name), None)
        if same_name:
            if coupling_order.hierarchy != same_name.hierarchy:
                logger.warning('%s has a different hierarchy in the two models; using the minimal value (%s, %s) => %s' \
                               % (name, same_name.hierarchy, coupling_order.hierarchy,
                                  min(same_name.hierarchy, coupling_order.hierarchy)))
                same_name.hierarchy = min(same_name.hierarchy, coupling_order.hierarchy)
            if coupling_order.expansion_order != same_name.expansion_order:
                logger.warning('%s has a different expansion_order in the two models; using the minimal value (%s, %s) => %s' \
                               % (name, coupling_order.expansion_order, same_name.expansion_order,
                                  min(same_name.expansion_order, coupling_order.expansion_order)))
                same_name.expansion_order = min(same_name.expansion_order, coupling_order.expansion_order)
            if hasattr(same_name, 'perturbative_expansion') and same_name.perturbative_expansion:
                logger.info('%s will be forbidden to run at NLO' % same_name.name)
                same_name.perturbative_expansion = 0

        else:
            self.orders.append(coupling_order)

        """add one lorentz structure"""

        name = lorentz.name
        same_name = next((p for p in self.lorentz if p.name == name), None)
        if same_name:
            lorentz.name = '%s%s' % (lorentz.name, self.addon)

        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            lorentz.structure = pattern.sub(replace, lorentz.structure)

        old_lor = next((p for p in self.lorentz
                        if p.structure == lorentz.structure and p.spins == lorentz.spins),
                       None)

        if old_lor:
            lorentz.replace = old_lor
        else:
            self.lorentz.append(lorentz)

        """Add one interaction to the model. This is UNCONDITIONAL!
        If the same interaction is already in the model, the interaction would appear
        twice; this is now weakened when both interactions are exactly identical
        (EXACTLY the same color/lorentz/coupling expressions).
        """

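        # Work on a copy of the interaction: instantiating it through the UFO
        # constructor also registers it in model.all_vertices, so that spurious
        # entry is popped again right away.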
        interaction = interaction.__class__(**interaction.__dict__)
        model.all_vertices.pop(-1)

        name = interaction.name
        same_name = next((p for p in self.vertices if p.name == name), None)
        if same_name:
            interaction.name = '%s%s' % (interaction.name, self.addon)

        particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
        interaction.particles = particles

        lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
        interaction.lorentz = lorentz

        couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
                     for key, c in interaction.couplings.items()]
        interaction.couplings = dict(couplings)

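        # Duplicate detection: collect the existing vertices with the same (sorted)
        # pdg content and try to match every (color, lorentz) pair of the new
        # interaction onto one of theirs with the same coupling (or at least the
        # same coupling order); if everything matches, the interaction is dropped.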
        get_pdg = lambda vertex: sorted([p.pdg_code for p in vertex.particles])
        id_part = get_pdg(interaction)
        iden_vertex = [v for v in self.vertices if get_pdg(v) == id_part]
        iden = False
        nb_coupling = len(interaction.couplings)
        keys = interaction.couplings.keys()

        get_lor_and_color = lambda i: (interaction.lorentz[keys[i][1]].structure,
                                       interaction.color[keys[i][0]])
        for v in iden_vertex:
            if len(v.couplings) != nb_coupling:
                continue
            found = []
            for ((i, j), coup) in v.couplings.items():
                new_lorentz = v.lorentz[j].structure
                new_color = v.color[i]
                k = 0
                same = [k for k in range(nb_coupling) if k not in found and
                        get_lor_and_color(k) == (new_lorentz, new_color)]
                if not same:
                    break
                else:
                    for k in same:
                        if interaction.couplings[keys[k]] == coup:
                            found.append(k)
                            break
                    else:
                        for k in same:
                            if interaction.couplings[keys[k]].order == coup.order:
                                found.append(k)
                                warning = """Did NOT add the interaction %s since an interaction with the same particles/lorentz/color/coupling order exists,
BUT it was not possible to ensure that the couplings are the same. Coupling expressions:
    base model:  %s
    addon model: %s
""" % (id_part, coup.value, interaction.couplings[keys[k]].value)
                                logger.warning(warning)
                                found.append(k)
                                break
                        else:
                            pass
            else:
                return

        logger.info('Adding interaction for the following particles: %s' % id_part)

        self.vertices.append(interaction)

        """Add one interaction to the model. This is UNCONDITIONAL!
        if the same interaction is in the model this means that the interaction
        will appear twice."""

        name = interaction.name
        same_name = next((p for p in self.vertices if p.name == name), None)
        if same_name:
            interaction.name = '%s%s' % (interaction.name, self.addon)

        particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
        interaction.particles = particles

        lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
        interaction.lorentz = lorentz

        couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
                     for key, c in interaction.couplings.items()]
        interaction.couplings = dict(couplings)

        loop_particles = [[p.replace if hasattr(p, 'replace') else p for p in plist]
                          for plist in interaction.loop_particles]
        interaction.loop_particles = loop_particles
        self.CTvertices.append(interaction)


    def add_model(self, model=None, path=None, identify_particles=None):
        """add another model to the current one"""

        self.new_external = []
        if path:
            model = ufomodels.load_model(path)

        if not model:
            raise USRMODERROR, 'Need a valid Model'
        else:
            path = model.__path__[0]

        if not hasattr(model, 'all_orders'):
            raise USRMODERROR, 'Add-on Model doesn\'t follow the UFO convention (no couplings_order information)\n' +\
                               'MG5 is able to load such a model but NOT to use it with the add model feature.'
        if isinstance(model.all_particles[0].mass, basestring):
            raise USRMODERROR, 'Add-on Model doesn\'t follow the UFO convention (Mass/Width of particles are string names, not objects)\n' +\
                               'MG5 is able to load such a model but NOT to use it with the add model feature.'

        for order in model.all_orders:
            if hasattr(order, 'perturbative_expansion') and order.perturbative_expansion:
                raise USRMODERROR, 'Add-on model can not be a loop model.'

        for order in model.all_orders:
            self.add_coupling_order(order)

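        # identify_particles maps an add-on particle name onto an existing base-model
        # particle name; identify_pid records the corresponding pdg-code mapping so
        # that MASS/DECAY entries of identified particles end up in the same slot.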
        identify_pid = {}
        if identify_particles:
            for new, old in identify_particles.items():
                new_part = next((p for p in model.all_particles if p.name == new), None)
                old_part = next((p for p in self.particles if p.name == old), None)

                if not new_part:
                    first = True
                    for p in model.all_particles:
                        if p.name.lower() == new.lower():
                            if not first:
                                raise Exception
                            else:
                                first = False
                                new_part = p
                if not old_part:
                    first = True
                    for p in self.particles:
                        if p.name.lower() == old.lower():
                            if not first:
                                raise Exception
                            else:
                                first = False
                                old_part = p
                if not old_part:

                    defaultname = base_objects.Model.load_default_name()
                    for pdg, value in defaultname.items():
                        if value == old:
                            old_part = self.particle_dict[pdg]
                            identify_particles[new] = old_part.name
                            break

                if new_part is None:
                    raise USRMODERROR, "particle %s not in the added model" % new
                if old_part is None:
                    raise USRMODERROR, "particle %s not in the original model" % old
                identify_pid[new_part.pdg_code] = old_part.pdg_code

                if new_part.antiname not in identify_particles:
                    new_anti = new_part.antiname
                    old_anti = old_part.antiname
                    if old_anti == old:
                        raise USRMODERROR, "failed identification (one particle is self-conjugate and not the other)"
                    logger.info("adding identification for anti-particle: %s=%s" % (new_anti, old_anti))
                    identify_particles[new_anti] = old_anti

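        # Merge order matters: parameters first (so renamings are known), then
        # couplings and lorentz structures (whose expressions are rewritten through
        # self.old_new), then particles, and finally the interactions themselves.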
        for parameter in model.all_parameters:
            self.add_parameter(parameter, identify_pid)
        for coupling in model.all_couplings:
            self.add_coupling(coupling)
        for lorentz in model.all_lorentz:
            self.add_lorentz(lorentz)
        for particle in model.all_particles:
            if identify_particles and particle.name in identify_particles:
                self.add_particle(particle, identify=identify_particles[particle.name])
            else:
                self.add_particle(particle)
        for vertex in model.all_vertices:
            self.add_interaction(vertex, model)

        self.all_path.append(path)

        return