15 """ Set of Tool in order to modify a given UFO model.
16 (mainly by adding-suppressing interactions and allow to modify by text the
17 different part of the model. Check of consistency of the model are performed.
18 This produce a new valid UFO model in output.
19 """

import glob
import logging
import os
import re
import sys

import madgraph.core.base_objects as base_objects
import madgraph.iolibs.files as files
import madgraph.various.misc as misc
import models as ufomodels
import models.import_ufo as import_ufo
import models.check_param_card as check_param_card

pjoin = os.path.join
logger = logging.getLogger('madgraph.model')


class USRMODERROR(Exception): pass


def repr(obj):
    """Return the representation of a UFO object, capitalising the class
    initial when the default repr starts with an underscore."""
    text = obj.__repr__()
    if text.startswith('_'):
        text = '%s%s' % (str(obj.__class__.__name__)[0].upper(), text)
    return text

48 """ The class storing the current status of the model """
49
50 - def __init__(self, modelpath, addon='__1'):
51 """load the model from a valid UFO directory (otherwise keep everything
52 as empty."""
53 self.modelpath = modelpath
54 model = ufomodels.load_model(modelpath)
55
56
57 if not hasattr(model, 'all_orders'):
58 raise USRMODERROR, 'Base Model doesn\'t follows UFO convention (no couplings_order information)\n' +\
59 'MG5 is able to load such model but NOT to the add model feature.'
60 if isinstance(model.all_particles[0].mass, basestring):
61 raise USRMODERROR, 'Base Model doesn\'t follows UFO convention (Mass/Width of particles are string name, not object)\n' +\
62 'MG5 is able to load such model but NOT to the add model feature.'
63
64
65 self.particles = model.all_particles
66 if any(hasattr(p, 'loop_particles') for p in self.particles):
67 raise USRMODERROR, 'Base Model doesn\'t follows UFO convention '
68 self.vertices = model.all_vertices
69 self.couplings = model.all_couplings
70 self.lorentz = model.all_lorentz
71 self.parameters = model.all_parameters
72 self.Parameter = self.parameters[0].__class__
73 self.orders = model.all_orders
74
75 self.functions = model.all_functions
76 self.new_external = []
77
78 if hasattr(model, 'all_propagators'):
79 self.propagators = model.all_propagators
80 else:
81 self.propagators = []
82
83
84 if hasattr(model, 'all_CTvertices'):
85 self.CTvertices = model.all_CTvertices
86 else:
87 self.CTvertices = []
88
89
90 if 'self.expr = expression' in open(pjoin(self.modelpath, 'object_library.py')).read():
91 self.translate = {'expr': 'expression'}
92 else:
93 self.translate = {}
94
95
96 self.old_new = {}
97 self.addon = addon
98
99
100 self.particle_dict = {}
101 for particle in self.particles:
102 self.particle_dict[particle.pdg_code] = particle
103
104
105 self.all_path = [self.modelpath]
106
    def write(self, outputdir):

    def mod_file(self, inputpath, outputpath):
        """Copy a text file, applying the renaming dictionaries on the way."""

        fsock = open(outputpath, 'w')

        to_change = {}
        to_change.update(self.translate)
        to_change.update(self.old_new)
        for particle in self.particles:
            if hasattr(particle, 'replace') and particle.replace:
                misc.sprint(particle.get('name'), particle.replace.get('name'))

        pattern = re.compile(r'\b(%s)\b' % ('|'.join(to_change)))
        for line in open(inputpath):
            line = pattern.sub(lambda mo: to_change[mo.group()], line)
            fsock.write(line)
        fsock.close()

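    # Example of the substitution performed by mod_file (hypothetical names):
    # with self.old_new = {'MH': 'MH__1'}, a line such as
    #     mass = Param.MH,
    # is rewritten as
    #     mass = Param.MH__1,
    # Only whole words are replaced (note the \b anchors in the pattern).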

    def write_restrict_card(self, outputdir):
        """ propagate the model restrictions of the original model. """

        restrict_list = [l for l in os.listdir(self.modelpath) if l.startswith('restrict_')]
        if not self.new_external:
            # no new external parameter: the restriction cards can be copied as they are
            for p in restrict_list:
                files.cp(pjoin(self.modelpath, p), outputdir)
        else:
            # add the new external parameters to each restriction card
            for p in restrict_list:
                param_card = check_param_card.ParamCard(pjoin(self.modelpath, p))
                for parameter in self.new_external:
                    block = parameter.lhablock
                    lhaid = parameter.lhacode
                    value = parameter.value
                    if value == 0:
                        value = 1e-99
                    elif value == 1:
                        value = 9.999999e-1
                    try:
                        param_card.add_param(block.lower(), lhaid, value, 'from addon')
                    except check_param_card.InvalidParamCard:
                        logger.warning("%s will not act on %s %s" % (p, block, lhaid))
                        param_card[block.lower()].get(lhaid).value = value

                param_card.write(pjoin(outputdir, p), precision=7)

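    # Illustrative effect on a restriction card (block/code are hypothetical):
    # if the add-on introduces an external parameter in block FRBlock with
    # lhacode [1] and default value 0, each restrict_*.dat of the base model
    # gets the extra entry
    #     1 1.000000e-99 # from addon
    # The zero default is replaced by 1e-99 (and 1 by 9.999999e-1) so that the
    # restriction machinery does not treat the new parameter as exactly 0 or 1.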

    def create_data_text(self, obj):
        """ create the data associated to the object"""

        nb_space = 0
        if hasattr(obj, 'require_args_all'):
            args = obj.require_args_all
        elif hasattr(obj, 'require_args'):
            args = obj.require_args
        else:
            args = []
        if args:
            text = """%s = %s(""" % (repr(obj), obj.__class__.__name__)
        else:
            text = """%s = %s(""" % (obj.name, obj.__class__.__name__)

        for data in args:
            if data in self.translate:
                data = self.translate[data]
            # align the following attributes with the opening parenthesis
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0

            try:
                expr = getattr(obj, data)
            except:
                if data in ['counterterm', 'propagator', 'loop_particles']:
                    expr = None
                    setattr(obj, data, None)
                else:
                    raise
            name = str(data)
            if name in self.translate:
                name = self.translate[name]

            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        if hasattr(obj, 'get_all'):
            other_attr = [name for name in obj.get_all().keys()
                          if name not in args]
        else:
            other_attr = obj.__dict__.keys()

        for data in other_attr:
            name = str(data)
            if name in ['partial_widths', 'loop_particles']:
                continue
            if name in self.translate:
                name = self.translate[name]
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0
            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        # replace the trailing ',\n' by the closing parenthesis
        text = text[:-2] + ')\n\n'

        return text

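    # Illustrative output of create_data_text for an external Parameter
    # (attribute values are hypothetical):
    #
    #     aEWM1 = Parameter(name = 'aEWM1',
    #                       nature = 'external',
    #                       type = 'real',
    #                       value = 127.9,
    #                       texname = '\\text{aEWM1}',
    #                       lhablock = 'SMINPUTS',
    #                       lhacode = [ 1 ])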
    def create_file_content(self, datalist):
        """ """
        return '\n'.join([self.create_data_text(obj) for obj in datalist])

    def write_particles(self, outputdir):
        """ """
        text = """
# This file was automatically created by The UFO_usermod

from __future__ import division
from object_library import all_particles, Particle
import parameters as Param

"""
        text += self.create_file_content(self.particles)
        ff = open(os.path.join(outputdir, 'particles.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_vertices(self, outputdir):
        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_vertices, Vertex
import particles as P
import couplings as C
import lorentz as L

"""
        text += self.create_file_content(self.vertices)
        ff = open(os.path.join(outputdir, 'vertices.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_CTvertices(self, outputdir):
        """ """

        if not self.CTvertices:
            return

        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_vertices, all_CTvertices, Vertex, CTVertex
import particles as P
import couplings as C
import lorentz as L

"""
        text += self.create_file_content(self.CTvertices)
        ff = open(os.path.join(outputdir, 'CT_vertices.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_couplings(self, outputdir):
        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_couplings, Coupling
"""
        text += self.create_file_content(self.couplings)
        ff = open(os.path.join(outputdir, 'couplings.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_lorentz(self, outputdir):
        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_lorentz, Lorentz
"""

        text += self.create_file_content(self.lorentz)
        ff = open(os.path.join(outputdir, 'lorentz.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_parameters(self, outputdir):
        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_parameters, Parameter
"""

        text += self.create_file_content(self.parameters)
        ff = open(os.path.join(outputdir, 'parameters.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_orders(self, outputdir):
        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_orders, CouplingOrder
"""

        text += self.create_file_content(self.orders)
        ff = open(os.path.join(outputdir, 'coupling_orders.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_functions(self, outputdir):
        """ """
        text = """
# This file was automatically created by The UFO_usermod

import cmath
from object_library import all_functions, Function

"""

        text += self.create_file_content(self.functions)
        ff = open(os.path.join(outputdir, 'function_library.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_propagator(self, outputdir):
        """ """

        text = """
# This file was automatically created by The UFO_usermod
from object_library import all_propagators, Propagator
"""

        text += self.create_file_content(self.propagators)
        ff = open(os.path.join(outputdir, 'propagators.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_external_files(self, outputdir):
        """Copy/merge the routines written in Fortran/C++/Python."""

        # Fortran functions of all models are merged into a single functions.f
        re_fct = re.compile(r'''^\s{7,70}[\w\s]*function (\w*)\(''', re.M + re.I)
        present_fct = set()
        for dirpath in self.all_path:
            if os.path.exists(pjoin(dirpath, 'Fortran', 'functions.f')):
                text = open(pjoin(dirpath, 'Fortran', 'functions.f')).read()
                new_fct = re_fct.findall(text)
                nb_old = len(present_fct)
                nb_added = len(new_fct)
                new_fct = set([f.lower() for f in new_fct])
                present_fct.update(new_fct)
                if len(present_fct) < nb_old + nb_added:
                    logger.critical('''Some functions in functions.f are defined in more than one model.
 This requires AT LEAST a manual modification of the resulting file, and the
 model most likely needs to be considered as unphysical! Use it very carefully.''')

                if not os.path.exists(pjoin(outputdir, 'Fortran')):
                    os.mkdir(pjoin(outputdir, 'Fortran'))
                fsock = open(pjoin(outputdir, 'Fortran', 'functions.f'), 'a')
                fsock.write(text)
                fsock.close()

        # other routines are copied (with a '.moved' suffix in case of name clash)
        for dirpath in self.all_path:
            for subdir in ['Fortran', 'CPP', 'Python']:
                if os.path.exists(pjoin(dirpath, subdir)):
                    for filepath in os.listdir(pjoin(dirpath, subdir)):
                        if filepath == 'functions.f':
                            continue
                        if '.' not in filepath:
                            continue
                        logger.warning('Manual HELAS routines are associated to the model. Those are NOT modified automatically, so you need to check them by hand.')
                        nb = 0
                        name, extension = filepath.rsplit('.', 1)

                        while 1:
                            filename = '%s%s.%s' % (name, '.moved' * nb, extension)
                            if os.path.exists(pjoin(outputdir, subdir, filename)):
                                nb += 1
                            else:
                                break
                        if not os.path.exists(pjoin(outputdir, subdir)):
                            os.mkdir(pjoin(outputdir, subdir))
                        files.cp(pjoin(dirpath, subdir, filepath), pjoin(outputdir, subdir, filename))

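    # Example of the clash handling above (hypothetical file name): if both
    # models ship Fortran/myfunc.f, the first copy is written as myfunc.f and
    # the next one as myfunc.moved.f (then myfunc.moved.moved.f, and so on).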
    def get_particle(self, name):
        """ return the particle with the given name """
        for part in self.particles:
            if part.name == name:
                return part

        raise USRMODERROR, 'no particle %s in the model' % name

    def add_particle(self, particle, identify=None):
        """Add a particle in a consistent way"""

        name = particle.name
        if identify:
            name = identify
        old_part = next((p for p in self.particles if p.name == name), None)
        if not old_part:
            # fall back on a case-insensitive match, which must be unique
            first = True
            for p in self.particles:
                if p.name.lower() == name.lower():
                    if not first:
                        raise Exception
                    else:
                        first = False
                        old_part = p

        if old_part:
            # a particle with the same name is already in the base model
            if old_part.pdg_code == particle.pdg_code:
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            elif identify:
                if particle.spin != old_part.spin:
                    raise USRMODERROR, "identified particles should have the same spin"
                elif particle.color != old_part.color:
                    raise USRMODERROR, "identified particles should have the same color"
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            else:
                logger.warning('The particle name \'%s\' is present in both models with different pdg codes' % name)
                logger.warning('The particle coming from the plug-in model will be renamed to \'%s%s\'' % (name, self.addon))
                particle.name = '%s%s' % (name, self.addon)
                self.particles.append(particle)
                return
        elif identify:
            raise USRMODERROR, "Particle %s is not in the model" % identify

        pdg = particle.pdg_code
        if pdg in self.particle_dict:
            particle.replace = self.particle_dict[pdg]
            return self.check_mass_width_of_particle(self.particle_dict[pdg], particle)
        else:
            if hasattr(particle, 'replace'):
                del particle.replace
            self.particles.append(particle)

    def check_mass_width_of_particle(self, p_base, p_plugin):
        """ ensure that the mass and width of two identified particles refer
        to the same parameter."""

        # check the mass
        if p_base.mass.name != p_plugin.mass.name:
            # the plug-in mass may already have been renamed to the base one
            if p_plugin.mass.name in self.old_new:
                if self.old_new[p_plugin.mass.name] != p_base.mass.name:
                    raise USRMODERROR, 'Some inconsistency in the mass assignment in the model: equivalent of %s is %s != %s ' % (p_plugin.mass.name, self.old_new[p_plugin.mass.name], p_base.mass.name)
            elif p_base.mass.name.lower() == 'zero':
                p_base.mass = p_plugin.mass
            elif p_plugin.mass.name.lower() == 'zero':
                pass
            else:
                raise USRMODERROR, 'Some inconsistency in the mass assignment in the model\n' + \
                    ' Mass: %s and %s\n' % (p_base.mass.name, p_plugin.mass.name) + \
                    ' conflict name %s\n' % self.old_new + \
                    ' pdg_code: %s %s' % (p_base.pdg_code, p_plugin.pdg_code)

        # check the width
        if p_base.width.name != p_plugin.width.name:
            if p_plugin.width.name in self.old_new:
                if self.old_new[p_plugin.width.name] != p_base.width.name:
                    raise USRMODERROR, 'Some inconsistency in the width assignment in the model'
            elif p_base.width.name.lower() == 'zero':
                p_base.width = p_plugin.width
            elif p_plugin.width.name.lower() == 'zero':
                pass
            else:
                raise USRMODERROR, 'Some inconsistency in the width assignment in the model'

        return

    def add_parameter(self, parameter, identify_pid={}):
        """Add a param_card parameter to the current model.
        If the block/lhacode already exists, do nothing (but if the names
        differ, keep the information for future translation).
        If only the name already exists in the model, the plug-in parameter
        is renamed with the add-on suffix.
        """

        name = parameter.name

        # first look for a parameter with the same name
        old_param = next((p for p in self.parameters if p.name == name), None)
        if old_param:
            if old_param.lhablock == parameter.lhablock and \
                   old_param.lhacode == parameter.lhacode:
                return
            else:
                logger.info('The two models define the parameter \'%s\'\n' % parameter.name +
                            '    the original model for %s : %s\n' % (old_param.lhablock, old_param.lhacode) +
                            '    the plug-in model for %s : %s\n' % (parameter.lhablock, parameter.lhacode) +
                            '    We will rename the one from the plug-in to %s%s' % (parameter.name, self.addon))
                if old_param.nature == 'internal':
                    logger.warning('''The parameter %s is actually an internal parameter of the base model.
    Its value is given by %s.
    If those two parameters are expected to be identical, you need to provide the value in the param_card according to this formula.
    ''' % (name, old_param.value))

                self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
                parameter.name = '%s%s' % (parameter.name, self.addon)

        # then look for a parameter with the same lhablock/lhacode
        # (for mass/decay blocks, translate the pdg code of identified particles)
        lhacode = parameter.lhacode
        if parameter.lhablock.lower() in ['mass', 'decay']:
            if int(parameter.lhacode[0]) in identify_pid:
                lhacode = [identify_pid[int(parameter.lhacode[0])]]

        old_param = next((p for p in self.parameters if p.lhacode == lhacode
                          and p.lhablock == parameter.lhablock), None)
        if old_param:
            logger.info('The two models define the block \'%s\' with id \'%s\' with different parameter names \'%s\', \'%s\'\n'
                        % (old_param.lhablock, old_param.lhacode, parameter.name, old_param.name) +
                        '    We will merge those two parameters into a single one')
            if parameter.name in self.old_new.values():
                key = [k for k in self.old_new if self.old_new[k] == parameter.name][0]
                self.old_new[key] = old_param.name
                self.old_new[parameter.name] = old_param.name
            else:
                self.old_new[parameter.name] = old_param.name
        else:
            # genuinely new external parameter
            self.parameters.append(parameter)
            self.new_external.append(parameter)

    def add_internal_parameter(self, parameter):
        """ add a parameter of type internal """

        name = parameter.name

        old_param = next((p for p in self.parameters if p.name == name), None)
        if old_param:
            if old_param.value == parameter.value:
                return
            else:
                # same name but different expression: rename the plug-in one,
                # after translating already renamed symbols in its value
                if self.old_new:
                    pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
                    def replace(matchobj):
                        return self.old_new[matchobj.group(0)]
                    parameter.value = pattern.sub(replace, parameter.value)
                self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)

                parameter.name = '%s%s' % (parameter.name, self.addon)
                self.parameters.append(parameter)
                return

        # translate already renamed symbols in the expression before adding it
        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            parameter.value = pattern.sub(replace, parameter.value)

        self.parameters.append(parameter)

    def add_coupling(self, coupling):
        """add one coupling"""

        name = coupling.name
        same_name = next((p for p in self.couplings if p.name == name), None)
        if same_name:
            coupling.name = '%s%s' % (coupling.name, self.addon)

        # translate already renamed parameters inside the coupling expression
        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            coupling.value = pattern.sub(replace, coupling.value)

        # re-use an existing coupling with the exact same expression
        old_coupling = next((p for p in self.couplings if p.value == coupling.value), None)

        if old_coupling:
            coupling.replace = old_coupling
        else:
            self.couplings.append(coupling)

    def add_coupling_order(self, coupling_order):
        """add a new coupling order to the model"""

        name = coupling_order.name
        same_name = next((p for p in self.orders if p.name == name), None)
        if same_name:
            if coupling_order.hierarchy != same_name.hierarchy:
                logger.warning('%s has a different hierarchy in the two models; using the minimal value (%s, %s) => %s'
                               % (name, same_name.hierarchy, coupling_order.hierarchy,
                                  min(same_name.hierarchy, coupling_order.hierarchy)))
                same_name.hierarchy = min(same_name.hierarchy, coupling_order.hierarchy)
            if coupling_order.expansion_order != same_name.expansion_order:
                logger.warning('%s has a different expansion_order in the two models; using the minimal value (%s, %s) => %s'
                               % (name, coupling_order.expansion_order, same_name.expansion_order,
                                  min(same_name.expansion_order, coupling_order.expansion_order)))
                same_name.expansion_order = min(same_name.expansion_order, coupling_order.expansion_order)
            if hasattr(same_name, 'perturbative_expansion') and same_name.perturbative_expansion:
                logger.info('%s will be forbidden to run at NLO' % same_name.name)
                same_name.perturbative_expansion = 0
        else:
            self.orders.append(coupling_order)

    def add_lorentz(self, lorentz):
        """add one lorentz structure"""

        name = lorentz.name
        same_name = next((p for p in self.lorentz if p.name == name), None)
        if same_name:
            lorentz.name = '%s%s' % (lorentz.name, self.addon)

        # translate already renamed symbols inside the structure
        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            lorentz.structure = pattern.sub(replace, lorentz.structure)

        # re-use an existing lorentz structure with the same spins/structure
        old_lor = next((p for p in self.lorentz
                        if p.structure == lorentz.structure and p.spins == lorentz.spins),
                       None)

        if old_lor:
            lorentz.replace = old_lor
        else:
            self.lorentz.append(lorentz)

    def add_interaction(self, interaction, model):
        """Add one interaction to the model. This is UNCONDITIONAL!
        If the same interaction is already in the model, it would appear twice.
        This is now weakened when both interactions are exactly identical
        (EXACT same color/lorentz/coupling expression).
        """

        # work on a copy; copying registers the new vertex in the add-on
        # model's list of vertices, so remove that spurious entry right away
        interaction = interaction.__class__(**interaction.__dict__)
        model.all_vertices.pop(-1)

        # avoid name clashes with vertices of the base model
        name = interaction.name
        same_name = next((p for p in self.vertices if p.name == name), None)
        if same_name:
            interaction.name = '%s%s' % (interaction.name, self.addon)

        # point to the base-model particles/lorentz/couplings when they were merged
        particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
        interaction.particles = particles

        lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
        interaction.lorentz = lorentz

        couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
                     for key, c in interaction.couplings.items()]
        interaction.couplings = dict(couplings)

        # look for an identical vertex: same particle content and, for each
        # coupling, same lorentz structure, color structure and coupling
        get_pdg = lambda vertex: sorted([p.pdg_code for p in vertex.particles])
        id_part = get_pdg(interaction)
        iden_vertex = [v for v in self.vertices if get_pdg(v) == id_part]
        iden = False
        nb_coupling = len(interaction.couplings)
        keys = interaction.couplings.keys()

        get_lor_and_color = lambda i: (interaction.lorentz[keys[i][1]].structure,
                                       interaction.color[keys[i][0]])
        for v in iden_vertex:
            if len(v.couplings) != nb_coupling:
                continue
            found = []
            for ((i, j), coup) in v.couplings.items():
                new_lorentz = v.lorentz[j].structure
                new_color = v.color[i]
                same = [k for k in range(nb_coupling) if k not in found and
                        get_lor_and_color(k) == (new_lorentz, new_color)]
                if not same:
                    break
                else:
                    for k in same:
                        if interaction.couplings[keys[k]] == coup:
                            found.append(k)
                            break
                    else:
                        # no identical coupling object: accept a match on the
                        # coupling order only, but warn the user
                        for k in same:
                            if interaction.couplings[keys[k]].order == coup.order:
                                found.append(k)
                                warning = """Did NOT add interaction %s since it has the same particles/lorentz/color/coupling order
    BUT we did not manage to ensure that the coupling is the same. Coupling expressions:
    base model: %s
    addon model: %s
    """ % (id_part, coup.value, interaction.couplings[keys[k]].value)
                                logger.warning(warning)
                                break
                        else:
                            pass
            else:
                # all couplings of this vertex were matched: do not add a duplicate
                return

        logger.info('Adding interaction for the following particles: %s' % id_part)

        self.vertices.append(interaction)

    def add_CTinteraction(self, interaction):
        """Add one counterterm interaction to the model. This is UNCONDITIONAL!
        If the same interaction is already in the model, it will appear twice."""

        # avoid name clashes with vertices of the base model
        name = interaction.name
        same_name = next((p for p in self.vertices if p.name == name), None)
        if same_name:
            interaction.name = '%s%s' % (interaction.name, self.addon)

        # point to the base-model objects when they were merged
        particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
        interaction.particles = particles

        lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
        interaction.lorentz = lorentz

        couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
                     for key, c in interaction.couplings.items()]
        interaction.couplings = dict(couplings)

        loop_particles = [[p.replace if hasattr(p, 'replace') else p for p in plist]
                          for plist in interaction.loop_particles]
        interaction.loop_particles = loop_particles
        self.CTvertices.append(interaction)

    def add_model(self, model=None, path=None, identify_particles=None):
        """add another model into the current one"""

        self.new_external = []
        if path:
            model = ufomodels.load_model(path)

        if not model:
            raise USRMODERROR, 'Need a valid Model'
        else:
            path = model.__path__[0]

        if not hasattr(model, 'all_orders'):
            raise USRMODERROR, 'Add-on model doesn\'t follow the UFO conventions (no couplings_order information)\n' + \
                  'MG5 is able to load such a model but NOT to use it with the add-model feature.'
        if isinstance(model.all_particles[0].mass, basestring):
            raise USRMODERROR, 'Add-on model doesn\'t follow the UFO conventions (Mass/Width of particles are string names, not objects)\n' + \
                  'MG5 is able to load such a model but NOT to use it with the add-model feature.'

        for order in model.all_orders:
            if hasattr(order, 'perturbative_expansion') and order.perturbative_expansion:
                raise USRMODERROR, 'An add-on model can not be a loop model.'

        for order in model.all_orders:
            self.add_coupling_order(order)

        # build the pdg-code translation for the particles identified by the user
        identify_pid = {}
        if identify_particles:
            for new, old in identify_particles.items():
                new_part = next((p for p in model.all_particles if p.name == new), None)
                old_part = next((p for p in self.particles if p.name == old), None)

                # fall back on a case-insensitive match, which must be unique
                if not new_part:
                    first = True
                    for p in model.all_particles:
                        if p.name.lower() == new.lower():
                            if not first:
                                raise Exception
                            else:
                                first = False
                                new_part = p
                if not old_part:
                    first = True
                    for p in self.particles:
                        if p.name.lower() == old.lower():
                            if not first:
                                raise Exception
                            else:
                                first = False
                                old_part = p

                if new_part is None:
                    raise USRMODERROR, "particle %s not in the added model" % new
                if old_part is None:
                    raise USRMODERROR, "particle %s not in the original model" % old
                identify_pid[new_part.pdg_code] = old_part.pdg_code

                # identify the anti-particles as well
                if new_part.antiname not in identify_particles:
                    new_anti = new_part.antiname
                    old_anti = old_part.antiname
                    if old_anti == old:
                        raise USRMODERROR, "failed identification (one particle is self-conjugate and not the other)"
                    logger.info("adding identification for anti-particle: %s=%s" % (new_anti, old_anti))
                    identify_particles[new_anti] = old_anti

        for parameter in model.all_parameters:
            self.add_parameter(parameter, identify_pid)
        for coupling in model.all_couplings:
            self.add_coupling(coupling)
        for lorentz in model.all_lorentz:
            self.add_lorentz(lorentz)
        for particle in model.all_particles:
            if identify_particles and particle.name in identify_particles:
                self.add_particle(particle, identify=identify_particles[particle.name])
            else:
                self.add_particle(particle)
        for vertex in model.all_vertices:
            self.add_interaction(vertex, model)

        self.all_path.append(path)

        return