1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """Classes for diagram generation. Amplitude performs the diagram
16 generation, DecayChainAmplitude keeps track of processes with decay
17 chains, and MultiProcess allows generation of processes with
18 multiparticle definitions. DiagramTag allows to identify diagrams
19 based on relevant properties.
20 """
21
22 from __future__ import absolute_import
23 from six.moves import filter
24
25
26
27 import array
28 import copy
29 import itertools
30 import logging
31
32 import madgraph.core.base_objects as base_objects
33 import madgraph.various.misc as misc
34 from madgraph import InvalidCmd, MadGraph5Error
35 from six.moves import range
36 from six.moves import zip
37
38 logger = logging.getLogger('madgraph.diagram_generation')
42
48 """Class to tag diagrams based on objects with some __lt__ measure, e.g.
49 PDG code/interaction id (for comparing diagrams from the same amplitude),
50 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs).
51 Algorithm: Create chains starting from external particles:
52 1 \ / 6
53 2 /\______/\ 7
54 3_ / | \_ 8
55 4 / 5 \_ 9
56 \ 10
57 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867)
58 (((1,2,id12),(3,4,id34)),id1234),
59 5,id91086712345)
60 where idN is the id of the corresponding interaction. The ordering within
61 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has
62 depth 4, 5 has depth 0), and if equal on the ordering of the chain elements.
63 The determination of central vertex is based on minimizing the chain length
64 for the longest subchain.
65 This gives a unique tag which can be used to identify diagrams
66 (instead of symmetry), as well as identify identical matrix elements from
67 different processes."""
68
70 """Exception for any problems in DiagramTags"""
71 pass
72
    def __init__(self, diagram, model=None, ninitial=2):
        """Initialize with a diagram. Create DiagramTagChainLinks according to
        the diagram, and figure out if we need to shift the central vertex.

        diagram  -- base_objects.Diagram whose vertices are walked in order
        model    -- passed through to link_from_leg/vertex_id_from_vertex
                    (may be None for daughter classes that do not need it)
        ninitial -- number of initial-state legs (default 2)
        """

        # Map from leg number -> DiagramTagChainLink built so far, so that a
        # propagator created by one vertex can be picked up by a later vertex.
        leg_dict = {}

        for vertex in diagram.get('vertices'):
            # All legs but the last are incoming to this vertex; the last leg
            # is the produced propagator -- except for the final vertex, where
            # every leg participates.
            legs = vertex.get('legs')[:-1]
            lastvx = vertex == diagram.get('vertices')[-1]
            if lastvx:
                legs = vertex.get('legs')

            # For each leg, reuse the link built by an earlier vertex if one
            # exists (setdefault), otherwise create a fresh end link.
            link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                        DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                        for leg in legs],
                                       self.vertex_id_from_vertex(vertex,
                                                                  lastvx,
                                                                  model,
                                                                  ninitial))

            # Register the produced propagator for later vertices.
            if not lastvx:
                leg_dict[vertex.get('legs')[-1].get('number')] = link

        # After the loop, 'link' is the link built from the final vertex: the
        # provisional central vertex of the tag.
        self.tag = link

        # Re-root the tag: repeatedly move the central vertex towards the
        # longest (deepest) chain until the longest sub-chain is minimal.
        # Links are sorted, so self.tag.links[0] is always the deepest chain.
        done = max([l.depth for l in self.tag.links]) == 0
        while not done:
            # Pick the deepest sub-chain as candidate new central vertex.
            longest_chain = self.tag.links[0]

            # Fold the remaining chains into a single link, flipping the
            # vertex id since the propagator direction is reversed.
            new_link = DiagramTagChainLink(self.tag.links[1:],
                                           self.flip_vertex(\
                                               self.tag.vertex_id,
                                               longest_chain.vertex_id,
                                               self.tag.links[1:]))

            # Candidate tag rooted at the old longest chain's vertex.
            other_links = list(longest_chain.links) + [new_link]
            other_link = DiagramTagChainLink(other_links,
                                             self.flip_vertex(\
                                                 longest_chain.vertex_id,
                                                 self.tag.vertex_id,
                                                 other_links))

            # Accept the re-rooting only if it shortens the longest chain
            # (per the DiagramTagChainLink ordering); otherwise we are done.
            if other_link.links[0] < self.tag.links[0]:
                self.tag = other_link
            else:
                done = True
132
134 """Output a diagram from a DiagramTag. Note that each daughter
135 class must implement the static functions id_from_vertex_id
136 (if the vertex id is something else than an integer) and
137 leg_from_link (to pass the correct info from an end link to a
138 leg)."""
139
140
141 diagram = base_objects.Diagram({'vertices': \
142 self.vertices_from_link(self.tag,
143 model,
144 True)})
145 diagram.calculate_orders(model)
146 return diagram
147
148 @classmethod
150 """Recursively return the leg corresponding to this link and
151 the list of all vertices from all previous links"""
152
153 if link.end_link:
154
155 return cls.leg_from_link(link), []
156
157
158 leg_vertices = [cls.vertices_from_link(l, model) for l in link.links]
159
160 legs = base_objects.LegList(sorted([l for l,v in leg_vertices],
161 key= lambda l: l.get('number'), reverse=True))
162
163
164 vertices = base_objects.VertexList(sum([v for l, v in leg_vertices],
165 []))
166
167 if not first_vertex:
168
169
170 last_leg = cls.leg_from_legs(legs,link.vertex_id,model)
171 legs.append(last_leg)
172
173
174 vertices.append(cls.vertex_from_link(legs,
175 link.vertex_id,
176 model))
177 if first_vertex:
178
179 return vertices
180 else:
181
182 return last_leg, vertices
183
184 @classmethod
186 """Returns the list of external PDGs of the interaction corresponding
187 to this vertex_id."""
188
189
190
191
192 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]):
193 return vertex_id[2]['PDGs']
194 else:
195 return [part.get_pdg_code() for part in model.get_interaction(
196 cls.id_from_vertex_id(vertex_id)).get('particles')]
197
198 @classmethod
200 """Return a leg from a leg list and the model info"""
201
202 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model))
203
204
205 for pdg in [leg.get('id') for leg in legs]:
206 pdgs.remove(pdg)
207
208 assert len(pdgs) == 1
209
210 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code()
211 number = min([l.get('number') for l in legs])
212
213 state = (len([l for l in legs if l.get('state') == False]) != 1)
214
215 onshell= False
216
217 return base_objects.Leg({'id': pdg,
218 'number': number,
219 'state': state,
220 'onshell': onshell})
221
222 @classmethod
235
236 @staticmethod
238 """Return a leg from a link"""
239
240 if link.end_link:
241
242 return base_objects.Leg({'number':link.links[0][1],
243 'id':link.links[0][0][0],
244 'state':(link.links[0][0][1] == 0),
245 'onshell':False})
246
247
248 assert False
249
250 @staticmethod
252 """Return the numerical vertex id from a link.vertex_id"""
253
254 return vertex_id[0][0]
255
256 @staticmethod
258 """Return the loop_info stored in this vertex id. Notice that the
259 IdentifyME tag does not store the loop_info, but should normally never
260 need access to it."""
261
262 return vertex_id[2]
263
264 @staticmethod
266 """Reorder a permutation with respect to start_perm. Note that
267 both need to start from 1."""
268 if perm == start_perm:
269 return list(range(len(perm)))
270 order = [i for (p,i) in \
271 sorted([(p,i) for (i,p) in enumerate(perm)])]
272 return [start_perm[i]-1 for i in order]
273
274 @staticmethod
276 """Returns the default end link for a leg: ((id, state), number).
277 Note that the number is not taken into account if tag comparison,
278 but is used only to extract leg permutations."""
279 if leg.get('state'):
280
281 return [((leg.get('id'), 0), leg.get('number'))]
282 else:
283
284 return [((leg.get('id'), leg.get('number')), leg.get('number'))]
285
286 @staticmethod
288 """Returns the default vertex id: just the interaction id
289 Note that in the vertex id, like the leg, only the first entry is
290 taken into account in the tag comparison, while the second is for
291 storing information that is not to be used in comparisons and the
292 third for additional info regarding the shrunk loop vertex."""
293
294 if isinstance(vertex,base_objects.ContractedVertex):
295
296 return ((vertex.get('id'),vertex.get('loop_tag')),(),
297 {'PDGs':vertex.get('PDGs')})
298 else:
299 return ((vertex.get('id'),()),(),{})
300
301 @staticmethod
303 """Returns the default vertex flip: just the new_vertex"""
304 return new_vertex
305
307 """Equal if same tag"""
308 if type(self) != type(other):
309 return False
310 return self.tag == other.tag
311
313 return not self.__eq__(other)
314
317
319 return self.tag < other.tag
320
322 return self.tag > other.tag
323
324 __repr__ = __str__
325
327 """Chain link for a DiagramTag. A link is a tuple + vertex id + depth,
328 with a comparison operator defined"""
329
330 - def __init__(self, objects, vertex_id = None):
331 """Initialize, either with a tuple of DiagramTagChainLinks and
332 a vertex_id (defined by DiagramTag.vertex_id_from_vertex), or
333 with an external leg object (end link) defined by
334 DiagramTag.link_from_leg"""
335
336 if vertex_id == None:
337
338 self.links = tuple(objects)
339 self.vertex_id = (0,)
340 self.depth = 0
341 self.end_link = True
342 return
343
344 self.links = tuple(sorted(list(tuple(objects)), reverse=True))
345 self.vertex_id = vertex_id
346
347
348 self.depth = sum([l.depth for l in self.links],
349 max(1, len(self.links)-1))
350 self.end_link = False
351
353 """Get the permutation of external numbers (assumed to be the
354 second entry in the end link tuples)"""
355
356 if self.end_link:
357
358 return [self.links[0][1]]
359
360 return sum([l.get_external_numbers() for l in self.links], [])
361
363 """Compare self with other in the order:
364 1. depth 2. len(links) 3. vertex id 4. measure of links"""
365
366 if self == other:
367 return False
368
369 if self.depth != other.depth:
370 return self.depth < other.depth
371
372 if len(self.links) != len(other.links):
373 return len(self.links) < len(other.links)
374
375 if self.vertex_id[0] != other.vertex_id[0]:
376 if isinstance(self.vertex_id[0], int) and isinstance(other.vertex_id[0], tuple):
377 return True
378 elif isinstance(self.vertex_id[0], tuple) and isinstance(other.vertex_id[0], int):
379 return False
380 elif isinstance(self.vertex_id[0], str) and isinstance(other.vertex_id[0], tuple):
381 return True
382 elif isinstance(self.vertex_id[0], tuple) and isinstance(other.vertex_id[0], str):
383 return False
384 else:
385 try:
386 return self.vertex_id[0] < other.vertex_id[0]
387 except TypeError as error:
388 if error.args == ("'<' not supported between instances of 'tuple' and 'str'",):
389 return False
390 elif error.args == ("'<' not supported between instances of 'str' and 'tuple'",):
391 return True
392 else:
393 raise Exception
394
395
396 for i, link in enumerate(self.links):
397 if i > len(other.links) - 1:
398 return False
399 if link != other.links[i]:
400 return link < other.links[i]
401
403 return self != other and not self.__lt__(other)
404
406 """For end link,
407 consider equal if self.links[0][0] == other.links[0][0],
408 i.e., ignore the leg number (in links[0][1])."""
409
410 if self.end_link and other.end_link and self.depth == other.depth \
411 and self.vertex_id == other.vertex_id:
412 return self.links[0][0] == other.links[0][0]
413
414 return self.end_link == other.end_link and self.depth == other.depth \
415 and self.vertex_id[0] == other.vertex_id[0] \
416 and self.links == other.links
417
419 return not self.__eq__(other)
420
421
423 if self.end_link:
424 return str(self.links)
425 return "%s, %s; %d" % (str(self.links),
426 str(self.vertex_id),
427 self.depth)
428
429 __repr__ = __str__
430
431
432
433
434 -class Amplitude(base_objects.PhysicsObject):
435 """Amplitude: process + list of diagrams (ordered)
436 Initialize with a process, then call generate_diagrams() to
437 generate the diagrams for the amplitude
438 """
439
441 """Default values for all properties"""
442
443 self['process'] = base_objects.Process()
444 self['diagrams'] = None
445
446
447 self['has_mirror_process'] = False
448
461
462 - def filter(self, name, value):
475
476 - def get(self, name):
485
486
487
489 """Return diagram property names as a nicely sorted list."""
490
491 return ['process', 'diagrams', 'has_mirror_process']
492
494 """Returns number of diagrams for this amplitude"""
495 return len(self.get('diagrams'))
496
498 """Return an AmplitudeList with just this amplitude.
499 Needed for DecayChainAmplitude."""
500
501 return AmplitudeList([self])
502
504 """Returns a nicely formatted string of the amplitude content."""
505 return self.get('process').nice_string(indent) + "\n" + \
506 self.get('diagrams').nice_string(indent)
507
509 """Returns a nicely formatted string of the amplitude process."""
510 return self.get('process').nice_string(indent)
511
513 """Returns the number of initial state particles in the process."""
514 return self.get('process').get_ninitial()
515
517 """ Returns wether this amplitude has a loop process."""
518
519 return self.get('process').get('perturbation_couplings')
520
522 """Generate diagrams. Algorithm:
523
524 1. Define interaction dictionaries:
525 * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
526 * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1
527
528 2. Set flag from_group=true for all external particles.
529 Flip particle/anti particle for incoming particles.
530
531 3. If there is a dictionary n->0 with n=number of external
532 particles, create if possible the combination [(1,2,3,4,...)]
533 with *at least two* from_group==true. This will give a
534 finished (set of) diagram(s) (done by reduce_leglist)
535
536 4. Create all allowed groupings of particles with at least one
537 from_group==true (according to dictionaries n->1):
538 [(1,2),3,4...],[1,(2,3),4,...],...,
539 [(1,2),(3,4),...],...,[(1,2,3),4,...],...
540 (done by combine_legs)
541
542 5. Replace each group with a (list of) new particle(s) with number
543 n = min(group numbers). Set from_group true for these
544 particles and false for all other particles. Store vertex info.
545 (done by merge_comb_legs)
546
547 6. Stop algorithm when at most 2 particles remain.
548 Return all diagrams (lists of vertices).
549
550 7. Repeat from 3 (recursion done by reduce_leglist)
551
552 8. Replace final p=p vertex
553
554 Be aware that the resulting vertices have all particles outgoing,
555 so need to flip for incoming particles when used.
556
557 SPECIAL CASE: For A>BC... processes which are legs in decay
558 chains, we need to ensure that BC... combine first, giving A=A
559 as a final vertex. This case is defined by the Process
560 property is_decay_chain = True.
561 This function can also be called by the generate_diagram function
562 of LoopAmplitudes, in which case the generated diagrams here must not
563 be directly assigned to the 'diagrams' attributed but returned as a
564 DiagramList by the function. This is controlled by the argument
565 returndiag.
566 """
567
568 process = self.get('process')
569 model = process.get('model')
570 legs = process.get('legs')
571
572 for key in process.get('overall_orders').keys():
573 try:
574 process.get('orders')[key] = \
575 min(process.get('orders')[key],
576 process.get('overall_orders')[key])
577 except KeyError:
578 process.get('orders')[key] = process.get('overall_orders')[key]
579
580 assert model.get('particles'), \
581 "particles are missing in model: %s" % model.get('particles')
582
583 assert model.get('interactions'), \
584 "interactions are missing in model"
585
586
587 res = base_objects.DiagramList()
588
589 if len([leg for leg in legs if model.get('particle_dict')[\
590 leg.get('id')].is_fermion()]) % 2 == 1:
591 if not returndiag:
592 self['diagrams'] = res
593 raise InvalidCmd('The number of fermion is odd')
594 else:
595 return False, res
596
597
598
599 if not model.get('got_majoranas') and \
600 len([leg for leg in legs if leg.is_incoming_fermion(model)]) != \
601 len([leg for leg in legs if leg.is_outgoing_fermion(model)]):
602 if not returndiag:
603 self['diagrams'] = res
604 raise InvalidCmd('The number of of incoming/outcoming fermions are different')
605 else:
606 return False, res
607
608
609
610 for charge in model.get('conserved_charge'):
611 total = 0
612 for leg in legs:
613 part = model.get('particle_dict')[leg.get('id')]
614 try:
615 value = part.get(charge)
616 except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
617 try:
618 value = getattr(part, charge)
619 except AttributeError:
620 value = 0
621
622 if (leg.get('id') != part['pdg_code']) != leg['state']:
623 total -= value
624 else:
625 total += value
626
627 if abs(total) > 1e-10:
628 if not returndiag:
629 self['diagrams'] = res
630 raise InvalidCmd('No %s conservation for this process ' % charge)
631 return res
632 else:
633 raise InvalidCmd('No %s conservation for this process ' % charge)
634 return res, res
635
636 if not returndiag:
637 logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))
638
639
640 for i in range(0, len(process.get('legs'))):
641
642 leg = copy.copy(process.get('legs')[i])
643 process.get('legs')[i] = leg
644 if leg.get('number') == 0:
645 leg.set('number', i + 1)
646
647
648
649
650 leglist = self.copy_leglist(process.get('legs'))
651
652 for leg in leglist:
653
654
655 leg.set('from_group', True)
656
657
658
659 if leg.get('state') == False:
660 part = model.get('particle_dict')[leg.get('id')]
661 leg.set('id', part.get_anti_pdg_code())
662
663
664
665 max_multi_to1 = max([len(key) for key in \
666 model.get('ref_dict_to1').keys()])
667
668
669
670
671
672
673
674
675 is_decay_proc = process.get_ninitial() == 1
676 if is_decay_proc:
677 part = model.get('particle_dict')[leglist[0].get('id')]
678
679
680
681 ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
682 (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
683
684
685 leglist[0].set('from_group', None)
686 reduced_leglist = self.reduce_leglist(leglist,
687 max_multi_to1,
688 ref_dict_to0,
689 is_decay_proc,
690 process.get('orders'))
691 else:
692 reduced_leglist = self.reduce_leglist(leglist,
693 max_multi_to1,
694 model.get('ref_dict_to0'),
695 is_decay_proc,
696 process.get('orders'))
697
698
699
700
701 self.convert_dgleg_to_leg(reduced_leglist)
702
703 if reduced_leglist:
704 for vertex_list in reduced_leglist:
705 res.append(self.create_diagram(base_objects.VertexList(vertex_list)))
706
707
708
709 failed_crossing = not res
710
711
712
713
714
715
716 if process.get('required_s_channels') and \
717 process.get('required_s_channels')[0]:
718
719
720 lastvx = -1
721
722
723
724 if is_decay_proc: lastvx = -2
725 ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
726
727 old_res = res
728 res = base_objects.DiagramList()
729 for id_list in process.get('required_s_channels'):
730 res_diags = [diagram for diagram in old_res if all([req_s_channel in \
731 [vertex.get_s_channel_id(\
732 process.get('model'), ninitial) \
733 for vertex in diagram.get('vertices')[:lastvx]] \
734 for req_s_channel in \
735 id_list])]
736
737 res.extend([diag for diag in res_diags if diag not in res])
738
739
740
741
742
743 if process.get('forbidden_s_channels'):
744 ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
745 if ninitial == 2:
746 res = base_objects.DiagramList(\
747 [diagram for diagram in res if not any([vertex.get_s_channel_id(\
748 process.get('model'), ninitial) \
749 in process.get('forbidden_s_channels')
750 for vertex in diagram.get('vertices')[:-1]])])
751 else:
752
753
754 newres= []
755 for diagram in res:
756 leg1 = 1
757
758
759
760 vertex = diagram.get('vertices')[-1]
761 if any([l['number'] ==1 for l in vertex.get('legs')]):
762 leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
763 to_loop = list(range(len(diagram.get('vertices'))-1))
764 if leg1 >1:
765 to_loop.reverse()
766 for i in to_loop:
767 vertex = diagram.get('vertices')[i]
768 if leg1:
769 if any([l['number'] ==leg1 for l in vertex.get('legs')]):
770 leg1 = 0
771 continue
772 if vertex.get_s_channel_id(process.get('model'), ninitial)\
773 in process.get('forbidden_s_channels'):
774 break
775 else:
776 newres.append(diagram)
777 res = base_objects.DiagramList(newres)
778
779
780
781
782 if process.get('forbidden_onsh_s_channels'):
783 ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
784
785 verts = base_objects.VertexList(sum([[vertex for vertex \
786 in diagram.get('vertices')[:-1]
787 if vertex.get_s_channel_id(\
788 process.get('model'), ninitial) \
789 in process.get('forbidden_onsh_s_channels')] \
790 for diagram in res], []))
791 for vert in verts:
792
793 newleg = copy.copy(vert.get('legs').pop(-1))
794 newleg.set('onshell', False)
795 vert.get('legs').append(newleg)
796
797
798 for diagram in res:
799 diagram.calculate_orders(model)
800
801
802
803
804
805
806
807
808 if not returndiag and len(res)>0:
809 res = self.apply_squared_order_constraints(res)
810
811 if diagram_filter:
812 res = self.apply_user_filter(res)
813
814
815 if not process.get('is_decay_chain'):
816 for diagram in res:
817 vertices = diagram.get('vertices')
818 if len(vertices) > 1 and vertices[-1].get('id') == 0:
819
820
821
822
823 vertices = copy.copy(vertices)
824 lastvx = vertices.pop()
825 nexttolastvertex = copy.copy(vertices.pop())
826 legs = copy.copy(nexttolastvertex.get('legs'))
827 ntlnumber = legs[-1].get('number')
828 lastleg = [leg for leg in lastvx.get('legs') if leg.get('number') != ntlnumber][0]
829
830 if lastleg.get('onshell') == False:
831 lastleg.set('onshell', None)
832
833 legs[-1] = lastleg
834 nexttolastvertex.set('legs', legs)
835 vertices.append(nexttolastvertex)
836 diagram.set('vertices', vertices)
837
838 if res and not returndiag:
839 logger.info("Process has %d diagrams" % len(res))
840
841
842 self.trim_diagrams(diaglist=res)
843
844
845 pertur = 'QCD'
846 if self.get('process')['perturbation_couplings']:
847 pertur = sorted(self.get('process')['perturbation_couplings'])[0]
848 self.get('process').get('legs').sort(pert=pertur)
849
850
851 if not returndiag:
852 self['diagrams'] = res
853 return not failed_crossing
854 else:
855 return not failed_crossing, res
856
858 """Applies the user specified squared order constraints on the diagram
859 list in argument."""
860
861 res = copy.copy(diag_list)
862
863
864
865 for name, (value, operator) in self['process'].get('constrained_orders').items():
866 res.filter_constrained_orders(name, value, operator)
867
868
869
870
871 while True:
872 new_res = res.apply_positive_sq_orders(res,
873 self['process'].get('squared_orders'),
874 self['process']['sqorders_types'])
875
876 if len(res)==len(new_res):
877 break
878 elif (len(new_res)>len(res)):
879 raise MadGraph5Error(
880 'Inconsistency in function apply_squared_order_constraints().')
881
882 res = new_res
883
884
885
886
887 neg_orders = [(order, value) for order, value in \
888 self['process'].get('squared_orders').items() if value<0]
889 if len(neg_orders)==1:
890 neg_order, neg_value = neg_orders[0]
891
892 res, target_order = res.apply_negative_sq_order(res, neg_order,\
893 neg_value, self['process']['sqorders_types'][neg_order])
894
895
896
897
898 self['process']['squared_orders'][neg_order]=target_order
899 elif len(neg_orders)>1:
900 raise InvalidCmd('At most one negative squared order constraint'+\
901 ' can be specified, not %s.'%str(neg_orders))
902
903 return res
904
906 """Applies the user specified squared order constraints on the diagram
907 list in argument."""
908
909 if True:
910 remove_diag = misc.plugin_import('user_filter',
911 'user filter required to be defined in PLUGIN/user_filter.py with the function remove_diag(ONEDIAG) which returns True if the diagram has to be removed',
912 fcts=['remove_diag'])
913 else:
914
915 def remove_diag(diag, model=None):
916 for vertex in diag['vertices']:
917 if vertex['id'] == 0:
918 continue
919 if vertex['legs'][-1]['number'] < 3:
920 if abs(vertex['legs'][-1]['id']) <6:
921 return True
922 return False
923
924 res = diag_list.__class__()
925 nb_removed = 0
926 model = self['process']['model']
927 for diag in diag_list:
928 if remove_diag(diag, model):
929 nb_removed +=1
930 else:
931 res.append(diag)
932
933 if nb_removed:
934 logger.warning('Diagram filter is ON and removed %s diagrams for this subprocess.' % nb_removed)
935
936 return res
937
938
939
941 """ Return a Diagram created from the vertex list. This function can be
942 overloaded by daughter classes."""
943 return base_objects.Diagram({'vertices':vertexlist})
944
946 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs.
947 In Amplitude, there is nothing to do. """
948
949 return True
950
952 """ Simply returns a copy of the leg list. This function is
953 overloaded in LoopAmplitude so that a DGLoopLeg list is returned.
954 The DGLoopLeg has some additional parameters only useful during
955 loop diagram generation"""
956
957 return base_objects.LegList(\
958 [ copy.copy(leg) for leg in legs ])
959
    def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                       is_decay_proc = False, coupling_orders = None):
        """Recursive function to reduce N LegList to N-1
        For algorithm, see doc for generate_diagrams.

        curr_leglist    -- LegList still to be combined (None signals a
                           failed branch and propagates as None)
        max_multi_to1   -- largest n for which an n->1 vertex exists
        ref_dict_to0    -- dictionary of leg-id tuples that combine to
                           nothing (i.e. close the diagram)
        is_decay_proc   -- True for 1 -> N decay processes
        coupling_orders -- remaining coupling-order budget, or None

        Returns a list of vertex lists (one per diagram), or None when no
        diagram can be built from curr_leglist.
        """

        # Accumulates one entry per completed (partial) diagram.
        res = []

        # Propagate failure from a caller that passed a dead branch.
        if curr_leglist is None:
            return None

        model = self.get('process').get('model')
        ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

        # Step 3 of the algorithm: if the remaining legs can annihilate to
        # nothing, build the closing vertex (or vertices) now.
        if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
            # get_combined_vertices can be overloaded by daughter classes to
            # restrict the allowed interaction ids.
            vertex_ids = self.get_combined_vertices(curr_leglist,
                copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                                     leg in curr_leglist]))]))

            final_vertices = [base_objects.Vertex({'legs':curr_leglist,
                                                   'id':vertex_id}) for \
                              vertex_id in vertex_ids]

            # Keep only closing vertices that fit in the coupling-order budget.
            for final_vertex in final_vertices:
                if self.reduce_orders(coupling_orders, model,
                                      [final_vertex.get('id')]) != False:
                    res.append([final_vertex])

        # Recursion end: with two legs left, either we closed the diagram
        # above or this branch is dead.
        if len(curr_leglist) == 2:
            if res:
                return res
            else:
                return None

        # Step 4: all allowed groupings of legs via n->1 vertices.
        comb_lists = self.combine_legs(curr_leglist,
                                       ref_dict_to1, max_multi_to1)

        # Step 5: replace each grouping by the produced leg(s) + vertex info.
        leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

        # Recurse on every reduced leg list.
        for leg_vertex_tuple in leg_vertex_list:

            # Drop combinations that create a forbidden intermediate particle.
            if self.get('process').get('forbidden_particles') and \
                any([abs(vertex.get('legs')[-1].get('id')) in \
                     self.get('process').get('forbidden_particles') \
                     for vertex in leg_vertex_tuple[1]]):
                continue

            # Subtract the new vertices' couplings from the order budget;
            # False means the budget is exceeded.
            new_coupling_orders = self.reduce_orders(coupling_orders,
                                                     model,
                                                     [vertex.get('id') for vertex in \
                                                      leg_vertex_tuple[1]])
            if new_coupling_orders == False:
                continue

            # Step 7: recurse with the reduced leg list.
            reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  new_coupling_orders)

            # Combine this level's vertices with every completion found below.
            if reduced_diagram:
                vertex_list_list = [list(leg_vertex_tuple[1])]
                vertex_list_list.append(reduced_diagram)
                expanded_list = expand_list_list(vertex_list_list)
                res.extend(expanded_list)

        return res
1048
1049 - def reduce_orders(self, coupling_orders, model, vertex_id_list):
1050 """Return False if the coupling orders for any coupling is <
1051 0, otherwise return the new coupling orders with the vertex
1052 orders subtracted. If coupling_orders is not given, return
1053 None (which counts as success).
1054 WEIGHTED is a special order, which corresponds to the sum of
1055 order hierarchies for the couplings.
1056 We ignore negative constraints as these cannot be taken into
1057 account on the fly but only after generation."""
1058
1059 if not coupling_orders:
1060 return None
1061
1062 present_couplings = copy.copy(coupling_orders)
1063 for id in vertex_id_list:
1064
1065 if not id:
1066 continue
1067 inter = model.get("interaction_dict")[id]
1068 for coupling in inter.get('orders').keys():
1069
1070
1071 if coupling in present_couplings and \
1072 present_couplings[coupling]>=0:
1073
1074 present_couplings[coupling] -= \
1075 inter.get('orders')[coupling]
1076 if present_couplings[coupling] < 0:
1077
1078 return False
1079
1080 if 'WEIGHTED' in present_couplings and \
1081 present_couplings['WEIGHTED']>=0:
1082 weight = sum([model.get('order_hierarchy')[c]*n for \
1083 (c,n) in inter.get('orders').items()])
1084 present_couplings['WEIGHTED'] -= weight
1085 if present_couplings['WEIGHTED'] < 0:
1086
1087 return False
1088
1089 return present_couplings
1090
1091 - def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
1092 """Recursive function. Take a list of legs as an input, with
1093 the reference dictionary n-1->1, and output a list of list of
1094 tuples of Legs (allowed combinations) and Legs (rest). Algorithm:
1095
1096 1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..
1097
1098 2. For each combination, say [34]. Check if combination is valid.
1099 If so:
1100
1101 a. Append [12[34]56] to result array
1102
1103 b. Split [123456] at index(first element in combination+1),
1104 i.e. [12],[456] and subtract combination from second half,
1105 i.e.: [456]-[34]=[56]. Repeat from 1. with this array
1106
1107 3. Take result array from call to 1. (here, [[56]]) and append
1108 (first half in step b - combination) + combination + (result
1109 from 1.) = [12[34][56]] to result array
1110
1111 4. After appending results from all n-combinations, return
1112 resulting array. Example, if [13] and [45] are valid
1113 combinations:
1114 [[[13]2456],[[13]2[45]6],[123[45]6]]
1115 """
1116
1117 res = []
1118
1119
1120 for comb_length in range(2, max_multi_to1 + 1):
1121
1122
1123 if comb_length > len(list_legs):
1124 return res
1125
1126
1127
1128 for comb in itertools.combinations(list_legs, comb_length):
1129
1130
1131 if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):
1132
1133
1134
1135 res_list = copy.copy(list_legs)
1136 for leg in comb:
1137 res_list.remove(leg)
1138 res_list.insert(list_legs.index(comb[0]), comb)
1139 res.append(res_list)
1140
1141
1142
1143
1144
1145
1146 res_list1 = list_legs[0:list_legs.index(comb[0])]
1147 res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
1148 for leg in comb[1:]:
1149 res_list2.remove(leg)
1150
1151
1152 res_list = res_list1
1153 res_list.append(comb)
1154
1155
1156 for item in self.combine_legs(res_list2,
1157 ref_dict_to1,
1158 max_multi_to1):
1159 final_res_list = copy.copy(res_list)
1160 final_res_list.extend(item)
1161 res.append(final_res_list)
1162
1163 return res
1164
1165
1167 """Takes a list of allowed leg combinations as an input and returns
1168 a set of lists where combinations have been properly replaced
1169 (one list per element in the ref_dict, so that all possible intermediate
1170 particles are included). For each list, give the list of vertices
1171 corresponding to the executed merging, group the two as a tuple.
1172 """
1173
1174 res = []
1175
1176 for comb_list in comb_lists:
1177
1178 reduced_list = []
1179 vertex_list = []
1180
1181 for entry in comb_list:
1182
1183
1184 if isinstance(entry, tuple):
1185
1186
1187
1188 leg_vert_ids = copy.copy(ref_dict_to1[\
1189 tuple(sorted([leg.get('id') for leg in entry]))])
1190
1191
1192 number = min([leg.get('number') for leg in entry])
1193
1194
1195 if len([leg for leg in entry if leg.get('state') == False]) == 1:
1196 state = False
1197 else:
1198 state = True
1199
1200
1201
1202
1203
1204 new_leg_vert_ids = []
1205 if leg_vert_ids:
1206 new_leg_vert_ids = self.get_combined_legs(entry,
1207 leg_vert_ids,
1208 number,
1209 state)
1210
1211 reduced_list.append([l[0] for l in new_leg_vert_ids])
1212
1213
1214
1215
1216
1217 vlist = base_objects.VertexList()
1218 for (myleg, vert_id) in new_leg_vert_ids:
1219
1220 myleglist = base_objects.LegList(list(entry))
1221
1222 myleglist.append(myleg)
1223
1224 vlist.append(base_objects.Vertex(
1225 {'legs':myleglist,
1226 'id':vert_id}))
1227
1228 vertex_list.append(vlist)
1229
1230
1231
1232 else:
1233 cp_entry = copy.copy(entry)
1234
1235
1236
1237 if cp_entry.get('from_group') != None:
1238 cp_entry.set('from_group', False)
1239 reduced_list.append(cp_entry)
1240
1241
1242 flat_red_lists = expand_list(reduced_list)
1243 flat_vx_lists = expand_list(vertex_list)
1244
1245
1246 for i in range(0, len(flat_vx_lists)):
1247 res.append((base_objects.LegList(flat_red_lists[i]), \
1248 base_objects.VertexList(flat_vx_lists[i])))
1249
1250 return res
1251
1253 """Create a set of new legs from the info given. This can be
1254 overloaded by daughter classes."""
1255
1256 mylegs = [(base_objects.Leg({'id':leg_id,
1257 'number':number,
1258 'state':state,
1259 'from_group':True}),
1260 vert_id)\
1261 for leg_id, vert_id in leg_vert_ids]
1262
1263 return mylegs
1264
1266 """Allow for selection of vertex ids. This can be
1267 overloaded by daughter classes."""
1268
1269 return vert_ids
1270
        """Reduce the number of legs and vertices used in memory.
        When called by a diagram generation initiated by LoopAmplitude,
        this function should not trim the diagrams in the attribute 'diagrams'
        but rather a given list in the 'diaglist' argument."""

        # Pools of unique legs/vertices; identical objects across
        # diagrams are replaced by a single shared instance.
        legs = []
        vertices = []

        if diaglist is None:
            diaglist=self.get('diagrams')

        # Flag final-state legs of the core process that have a decay
        # defined (their id is in decay_ids) as onshell.
        process = self.get('process')
        for leg in process.get('legs'):
            if leg.get('state') and leg.get('id') in decay_ids:
                leg.set('onshell', True)

        for diagram in diaglist:
            # Keep track of external leg numbers already treated in
            # this diagram, so only the first occurrence is flagged.
            leg_external = set()
            for ivx, vertex in enumerate(diagram.get('vertices')):
                for ileg, leg in enumerate(vertex.get('legs')):
                    if leg.get('state') and leg.get('id') in decay_ids and \
                       leg.get('number') not in leg_external:
                        # Copy before mutating: the leg object may be
                        # shared with other diagrams.
                        leg = copy.copy(leg)
                        leg.set('onshell', True)
                    try:
                        index = legs.index(leg)
                    except ValueError:
                        # First time we see this leg - keep it in the pool
                        vertex.get('legs')[ileg] = leg
                        legs.append(leg)
                    else:
                        # Replace by the pooled, identical leg
                        vertex.get('legs')[ileg] = legs[index]
                    leg_external.add(leg.get('number'))
                try:
                    # Replace the vertex by an identical pooled vertex
                    index = vertices.index(vertex)
                    diagram.get('vertices')[ivx] = vertices[index]
                except ValueError:
                    vertices.append(vertex)
1315
1316
1317
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects.

    Adds list-wide queries, e.g. whether any contained amplitude
    defines perturbation couplings (loop processes).
    """
1321
        """ Check the content of all processes of the amplitudes in this list to
        see if there is any which defines perturbation couplings. """

        # Returns True as soon as one amplitude reports a loop process.
        # NOTE(review): falls through returning None (falsy) when no
        # amplitude has a loop process; callers rely on truthiness only.
        for amp in self:
            if amp.has_loop_process():
                return True
1329
        """Test if object obj is a valid Amplitude for the list."""

        # Only Amplitude instances (including subclasses) may be stored.
        return isinstance(obj, Amplitude)
1334
1339 """A list of amplitudes + a list of decay chain amplitude lists;
1340 corresponding to a ProcessDefinition with a list of decay chains
1341 """
1342
1348
    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False, loop_filter=None, diagram_filter=False):
        """Allow initialization with Process and with ProcessDefinition"""

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Imported here to avoid a circular import at module load
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            if isinstance(argument, base_objects.ProcessDefinition):
                # A multiprocess definition: generate all its amplitudes
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                  collect_mirror_procs,
                                                  ignore_six_quark_processes,
                                                  loop_filter=loop_filter,
                                                  diagram_filter=diagram_filter))
            else:
                # A single process
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument,
                                                  loop_filter=loop_filter,
                                                  diagram_filter=diagram_filter))
                # Clean decay chains from the core process, since
                # processes and decay chains are combined later
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            # Recursively build one DecayChainAmplitude per decay chain
            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    raise MadGraph5Error("Decay processes can not be perturbed")
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd("Decay chain process must have exactly one" + \
                                     " incoming particle")
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes,
                                        diagram_filter=diagram_filter))

            # Collect the ids of all decaying particles (the single
            # initial leg of each decay amplitude) and flag them
            # onshell in the core diagrams
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Remove from decay_ids every id that does appear in some
            # core-process leg; what remains are unused decays
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            if decay_ids:
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
                    "$RED Decay without corresponding particle in core process found.\n" + \
                    "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
                    "Please check your process definition carefully. \n" + \
                    "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
                    "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

                # Drop the unused decay amplitudes; iterate in reverse
                # since we remove elements while looping
                for dc in reversed(self['decay_chains']):
                    for a in reversed(dc.get('amplitudes')):
                        # The decaying particle is the first (initial) leg
                        if a.get('process').get('legs')[0].get('id') in decay_ids:
                            dc.get('amplitudes').remove(a)
                    if not dc.get('amplitudes'):
                        # No amplitudes left: drop the whole chain
                        self['decay_chains'].remove(dc)

            # Warn about decays whose decaying particle reappears
            # (as particle or antiparticle) in its own final state
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
                    "$RED Decay(s) with particle decaying to itself:\n" + \
                    '\n'.join([p.nice_string() for p in bad_procs]) + \
                    "\nPlease check your process definition carefully. \n")

        # NOTE(review): `!= None` — `is not None` would be more idiomatic
        elif argument != None:
            # Initialize the PhysicsObject from the given argument
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # Default-initialize the PhysicsObject
            super(DecayChainAmplitude, self).__init__()
1453
1454 - def filter(self, name, value):
1455 """Filter for valid amplitude property values."""
1456
1457 if name == 'amplitudes':
1458 if not isinstance(value, AmplitudeList):
1459 raise self.PhysicsObjectError("%s is not a valid AmplitudeList" % str(value))
1460 if name == 'decay_chains':
1461 if not isinstance(value, DecayChainAmplitudeList):
1462 raise self.PhysicsObjectError("%s is not a valid DecayChainAmplitudeList object" % \
1463 str(value))
1464 return True
1465
        """Return diagram property names as a nicely sorted list."""

        # Fixed key order used when displaying/serializing the object.
        return ['amplitudes', 'decay_chains']
1470
1471
1472
        """Returns number of diagrams for this amplitude"""
        # Core-process diagrams plus, recursively, those of every
        # decay chain.
        return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
               + sum(d.get_number_of_diagrams() for d in \
                     self.get('decay_chains'))
1478
        """Returns a nicely formatted string of the amplitude content."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            # Decay chains are rendered two extra spaces deeper.
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string(indent + 2) + "\n"

        # Drop the trailing newline added by the last entry.
        return mystr[:-1]
1491
        """Returns a nicely formatted string of the amplitude processes."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string_processes(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            # Decay chains are rendered two extra spaces deeper.
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"

        # Drop the trailing newline added by the last entry.
        return mystr[:-1]
1504
        """Returns the number of initial state particles in the process."""
        # All amplitudes share the same initial state; query the first.
        return self.get('amplitudes')[0].get('process').get_ninitial()
1508
        """Returns a set of all particle ids for which a decay is defined"""

        decay_ids = []

        # The decaying particle is the single initial leg of each
        # decay-chain amplitude.
        for amp in sum([dc.get('amplitudes') for dc \
                        in self['decay_chains']], []):
            decay_ids.append(amp.get('process').get_initial_ids()[0])

        # Remove duplicates before returning.
        return list(set(decay_ids))
1522
        """Returns whether this amplitude has a loop process."""
        # Delegates to AmplitudeList.has_any_loop_process.
        return self['amplitudes'].has_any_loop_process()
1526
        """Recursive function to extract all amplitudes for this process"""

        amplitudes = AmplitudeList()

        amplitudes.extend(self.get('amplitudes'))
        # Recurse into every decay chain and collect its amplitudes too.
        for decay in self.get('decay_chains'):
            amplitudes.extend(decay.get_amplitudes())

        return amplitudes
1537
1543 """List of DecayChainAmplitude objects
1544 """
1545
        """Test if object obj is a valid DecayChainAmplitude for the list."""

        # Only DecayChainAmplitude instances may be stored.
        return isinstance(obj, DecayChainAmplitude)
1550
1551
1552
1553
1554
class MultiProcess(base_objects.PhysicsObject):
    """MultiProcess: list of process definitions
    list of processes (after cleaning)
    list of amplitudes (after generation)

    Amplitudes are generated lazily: the first access to
    get('amplitudes') triggers diagram generation from the stored
    process definitions.
    """
1560
        """Default values for all properties"""

        self['process_definitions'] = base_objects.ProcessDefinitionList()

        # Amplitudes are filled lazily on the first get('amplitudes')
        self['amplitudes'] = AmplitudeList()

        # If True (and 2 initial states), processes differing only by
        # exchange of the two initial legs are collected as mirrors
        self['collect_mirror_procs'] = False

        # Particle ids: processes with >= 6 legs whose |id| is in this
        # list are skipped during generation
        self['ignore_six_quark_processes'] = []

        # If True, use numerical mass values to discard 1->N decays
        # where the initial mass does not exceed the final-state masses
        self['use_numerical'] = False
1577
1578 - def __init__(self, argument=None, collect_mirror_procs = False,
1579 ignore_six_quark_processes = [], optimize=False,
1580 loop_filter=None, diagram_filter=None):
1608
1609
1610 - def filter(self, name, value):
1611 """Filter for valid process property values."""
1612
1613 if name == 'process_definitions':
1614 if not isinstance(value, base_objects.ProcessDefinitionList):
1615 raise self.PhysicsObjectError("%s is not a valid ProcessDefinitionList object" % str(value))
1616
1617 if name == 'amplitudes':
1618 if not isinstance(value, AmplitudeList):
1619 raise self.PhysicsObjectError("%s is not a valid AmplitudeList object" % str(value))
1620
1621 if name in ['collect_mirror_procs']:
1622 if not isinstance(value, bool):
1623 raise self.PhysicsObjectError("%s is not a valid boolean" % str(value))
1624
1625 if name == 'ignore_six_quark_processes':
1626 if not isinstance(value, list):
1627 raise self.PhysicsObjectError("%s is not a valid list" % str(value))
1628
1629 return True
1630
    def get(self, name):
        """Get the value of the property name.

        'amplitudes' is a lazy accessor: on first access (while the
        stored list is empty) amplitudes are generated from the process
        definitions - as a DecayChainAmplitude when a definition
        carries decay chains, via generate_multi_amplitudes otherwise.
        """

        if (name == 'amplitudes') and not self[name]:
            for process_def in self.get('process_definitions'):
                if process_def.get('decay_chains'):
                    # This is a decay chain process:
                    # wrap it in a DecayChainAmplitude
                    self['amplitudes'].append(\
                        DecayChainAmplitude(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes'),
                                        diagram_filter=self['diagram_filter']))
                else:
                    # Plain multiprocess definition
                    # ('loop_filter'/'diagram_filter' entries are set
                    # outside default_setup - presumably in __init__)
                    self['amplitudes'].extend(\
                        self.generate_multi_amplitudes(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes'),
                                        self['use_numerical'],
                                        loop_filter=self['loop_filter'],
                                        diagram_filter=self['diagram_filter']))

        # Delegate the actual lookup to the PhysicsObject base class
        return MultiProcess.__bases__[0].get(self, name)
1654
        """Return process property names as a nicely sorted list."""

        # Fixed key order used when displaying/serializing the object.
        return ['process_definitions', 'amplitudes']
1659
1661
        # All process definitions share one model; return the first one's.
        return self['process_definitions'][0]['model']
1663
    @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False,
                                  loop_filter=None,
                                  diagram_filter=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.
        """
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
               "%s not valid ProcessDefinition object" % \
               repr(process_definition)

        # Determine automatic coupling orders before generation
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition,
                                                           diagram_filter))
        # Check expansion orders against the model's maxima
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs / success_procs hold sorted outgoing-id arrays of
        # processes that already failed/succeeded (crossing symmetry)
        failed_procs = []
        success_procs = []
        # Complete (unsorted) id arrays, for mirror-process lookup
        non_permuted_procs = []
        # Leg permutations of the successfully crossed processes
        permutations = []

        model = process_definition['model']

        # Split the definition's legs into initial and final state
        islegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == False]
        fslegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == True]

        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]
        polids = [tuple(leg['polarization']) for leg in process_definition['legs'] \
                  if leg['state'] == True]

        # Loop over all initial-state id combinations
        for prod in itertools.product(*isids):
            islegs = [\
                base_objects.Leg({'id':id, 'state': False,
                                  'polarization': islegs[i]['polarization']})
                for i,id in enumerate(prod)]

            # Loop over all final-state id combinations, removing
            # double counting between identical final states

            red_fsidlist = set()

            for prod in itertools.product(*fsids):
                # Final states equal up to ordering of (id, polarization)
                # pairs are generated only once
                tag = zip(prod, polids)
                tag = sorted(tag)

                if tuple(tag) in red_fsidlist:
                    continue

                red_fsidlist.add(tuple(tag))

                # Build the full leg list for this combination
                leg_list = [copy.copy(leg) for leg in islegs]
                leg_list.extend([\
                    base_objects.Leg({'id':id, 'state': True, 'polarization': fslegs[i]['polarization']}) \
                    for i,id in enumerate(prod)])

                legs = base_objects.LegList(leg_list)

                # Canonical (sorted) outgoing ids + the permutation that
                # produced them, used for crossing identification
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                     enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]

                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Skip processes with 6 or more ignored (six-quark) legs
                if ignore_six_quark_processes and \
                   len([i for i in sorted_legs if abs(i) in \
                        ignore_six_quark_processes]) >= 6:
                    continue

                # Skip if a crossing of this process already failed
                if sorted_legs in failed_procs:
                    continue

                # Optionally discard kinematically closed 1->N decays
                if use_numerical:
                    # massless initial particle cannot decay
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Set up the concrete process for this combination
                process = process_definition.get_process_with_legs(legs)

                fast_proc = \
                    array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                   process_definition.get_ninitial() == 2:
                    # Check if the initial-state-swapped process was
                    # already generated; if so, just flag it
                    mirror_proc = \
                        array.array('i', [fast_proc[1], fast_proc[0]] + \
                                    list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                            amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # No mirror process yet - generate normally
                        pass
                    else:
                        # Mirror found: mark and skip generation
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Reuse diagrams from a successful crossing, unless the
                # process has properties that break crossing symmetry
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain') and not diagram_filter:
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                        # Relabeling of legs is not supported for loop
                        # amplitudes - treat as "no crossing found"
                        if 'loop_diagrams' in amplitudes[crossed_index]:
                            raise ValueError
                    except ValueError:
                        # No usable crossing, generate from scratch
                        pass
                    else:
                        # Crossing found - reuse its diagrams
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Fresh diagram generation for this process
                amplitude = cls.get_amplitude_from_proc(process,
                                                        loop_filter=loop_filter)

                try:
                    result = amplitude.generate_diagrams(diagram_filter=diagram_filter)
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    if amplitude.get('diagrams'):
                        # Diagrams generated successfully
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Generation failed for all crossings
                        failed_procs.append(sorted_legs)

        # No amplitude at all: re-raise the single failure, or report
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                raise error
            else:
                raise NoDiagramException("No amplitudes generated from process %s. Please enter a valid process" % \
                                         process_definition.nice_string())

        return amplitudes
1860
1861 @classmethod
        """ Return the correct amplitude type according to the characteristics of
        the process proc. The only option that could be specified here is
        loop_filter and it is of course not relevant for a tree amplitude."""

        # Base implementation: a plain tree-level Amplitude
        # (loop_filter is intentionally ignored here).
        return Amplitude({"process": proc})
1868
1869
1870 @staticmethod
        """Find the minimal WEIGHTED order for this set of processes.

        The algorithm:

        1) Check the coupling hierarchy of the model. Assign all
        particles to the different coupling hierarchies so that a
        particle is considered to be in the highest hierarchy (i.e.,
        with lowest value) where it has an interaction.

        2) Pick out the legs in the multiprocess according to the
        highest hierarchy represented (so don't mix particles from
        different hierarchy classes in the same multiparticles!)

        3) Find the starting maximum WEIGHTED order as the sum of the
        highest n-2 weighted orders

        4) Pick out required s-channel particle hierarchies, and use
        the highest of the maximum WEIGHTED order from the legs and
        the minimum WEIGHTED order extracted from 2*s-channel
        hierarchys plus the n-2-2*(number of s-channels) lowest
        leg weighted orders.

        5) Run process generation with the WEIGHTED order determined
        in 3)-4) - # final state gluons, with all gluons removed from
        the final state

        6) If no process is found, increase WEIGHTED order by 1 and go
        back to 5), until we find a process which passes. Return that
        order.

        7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1.
        If still no process has passed, return
        WEIGHTED = (n-2)*(highest hierarchy)
        """

        assert isinstance(process_definition, base_objects.ProcessDefinition), \
               "%s not valid ProcessDefinition object" % \
               repr(process_definition)

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # Nothing to determine when orders are already specified,
        # or for a virtual NLO run
        if process_definition.get('orders') or \
           process_definition.get('overall_orders') or \
           process_definition.get('NLO_mode')=='virt':
            return process_definition.get('orders')

        # 1->N decay processes (except decay chains) keep their orders
        if process_definition.get_ninitial() == 1 and not \
           process_definition.get('is_decay_chain'):
            return process_definition.get('orders')

        logger.info("Checking for minimal orders which gives processes.")
        logger.info("Please specify coupling orders to bypass this step.")

        # Starting guess for the WEIGHTED order (steps 1-4 above)
        max_order_now, particles, hierarchy = \
                       process_definition.get_minimum_WEIGHTED()
        coupling = 'WEIGHTED'

        model = process_definition.get('model')

        # Initial- and final-state id lists of the multiprocess
        isids = [leg['ids'] for leg in \
                 [leg for leg in process_definition['legs'] if leg['state'] == False]]
        fsids = [leg['ids'] for leg in \
                 [leg for leg in process_definition['legs'] if leg['state'] == True]]

        # Upper bound for the scan: (n-2) * highest weighted order
        max_WEIGHTED_order = \
            (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED())

        # Human-readable definition of WEIGHTED for the log message
        hierarchydef = process_definition['model'].get('order_hierarchy')
        tmp = []
        hierarchy = list(hierarchydef.items())
        hierarchy.sort()
        for key, value in hierarchydef.items():
            if value>1:
                tmp.append('%s*%s' % (value,key))
            else:
                tmp.append('%s' % key)
        wgtdef = '+'.join(tmp)

        # Scan increasing WEIGHTED orders until diagrams are found
        while max_order_now < max_WEIGHTED_order:
            # NOTE(review): "WEIGTHED" typo in the log message is
            # preserved here (runtime string, not changed in a doc pass)
            logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef))

            # Silence INFO messages from the trial generations
            oldloglevel = logger.level
            logger.setLevel(logging.WARNING)

            # Sorted-leg tuples of processes that already failed
            # (crossing symmetry)
            failed_procs = []

            for prod in itertools.product(*isids):
                islegs = [ base_objects.Leg({'id':id, 'state': False}) \
                           for id in prod]

                # Loop over final-state combinations, removing
                # double counting

                red_fsidlist = []

                for prod in itertools.product(*fsids):

                    # Identical (sorted) final states only once
                    if tuple(sorted(prod)) in red_fsidlist:
                        continue

                    red_fsidlist.append(tuple(sorted(prod)));

                    # Remove final-state gluons; each removed gluon
                    # lowers the trial order by the QCD hierarchy value
                    # (step 5 above; applies when 21 is in particles[0])
                    nglue = 0
                    if 21 in particles[0]:
                        nglue = len([id for id in prod if id == 21])
                        prod = [id for id in prod if id != 21]

                    # Build the leg list for this trial process
                    leg_list = [copy.copy(leg) for leg in islegs]

                    leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True}) \
                        for id in prod])

                    legs = base_objects.LegList(leg_list)

                    # Trial coupling order, reduced for removed gluons
                    coupling_orders_now = {coupling: max_order_now - \
                                           nglue * model['order_hierarchy']['QCD']}

                    # Trial process with the restrictions of the
                    # original definition
                    # NOTE(review): 'split_orders' appears twice in this
                    # literal (same value); the later entry wins
                    process = base_objects.Process({\
                                    'legs':legs,
                                    'model':model,
                                    'id': process_definition.get('id'),
                                    'orders': coupling_orders_now,
                                    'required_s_channels': \
                                      process_definition.get('required_s_channels'),
                                    'forbidden_onsh_s_channels': \
                                      process_definition.get('forbidden_onsh_s_channels'),
                                    'sqorders_types': \
                                      process_definition.get('sqorders_types'),
                                    'squared_orders': \
                                      process_definition.get('squared_orders'),
                                    'split_orders': \
                                      process_definition.get('split_orders'),
                                    'forbidden_s_channels': \
                                      process_definition.get('forbidden_s_channels'),
                                    'forbidden_particles': \
                                      process_definition.get('forbidden_particles'),
                                    'is_decay_chain': \
                                      process_definition.get('is_decay_chain'),
                                    'overall_orders': \
                                      process_definition.get('overall_orders'),
                                    'split_orders': \
                                      process_definition.get('split_orders')})

                    # Check expansion orders against the model's maxima
                    process.check_expansion_orders()

                    # Canonical leg list for crossing identification
                    sorted_legs = sorted(legs.get_outgoing_id_list(model))

                    # Skip crossings that already failed (unless
                    # forbidden s-channels break crossing symmetry)
                    if tuple(sorted_legs) in failed_procs and not process_definition.get('forbidden_s_channels'):
                        continue

                    amplitude = Amplitude({'process': process})
                    try:
                        amplitude.generate_diagrams(diagram_filter=diagram_filter)
                    except InvalidCmd as error:
                        failed_procs.append(tuple(sorted_legs))
                    else:
                        if amplitude.get('diagrams'):
                            # Valid amplitude found: this order suffices
                            logger.setLevel(oldloglevel)
                            return {coupling: max_order_now}
                        else:
                            failed_procs.append(tuple(sorted_legs))

            max_order_now += 1
            logger.setLevel(oldloglevel)

        # No process passed: return the maximal order (step 7)
        return {coupling: max_order_now}
2059
2060 @staticmethod
        """Return the amplitude crossed with the permutation new_perm"""

        # Map from original leg numbers to the new (crossed) numbers
        perm_map = dict(list(zip(org_perm, new_perm)))

        # Shallow copy: diagrams are replaced below, other properties
        # are shared with the original amplitude
        new_amp = copy.copy(amplitude)

        # Number the legs of the target process consecutively
        for i, leg in enumerate(process.get('legs')):
            leg.set('number', i+1)

        new_amp.set('process', process)

        # Renumber the legs of every diagram according to perm_map
        diagrams = base_objects.DiagramList([d.renumber_legs(perm_map,
                                            process.get('legs'),) for \
                                            d in new_amp.get('diagrams')])
        new_amp.set('diagrams', diagrams)
        new_amp.trim_diagrams()

        # The crossed amplitude gets its own mirror flag
        new_amp.set('has_mirror_process', False)

        return new_amp
2083
    """Takes a list of lists and elements and returns a list of flat lists.
    Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
    """

    # Allow only a list as argument
    assert isinstance(mylist, list), "Expand_list argument must be a list"

    res = []

    # Wrap bare elements in singleton lists so every entry can be fed
    # uniformly to itertools.product
    tmplist = []
    for item in mylist:
        if isinstance(item, list):
            tmplist.append(item)
        else:
            tmplist.append([item])

    # The Cartesian product over all entries gives the flat lists
    for item in itertools.product(*tmplist):
        res.append(list(item))

    return res
2109
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
    """

    res = []

    # Empty list, or a single empty sublist: one empty combination
    if not mylist or len(mylist) == 1 and not mylist[0]:
        return [[]]

    # The first entry must itself be a list
    assert isinstance(mylist[0], list), \
           "Expand_list_list needs a list of lists and lists of lists"

    # Recursion stop case: only one entry left
    if len(mylist) == 1:
        if isinstance(mylist[0][0], list):
            return mylist[0]
        else:
            return mylist

    if isinstance(mylist[0][0], list):
        # First entry is a list of alternatives: combine each
        # alternative with every expansion of the remaining entries
        for item in mylist[0]:
            for rest in expand_list_list(mylist[1:]):
                reslist = copy.copy(item)
                reslist.extend(rest)
                res.append(reslist)
    else:
        # First entry is a plain list: prepend it to every expansion
        # of the remaining entries
        for rest in expand_list_list(mylist[1:]):
            reslist = copy.copy(mylist[0])
            reslist.extend(rest)
            res.append(reslist)

    return res
2149