1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """Classes for diagram generation. Amplitude performs the diagram
16 generation, DecayChainAmplitude keeps track of processes with decay
17 chains, and MultiProcess allows generation of processes with
18 multiparticle definitions. DiagramTag allows to identify diagrams
19 based on relevant properties.
20 """
21
22 from __future__ import absolute_import
23 from six.moves import filter
24
25
26
27 import array
28 import copy
29 import itertools
30 import logging
31
32 import madgraph.core.base_objects as base_objects
33 import madgraph.various.misc as misc
34 from madgraph import InvalidCmd, MadGraph5Error
35 from six.moves import range
36 from six.moves import zip
37
38 logger = logging.getLogger('madgraph.diagram_generation')
42
48 """Class to tag diagrams based on objects with some __lt__ measure, e.g.
49 PDG code/interaction id (for comparing diagrams from the same amplitude),
50 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs).
51 Algorithm: Create chains starting from external particles:
52 1 \ / 6
53 2 /\______/\ 7
54 3_ / | \_ 8
55 4 / 5 \_ 9
56 \ 10
57 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867)
58 (((1,2,id12),(3,4,id34)),id1234),
59 5,id91086712345)
60 where idN is the id of the corresponding interaction. The ordering within
61 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has
depth 4, 5 has depth 0), and if equal on the ordering of the chain elements.
63 The determination of central vertex is based on minimizing the chain length
64 for the longest subchain.
65 This gives a unique tag which can be used to identify diagrams
66 (instead of symmetry), as well as identify identical matrix elements from
67 different processes."""
68
70 """Exception for any problems in DiagramTags"""
71 pass
72
73 - def __init__(self, diagram, model=None, ninitial=2):
74 """Initialize with a diagram. Create DiagramTagChainLinks according to
75 the diagram, and figure out if we need to shift the central vertex."""
76
77
78 leg_dict = {}
79
80 for vertex in diagram.get('vertices'):
81
82 legs = vertex.get('legs')[:-1]
83 lastvx = vertex == diagram.get('vertices')[-1]
84 if lastvx:
85
86 legs = vertex.get('legs')
87
88 link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
89 DiagramTagChainLink(self.link_from_leg(leg, model))) \
90 for leg in legs],
91 self.vertex_id_from_vertex(vertex,
92 lastvx,
93 model,
94 ninitial))
95
96 if not lastvx:
97 leg_dict[vertex.get('legs')[-1].get('number')] = link
98
99
100 self.tag = link
101
102
103
104 done = max([l.depth for l in self.tag.links]) == 0
105 while not done:
106
107 longest_chain = self.tag.links[0]
108
109 new_link = DiagramTagChainLink(self.tag.links[1:],
110 self.flip_vertex(\
111 self.tag.vertex_id,
112 longest_chain.vertex_id,
113 self.tag.links[1:]))
114
115 other_links = list(longest_chain.links) + [new_link]
116 other_link = DiagramTagChainLink(other_links,
117 self.flip_vertex(\
118 longest_chain.vertex_id,
119 self.tag.vertex_id,
120 other_links))
121
122 if other_link.links[0] < self.tag.links[0]:
123
124 self.tag = other_link
125 else:
126
127 done = True
128
133
135 """Output a diagram from a DiagramTag. Note that each daughter
136 class must implement the static functions id_from_vertex_id
137 (if the vertex id is something else than an integer) and
138 leg_from_link (to pass the correct info from an end link to a
139 leg)."""
140
141
142 diagram = base_objects.Diagram({'vertices': \
143 self.vertices_from_link(self.tag,
144 model,
145 True)})
146 diagram.calculate_orders(model)
147 return diagram
148
149 @classmethod
151 """Recursively return the leg corresponding to this link and
152 the list of all vertices from all previous links"""
153
154 if link.end_link:
155
156 return cls.leg_from_link(link), []
157
158
159 leg_vertices = [cls.vertices_from_link(l, model) for l in link.links]
160
161 legs = base_objects.LegList(sorted([l for l,v in leg_vertices],
162 key= lambda l: l.get('number'), reverse=True))
163
164
165 vertices = base_objects.VertexList(sum([v for l, v in leg_vertices],
166 []))
167
168 if not first_vertex:
169
170
171 last_leg = cls.leg_from_legs(legs,link.vertex_id,model)
172 legs.append(last_leg)
173
174
175 vertices.append(cls.vertex_from_link(legs,
176 link.vertex_id,
177 model))
178 if first_vertex:
179
180 return vertices
181 else:
182
183 return last_leg, vertices
184
185 @classmethod
187 """Returns the list of external PDGs of the interaction corresponding
188 to this vertex_id."""
189
190
191
192
193 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]):
194 return vertex_id[2]['PDGs']
195 else:
196 return [part.get_pdg_code() for part in model.get_interaction(
197 cls.id_from_vertex_id(vertex_id)).get('particles')]
198
199 @classmethod
201 """Return a leg from a leg list and the model info"""
202
203 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model))
204
205
206 for pdg in [leg.get('id') for leg in legs]:
207 pdgs.remove(pdg)
208
209 assert len(pdgs) == 1
210
211 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code()
212 number = min([l.get('number') for l in legs])
213
214 state = (len([l for l in legs if l.get('state') == False]) != 1)
215
216 onshell= False
217
218 return base_objects.Leg({'id': pdg,
219 'number': number,
220 'state': state,
221 'onshell': onshell})
222
223 @classmethod
236
237 @staticmethod
239 """Return a leg from a link"""
240
241 if link.end_link:
242
243 return base_objects.Leg({'number':link.links[0][1],
244 'id':link.links[0][0][0],
245 'state':(link.links[0][0][1] == 0),
246 'onshell':False})
247
248
249 assert False
250
251 @staticmethod
253 """Return the numerical vertex id from a link.vertex_id"""
254
255 return vertex_id[0][0]
256
257 @staticmethod
259 """Return the loop_info stored in this vertex id. Notice that the
260 IdentifyME tag does not store the loop_info, but should normally never
261 need access to it."""
262
263 return vertex_id[2]
264
265 @staticmethod
267 """Reorder a permutation with respect to start_perm. Note that
268 both need to start from 1."""
269 if perm == start_perm:
270 return list(range(len(perm)))
271 order = [i for (p,i) in \
272 sorted([(p,i) for (i,p) in enumerate(perm)])]
273 return [start_perm[i]-1 for i in order]
274
275 @staticmethod
277 """Returns the default end link for a leg: ((id, state), number).
278 Note that the number is not taken into account if tag comparison,
279 but is used only to extract leg permutations."""
280 if leg.get('state'):
281
282 return [((leg.get('id'), 0), leg.get('number'))]
283 else:
284
285 return [((leg.get('id'), leg.get('number')), leg.get('number'))]
286
287 @staticmethod
289 """Returns the default vertex id: just the interaction id
290 Note that in the vertex id, like the leg, only the first entry is
291 taken into account in the tag comparison, while the second is for
292 storing information that is not to be used in comparisons and the
293 third for additional info regarding the shrunk loop vertex."""
294
295 if isinstance(vertex,base_objects.ContractedVertex):
296
297 return ((vertex.get('id'),vertex.get('loop_tag')),(),
298 {'PDGs':vertex.get('PDGs')})
299 else:
300 return ((vertex.get('id'),()),(),{})
301
302 @staticmethod
304 """Returns the default vertex flip: just the new_vertex"""
305 return new_vertex
306
308 """Equal if same tag"""
309 if type(self) != type(other):
310 return False
311 return self.tag == other.tag
312
314 return not self.__eq__(other)
315
318
320 return self.tag < other.tag
321
323 return self.tag > other.tag
324
325 __repr__ = __str__
326
328 """Chain link for a DiagramTag. A link is a tuple + vertex id + depth,
329 with a comparison operator defined"""
330
331 - def __init__(self, objects, vertex_id = None):
332 """Initialize, either with a tuple of DiagramTagChainLinks and
333 a vertex_id (defined by DiagramTag.vertex_id_from_vertex), or
334 with an external leg object (end link) defined by
335 DiagramTag.link_from_leg"""
336
337 if vertex_id == None:
338
339 self.links = tuple(objects)
340 self.vertex_id = (0,)
341 self.depth = 0
342 self.end_link = True
343 return
344
345 self.links = tuple(sorted(list(tuple(objects)), reverse=True))
346 self.vertex_id = vertex_id
347
348
349 self.depth = sum([l.depth for l in self.links],
350 max(1, len(self.links)-1))
351 self.end_link = False
352
354 """Get the permutation of external numbers (assumed to be the
355 second entry in the end link tuples)"""
356
357 if self.end_link:
358 return [self.links[0][1]]
359
360 return sum([l.get_external_numbers() for l in self.links], [])
361
363 """Compare self with other in the order:
364 1. depth 2. len(links) 3. vertex id 4. measure of links"""
365
366 if self == other:
367 return False
368
369 if self.depth != other.depth:
370 return self.depth < other.depth
371
372 if len(self.links) != len(other.links):
373 return len(self.links) < len(other.links)
374
375 if self.vertex_id[0] != other.vertex_id[0]:
376 if isinstance(self.vertex_id[0], int) and isinstance(other.vertex_id[0], tuple):
377 return True
378 elif isinstance(self.vertex_id[0], tuple) and isinstance(other.vertex_id[0], int):
379 return False
380 elif isinstance(self.vertex_id[0], str) and isinstance(other.vertex_id[0], tuple):
381 return True
382 elif isinstance(self.vertex_id[0], tuple) and isinstance(other.vertex_id[0], str):
383 return False
384 else:
385 try:
386 return self.vertex_id[0] < other.vertex_id[0]
387 except TypeError as error:
388 if error.args == ("'<' not supported between instances of 'tuple' and 'str'",):
389 return False
390 elif error.args == ("'<' not supported between instances of 'str' and 'tuple'",):
391 return True
392 else:
393 raise Exception
394
395
396 for i, link in enumerate(self.links):
397 if i > len(other.links) - 1:
398 return False
399 if link != other.links[i]:
400 return link < other.links[i]
401
403 return self != other and not self.__lt__(other)
404
406 """For end link,
407 consider equal if self.links[0][0] == other.links[0][0],
408 i.e., ignore the leg number (in links[0][1])."""
409
410 if self.end_link and other.end_link and self.depth == other.depth \
411 and self.vertex_id == other.vertex_id:
412 return self.links[0][0] == other.links[0][0]
413
414 return self.end_link == other.end_link and self.depth == other.depth \
415 and self.vertex_id[0] == other.vertex_id[0] \
416 and self.links == other.links
417
419 return not self.__eq__(other)
420
421
423 if self.end_link:
424 return str(self.links)
425 return "%s, %s; %d" % (str(self.links),
426 str(self.vertex_id),
427 self.depth)
428
429 __repr__ = __str__
430
431
432
433
434 -class Amplitude(base_objects.PhysicsObject):
435 """Amplitude: process + list of diagrams (ordered)
436 Initialize with a process, then call generate_diagrams() to
437 generate the diagrams for the amplitude
438 """
439
441 """Default values for all properties"""
442
443 self['process'] = base_objects.Process()
444 self['diagrams'] = None
445
446
447 self['has_mirror_process'] = False
448
461
462 - def filter(self, name, value):
475
476 - def get(self, name):
485
486
487
489 """Return diagram property names as a nicely sorted list."""
490
491 return ['process', 'diagrams', 'has_mirror_process']
492
494 """Returns number of diagrams for this amplitude"""
495 return len(self.get('diagrams'))
496
498 """Return an AmplitudeList with just this amplitude.
499 Needed for DecayChainAmplitude."""
500
501 return AmplitudeList([self])
502
504 """Returns a nicely formatted string of the amplitude content."""
505 return self.get('process').nice_string(indent) + "\n" + \
506 self.get('diagrams').nice_string(indent)
507
509 """Returns a nicely formatted string of the amplitude process."""
510 return self.get('process').nice_string(indent)
511
513 """Returns the number of initial state particles in the process."""
514 return self.get('process').get_ninitial()
515
517 """ Returns wether this amplitude has a loop process."""
518
519 return self.get('process').get('perturbation_couplings')
520
522 """Generate diagrams. Algorithm:
523
524 1. Define interaction dictionaries:
525 * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
526 * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1
527
528 2. Set flag from_group=true for all external particles.
529 Flip particle/anti particle for incoming particles.
530
531 3. If there is a dictionary n->0 with n=number of external
532 particles, create if possible the combination [(1,2,3,4,...)]
533 with *at least two* from_group==true. This will give a
534 finished (set of) diagram(s) (done by reduce_leglist)
535
536 4. Create all allowed groupings of particles with at least one
537 from_group==true (according to dictionaries n->1):
538 [(1,2),3,4...],[1,(2,3),4,...],...,
539 [(1,2),(3,4),...],...,[(1,2,3),4,...],...
540 (done by combine_legs)
541
542 5. Replace each group with a (list of) new particle(s) with number
543 n = min(group numbers). Set from_group true for these
544 particles and false for all other particles. Store vertex info.
545 (done by merge_comb_legs)
546
547 6. Stop algorithm when at most 2 particles remain.
548 Return all diagrams (lists of vertices).
549
550 7. Repeat from 3 (recursion done by reduce_leglist)
551
552 8. Replace final p=p vertex
553
554 Be aware that the resulting vertices have all particles outgoing,
555 so need to flip for incoming particles when used.
556
557 SPECIAL CASE: For A>BC... processes which are legs in decay
558 chains, we need to ensure that BC... combine first, giving A=A
559 as a final vertex. This case is defined by the Process
560 property is_decay_chain = True.
561 This function can also be called by the generate_diagram function
562 of LoopAmplitudes, in which case the generated diagrams here must not
563 be directly assigned to the 'diagrams' attributed but returned as a
564 DiagramList by the function. This is controlled by the argument
565 returndiag.
566 """
567
568 process = self.get('process')
569 model = process.get('model')
570 legs = process.get('legs')
571
572 for key in process.get('overall_orders').keys():
573 try:
574 process.get('orders')[key] = \
575 min(process.get('orders')[key],
576 process.get('overall_orders')[key])
577 except KeyError:
578 process.get('orders')[key] = process.get('overall_orders')[key]
579
580 assert model.get('particles'), \
581 "particles are missing in model: %s" % model.get('particles')
582
583 assert model.get('interactions'), \
584 "interactions are missing in model"
585
586
587 res = base_objects.DiagramList()
588
589 if len([leg for leg in legs if model.get('particle_dict')[\
590 leg.get('id')].is_fermion()]) % 2 == 1:
591 if not returndiag:
592 self['diagrams'] = res
593 raise InvalidCmd('The number of fermion is odd')
594 else:
595 return False, res
596
597
598
599 if not model.get('got_majoranas') and \
600 len([leg for leg in legs if leg.is_incoming_fermion(model)]) != \
601 len([leg for leg in legs if leg.is_outgoing_fermion(model)]):
602 if not returndiag:
603 self['diagrams'] = res
604 raise InvalidCmd('The number of of incoming/outcoming fermions are different')
605 else:
606 return False, res
607
608
609
610 for charge in model.get('conserved_charge'):
611 total = 0
612 for leg in legs:
613 part = model.get('particle_dict')[leg.get('id')]
614 try:
615 value = part.get(charge)
616 except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
617 try:
618 value = getattr(part, charge)
619 except AttributeError:
620 value = 0
621
622 if (leg.get('id') != part['pdg_code']) != leg['state']:
623 total -= value
624 else:
625 total += value
626
627 if abs(total) > 1e-10:
628 if not returndiag:
629 self['diagrams'] = res
630 raise InvalidCmd('No %s conservation for this process ' % charge)
631 return res
632 else:
633 raise InvalidCmd('No %s conservation for this process ' % charge)
634 return res, res
635
636 if not returndiag:
637 logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))
638
639
640 for i in range(0, len(process.get('legs'))):
641
642 leg = copy.copy(process.get('legs')[i])
643 process.get('legs')[i] = leg
644 if leg.get('number') == 0:
645 leg.set('number', i + 1)
646
647
648
649 leglist = self.copy_leglist(process.get('legs'))
650
651 for leg in leglist:
652
653
654 leg.set('from_group', True)
655
656
657
658 if leg.get('state') == False:
659 part = model.get('particle_dict')[leg.get('id')]
660 leg.set('id', part.get_anti_pdg_code())
661
662
663
664 max_multi_to1 = max([len(key) for key in \
665 model.get('ref_dict_to1').keys()])
666
667
668
669
670
671
672
673
674 is_decay_proc = process.get_ninitial() == 1
675 if is_decay_proc:
676 part = model.get('particle_dict')[leglist[0].get('id')]
677
678
679
680 ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
681 (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
682
683
684 leglist[0].set('from_group', None)
685 reduced_leglist = self.reduce_leglist(leglist,
686 max_multi_to1,
687 ref_dict_to0,
688 is_decay_proc,
689 process.get('orders'))
690 else:
691 reduced_leglist = self.reduce_leglist(leglist,
692 max_multi_to1,
693 model.get('ref_dict_to0'),
694 is_decay_proc,
695 process.get('orders'))
696
697
698
699
700 self.convert_dgleg_to_leg(reduced_leglist)
701
702 if reduced_leglist:
703 for vertex_list in reduced_leglist:
704 res.append(self.create_diagram(base_objects.VertexList(vertex_list)))
705
706
707
708 failed_crossing = not res
709
710
711
712
713
714
715 if process.get('required_s_channels') and \
716 process.get('required_s_channels')[0]:
717
718
719 lastvx = -1
720
721
722
723 if is_decay_proc: lastvx = -2
724 ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
725
726 old_res = res
727 res = base_objects.DiagramList()
728 for id_list in process.get('required_s_channels'):
729 res_diags = [diagram for diagram in old_res if all([req_s_channel in \
730 [vertex.get_s_channel_id(\
731 process.get('model'), ninitial) \
732 for vertex in diagram.get('vertices')[:lastvx]] \
733 for req_s_channel in \
734 id_list])]
735
736 res.extend([diag for diag in res_diags if diag not in res])
737
738
739
740
741
742 if process.get('forbidden_s_channels'):
743 ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
744 if ninitial == 2:
745 res = base_objects.DiagramList(\
746 [diagram for diagram in res if not any([vertex.get_s_channel_id(\
747 process.get('model'), ninitial) \
748 in process.get('forbidden_s_channels')
749 for vertex in diagram.get('vertices')[:-1]])])
750 else:
751
752
753 newres= []
754 for diagram in res:
755 leg1 = 1
756
757
758
759 vertex = diagram.get('vertices')[-1]
760 if any([l['number'] ==1 for l in vertex.get('legs')]):
761 leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
762 to_loop = list(range(len(diagram.get('vertices'))-1))
763 if leg1 >1:
764 to_loop.reverse()
765 for i in to_loop:
766 vertex = diagram.get('vertices')[i]
767 if leg1:
768 if any([l['number'] ==leg1 for l in vertex.get('legs')]):
769 leg1 = 0
770 continue
771 if vertex.get_s_channel_id(process.get('model'), ninitial)\
772 in process.get('forbidden_s_channels'):
773 break
774 else:
775 newres.append(diagram)
776 res = base_objects.DiagramList(newres)
777
778
779
780
781 if process.get('forbidden_onsh_s_channels'):
782 ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
783
784 verts = base_objects.VertexList(sum([[vertex for vertex \
785 in diagram.get('vertices')[:-1]
786 if vertex.get_s_channel_id(\
787 process.get('model'), ninitial) \
788 in process.get('forbidden_onsh_s_channels')] \
789 for diagram in res], []))
790 for vert in verts:
791
792 newleg = copy.copy(vert.get('legs').pop(-1))
793 newleg.set('onshell', False)
794 vert.get('legs').append(newleg)
795
796
797 for diagram in res:
798 diagram.calculate_orders(model)
799
800
801
802
803
804
805
806
807 if not returndiag and len(res)>0:
808 res = self.apply_squared_order_constraints(res)
809
810 if diagram_filter:
811 res = self.apply_user_filter(res)
812
813
814 if not process.get('is_decay_chain'):
815 for diagram in res:
816 vertices = diagram.get('vertices')
817 if len(vertices) > 1 and vertices[-1].get('id') == 0:
818
819
820
821
822 vertices = copy.copy(vertices)
823 lastvx = vertices.pop()
824 nexttolastvertex = copy.copy(vertices.pop())
825 legs = copy.copy(nexttolastvertex.get('legs'))
826 ntlnumber = legs[-1].get('number')
827 lastleg = [leg for leg in lastvx.get('legs') if leg.get('number') != ntlnumber][0]
828
829 if lastleg.get('onshell') == False:
830 lastleg.set('onshell', None)
831
832 legs[-1] = lastleg
833 nexttolastvertex.set('legs', legs)
834 vertices.append(nexttolastvertex)
835 diagram.set('vertices', vertices)
836
837 if res and not returndiag:
838 logger.info("Process has %d diagrams" % len(res))
839
840
841 self.trim_diagrams(diaglist=res)
842
843
844 pertur = 'QCD'
845 if self.get('process')['perturbation_couplings']:
846 pertur = sorted(self.get('process')['perturbation_couplings'])[0]
847 self.get('process').get('legs').sort(pert=pertur)
848
849
850 if not returndiag:
851 self['diagrams'] = res
852 return not failed_crossing
853 else:
854 return not failed_crossing, res
855
857 """Applies the user specified squared order constraints on the diagram
858 list in argument."""
859
860 res = copy.copy(diag_list)
861
862
863
864 for name, (value, operator) in self['process'].get('constrained_orders').items():
865 res.filter_constrained_orders(name, value, operator)
866
867
868
869
870 while True:
871 new_res = res.apply_positive_sq_orders(res,
872 self['process'].get('squared_orders'),
873 self['process']['sqorders_types'])
874
875 if len(res)==len(new_res):
876 break
877 elif (len(new_res)>len(res)):
878 raise MadGraph5Error(
879 'Inconsistency in function apply_squared_order_constraints().')
880
881 res = new_res
882
883
884
885
886 neg_orders = [(order, value) for order, value in \
887 self['process'].get('squared_orders').items() if value<0]
888 if len(neg_orders)==1:
889 neg_order, neg_value = neg_orders[0]
890
891 res, target_order = res.apply_negative_sq_order(res, neg_order,\
892 neg_value, self['process']['sqorders_types'][neg_order])
893
894
895
896
897 self['process']['squared_orders'][neg_order]=target_order
898 elif len(neg_orders)>1:
899 raise InvalidCmd('At most one negative squared order constraint'+\
900 ' can be specified, not %s.'%str(neg_orders))
901
902 return res
903
905 """Applies the user specified squared order constraints on the diagram
906 list in argument."""
907
908 if True:
909 remove_diag = misc.plugin_import('user_filter',
910 'user filter required to be defined in PLUGIN/user_filter.py with the function remove_diag(ONEDIAG) which returns True if the diagram has to be removed',
911 fcts=['remove_diag'])
912 else:
913
914 def remove_diag(diag, model=None):
915 for vertex in diag['vertices']:
916 if vertex['id'] == 0:
917 continue
918 if vertex['legs'][-1]['number'] < 3:
919 if abs(vertex['legs'][-1]['id']) <6:
920 return True
921 return False
922
923 res = diag_list.__class__()
924 nb_removed = 0
925 model = self['process']['model']
926 for diag in diag_list:
927 if remove_diag(diag, model):
928 nb_removed +=1
929 else:
930 res.append(diag)
931
932 if nb_removed:
933 logger.warning('Diagram filter is ON and removed %s diagrams for this subprocess.' % nb_removed)
934
935 return res
936
937
938
940 """ Return a Diagram created from the vertex list. This function can be
941 overloaded by daughter classes."""
942 return base_objects.Diagram({'vertices':vertexlist})
943
945 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs.
946 In Amplitude, there is nothing to do. """
947
948 return True
949
951 """ Simply returns a copy of the leg list. This function is
952 overloaded in LoopAmplitude so that a DGLoopLeg list is returned.
953 The DGLoopLeg has some additional parameters only useful during
954 loop diagram generation"""
955
956 return base_objects.LegList(\
957 [ copy.copy(leg) for leg in legs ])
958
959 - def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
960 is_decay_proc = False, coupling_orders = None):
961 """Recursive function to reduce N LegList to N-1
962 For algorithm, see doc for generate_diagrams.
963 """
964
965
966
967 res = []
968
969
970
971 if curr_leglist is None:
972 return None
973
974
975 model = self.get('process').get('model')
976 ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')
977
978
979
980
981
982
983 if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
984
985
986 vertex_ids = self.get_combined_vertices(curr_leglist,
987 copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
988 leg in curr_leglist]))]))
989
990 final_vertices = [base_objects.Vertex({'legs':curr_leglist,
991 'id':vertex_id}) for \
992 vertex_id in vertex_ids]
993
994 for final_vertex in final_vertices:
995 if self.reduce_orders(coupling_orders, model,
996 [final_vertex.get('id')]) != False:
997 res.append([final_vertex])
998
999
1000 if len(curr_leglist) == 2:
1001 if res:
1002 return res
1003 else:
1004 return None
1005
1006
1007 comb_lists = self.combine_legs(curr_leglist,
1008 ref_dict_to1, max_multi_to1)
1009
1010
1011 leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)
1012
1013
1014 for leg_vertex_tuple in leg_vertex_list:
1015
1016
1017 if self.get('process').get('forbidden_particles') and \
1018 any([abs(vertex.get('legs')[-1].get('id')) in \
1019 self.get('process').get('forbidden_particles') \
1020 for vertex in leg_vertex_tuple[1]]):
1021 continue
1022
1023
1024 new_coupling_orders = self.reduce_orders(coupling_orders,
1025 model,
1026 [vertex.get('id') for vertex in \
1027 leg_vertex_tuple[1]])
1028 if new_coupling_orders == False:
1029
1030 continue
1031
1032
1033
1034 reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
1035 max_multi_to1,
1036 ref_dict_to0,
1037 is_decay_proc,
1038 new_coupling_orders)
1039
1040 if reduced_diagram:
1041 vertex_list_list = [list(leg_vertex_tuple[1])]
1042 vertex_list_list.append(reduced_diagram)
1043 expanded_list = expand_list_list(vertex_list_list)
1044 res.extend(expanded_list)
1045
1046 return res
1047
1048 - def reduce_orders(self, coupling_orders, model, vertex_id_list):
1049 """Return False if the coupling orders for any coupling is <
1050 0, otherwise return the new coupling orders with the vertex
1051 orders subtracted. If coupling_orders is not given, return
1052 None (which counts as success).
1053 WEIGHTED is a special order, which corresponds to the sum of
1054 order hierarchies for the couplings.
1055 We ignore negative constraints as these cannot be taken into
1056 account on the fly but only after generation."""
1057
1058 if not coupling_orders:
1059 return None
1060
1061 present_couplings = copy.copy(coupling_orders)
1062 for id in vertex_id_list:
1063
1064 if not id:
1065 continue
1066 inter = model.get("interaction_dict")[id]
1067 for coupling in inter.get('orders').keys():
1068
1069
1070 if coupling in present_couplings and \
1071 present_couplings[coupling]>=0:
1072
1073 present_couplings[coupling] -= \
1074 inter.get('orders')[coupling]
1075 if present_couplings[coupling] < 0:
1076
1077 return False
1078
1079 if 'WEIGHTED' in present_couplings and \
1080 present_couplings['WEIGHTED']>=0:
1081 weight = sum([model.get('order_hierarchy')[c]*n for \
1082 (c,n) in inter.get('orders').items()])
1083 present_couplings['WEIGHTED'] -= weight
1084 if present_couplings['WEIGHTED'] < 0:
1085
1086 return False
1087
1088 return present_couplings
1089
1090 - def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
1091 """Recursive function. Take a list of legs as an input, with
1092 the reference dictionary n-1->1, and output a list of list of
1093 tuples of Legs (allowed combinations) and Legs (rest). Algorithm:
1094
1095 1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..
1096
1097 2. For each combination, say [34]. Check if combination is valid.
1098 If so:
1099
1100 a. Append [12[34]56] to result array
1101
1102 b. Split [123456] at index(first element in combination+1),
1103 i.e. [12],[456] and subtract combination from second half,
1104 i.e.: [456]-[34]=[56]. Repeat from 1. with this array
1105
1106 3. Take result array from call to 1. (here, [[56]]) and append
1107 (first half in step b - combination) + combination + (result
1108 from 1.) = [12[34][56]] to result array
1109
1110 4. After appending results from all n-combinations, return
1111 resulting array. Example, if [13] and [45] are valid
1112 combinations:
1113 [[[13]2456],[[13]2[45]6],[123[45]6]]
1114 """
1115
1116 res = []
1117
1118
1119 for comb_length in range(2, max_multi_to1 + 1):
1120
1121
1122 if comb_length > len(list_legs):
1123 return res
1124
1125
1126
1127 for comb in itertools.combinations(list_legs, comb_length):
1128
1129
1130 if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):
1131
1132
1133
1134 res_list = copy.copy(list_legs)
1135 for leg in comb:
1136 res_list.remove(leg)
1137 res_list.insert(list_legs.index(comb[0]), comb)
1138 res.append(res_list)
1139
1140
1141
1142
1143
1144
1145 res_list1 = list_legs[0:list_legs.index(comb[0])]
1146 res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
1147 for leg in comb[1:]:
1148 res_list2.remove(leg)
1149
1150
1151 res_list = res_list1
1152 res_list.append(comb)
1153
1154
1155 for item in self.combine_legs(res_list2,
1156 ref_dict_to1,
1157 max_multi_to1):
1158 final_res_list = copy.copy(res_list)
1159 final_res_list.extend(item)
1160 res.append(final_res_list)
1161
1162 return res
1163
1164
1166 """Takes a list of allowed leg combinations as an input and returns
1167 a set of lists where combinations have been properly replaced
1168 (one list per element in the ref_dict, so that all possible intermediate
1169 particles are included). For each list, give the list of vertices
1170 corresponding to the executed merging, group the two as a tuple.
1171 """
1172
1173 res = []
1174
1175 for comb_list in comb_lists:
1176
1177 reduced_list = []
1178 vertex_list = []
1179
1180 for entry in comb_list:
1181
1182
1183 if isinstance(entry, tuple):
1184
1185
1186
1187 leg_vert_ids = copy.copy(ref_dict_to1[\
1188 tuple(sorted([leg.get('id') for leg in entry]))])
1189
1190
1191 number = min([leg.get('number') for leg in entry])
1192
1193
1194 if len([leg for leg in entry if leg.get('state') == False]) == 1:
1195 state = False
1196 else:
1197 state = True
1198
1199
1200
1201
1202
1203 new_leg_vert_ids = []
1204 if leg_vert_ids:
1205 new_leg_vert_ids = self.get_combined_legs(entry,
1206 leg_vert_ids,
1207 number,
1208 state)
1209
1210 reduced_list.append([l[0] for l in new_leg_vert_ids])
1211
1212
1213
1214
1215
1216 vlist = base_objects.VertexList()
1217 for (myleg, vert_id) in new_leg_vert_ids:
1218
1219 myleglist = base_objects.LegList(list(entry))
1220
1221 myleglist.append(myleg)
1222
1223 vlist.append(base_objects.Vertex(
1224 {'legs':myleglist,
1225 'id':vert_id}))
1226
1227 vertex_list.append(vlist)
1228
1229
1230
1231 else:
1232 cp_entry = copy.copy(entry)
1233
1234
1235
1236 if cp_entry.get('from_group') != None:
1237 cp_entry.set('from_group', False)
1238 reduced_list.append(cp_entry)
1239
1240
1241 flat_red_lists = expand_list(reduced_list)
1242 flat_vx_lists = expand_list(vertex_list)
1243
1244
1245 for i in range(0, len(flat_vx_lists)):
1246 res.append((base_objects.LegList(flat_red_lists[i]), \
1247 base_objects.VertexList(flat_vx_lists[i])))
1248
1249 return res
1250
1252 """Create a set of new legs from the info given. This can be
1253 overloaded by daughter classes."""
1254
1255 mylegs = [(base_objects.Leg({'id':leg_id,
1256 'number':number,
1257 'state':state,
1258 'from_group':True}),
1259 vert_id)\
1260 for leg_id, vert_id in leg_vert_ids]
1261
1262 return mylegs
1263
1265 """Allow for selection of vertex ids. This can be
1266 overloaded by daughter classes."""
1267
1268 return vert_ids
1269
1271 """Reduce the number of legs and vertices used in memory.
1272 When called by a diagram generation initiated by LoopAmplitude,
1273 this function should not trim the diagrams in the attribute 'diagrams'
1274 but rather a given list in the 'diaglist' argument."""
1275
1276 legs = []
1277 vertices = []
1278
1279 if diaglist is None:
1280 diaglist=self.get('diagrams')
1281
1282
1283 process = self.get('process')
1284 for leg in process.get('legs'):
1285 if leg.get('state') and leg.get('id') in decay_ids:
1286 leg.set('onshell', True)
1287
1288 for diagram in diaglist:
1289
1290 leg_external = set()
1291 for ivx, vertex in enumerate(diagram.get('vertices')):
1292 for ileg, leg in enumerate(vertex.get('legs')):
1293
1294 if leg.get('state') and leg.get('id') in decay_ids and \
1295 leg.get('number') not in leg_external:
1296
1297
1298 leg = copy.copy(leg)
1299 leg.set('onshell', True)
1300 try:
1301 index = legs.index(leg)
1302 except ValueError:
1303 vertex.get('legs')[ileg] = leg
1304 legs.append(leg)
1305 else:
1306 vertex.get('legs')[ileg] = legs[index]
1307 leg_external.add(leg.get('number'))
1308 try:
1309 index = vertices.index(vertex)
1310 diagram.get('vertices')[ivx] = vertices[index]
1311 except ValueError:
1312 vertices.append(vertex)
1313
1314
1315
1316
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects
    """

    def has_any_loop_process(self):
        """Check the content of all processes of the amplitudes in this list to
        see if there is any which defines perturbation couplings.

        Returns True as soon as one amplitude has a loop process; returns
        None (falsy) otherwise, preserving the original behavior.
        """

        for amp in self:
            if amp.has_loop_process():
                return True

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
1333
1338 """A list of amplitudes + a list of decay chain amplitude lists;
1339 corresponding to a ProcessDefinition with a list of decay chains
1340 """
1341
1347
    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False, loop_filter=None, diagram_filter=False):
        """Allow initialization with Process and with ProcessDefinition

        argument: a Process/ProcessDefinition (full generation), any other
            non-None object (passed to the mother PhysicsObject init), or
            None (empty initialization).
        collect_mirror_procs: combine initial-state-mirrored processes
        ignore_six_quark_processes: skip processes with >= 6 such quarks
        loop_filter / diagram_filter: forwarded to diagram generation
        """

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Local import -- presumably to avoid a circular import at
            # module load time (loop_diagram_generation imports this
            # module); confirm against the full source.
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            # ProcessDefinition is tested first since it is the more
            # specific type (a subclass of Process)
            if isinstance(argument, base_objects.ProcessDefinition):
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                collect_mirror_procs,
                                                ignore_six_quark_processes,
                                                loop_filter=loop_filter,
                                                diagram_filter=diagram_filter))
            else:
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument,
                                                loop_filter=loop_filter,
                                                diagram_filter=diagram_filter))
                # Clean decay chains from the process, since decay chains
                # have not been combined with the core process yet
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            # Recursively generate amplitudes for each decay chain
            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    raise MadGraph5Error("Decay processes can not be perturbed")
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd("Decay chain process must have exactly one" + \
                                     " incoming particle")
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes,
                                        diagram_filter=diagram_filter))

            # Collect the ids of all particles that have a defined decay,
            # and flag the corresponding legs in the core diagrams
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Remove from decay_ids every id that does appear in some core
            # process; what remains are orphan decays
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            if decay_ids:
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
                    "$RED Decay without corresponding particle in core process found.\n" + \
                    "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
                    "Please check your process definition carefully. \n" + \
                    "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
                    "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

                # Discard the orphan decays; iterate in reverse so removal
                # does not disturb the iteration
                for dc in reversed(self['decay_chains']):
                    for a in reversed(dc.get('amplitudes')):
                        if a.get('process').get('legs')[0].get('id') in decay_ids:
                            dc.get('amplitudes').remove(a)
                    if not dc.get('amplitudes'):
                        # No amplitudes left: drop the whole decay chain
                        self['decay_chains'].remove(dc)

            # Warn about decays where the decaying particle reappears in
            # its own final state (X > X ...)
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
                    "$RED Decay(s) with particle decaying to itself:\n" + \
                    '\n'.join([p.nice_string() for p in bad_procs]) + \
                    "\nPlease check your process definition carefully. \n")

        elif argument != None:
            # Initialize with the given (non-Process) argument
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # Empty initialization
            super(DecayChainAmplitude, self).__init__()
1452
1453 - def filter(self, name, value):
1454 """Filter for valid amplitude property values."""
1455
1456 if name == 'amplitudes':
1457 if not isinstance(value, AmplitudeList):
1458 raise self.PhysicsObjectError("%s is not a valid AmplitudeList" % str(value))
1459 if name == 'decay_chains':
1460 if not isinstance(value, DecayChainAmplitudeList):
1461 raise self.PhysicsObjectError("%s is not a valid DecayChainAmplitudeList object" % \
1462 str(value))
1463 return True
1464
1466 """Return diagram property names as a nicely sorted list."""
1467
1468 return ['amplitudes', 'decay_chains']
1469
1470
1471
1473 """Returns number of diagrams for this amplitude"""
1474 return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
1475 + sum(d.get_number_of_diagrams() for d in \
1476 self.get('decay_chains'))
1477
1479 """Returns a nicely formatted string of the amplitude content."""
1480 mystr = ""
1481 for amplitude in self.get('amplitudes'):
1482 mystr = mystr + amplitude.nice_string(indent) + "\n"
1483
1484 if self.get('decay_chains'):
1485 mystr = mystr + " " * indent + "Decays:\n"
1486 for dec in self.get('decay_chains'):
1487 mystr = mystr + dec.nice_string(indent + 2) + "\n"
1488
1489 return mystr[:-1]
1490
1492 """Returns a nicely formatted string of the amplitude processes."""
1493 mystr = ""
1494 for amplitude in self.get('amplitudes'):
1495 mystr = mystr + amplitude.nice_string_processes(indent) + "\n"
1496
1497 if self.get('decay_chains'):
1498 mystr = mystr + " " * indent + "Decays:\n"
1499 for dec in self.get('decay_chains'):
1500 mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"
1501
1502 return mystr[:-1]
1503
1505 """Returns the number of initial state particles in the process."""
1506 return self.get('amplitudes')[0].get('process').get_ninitial()
1507
1509 """Returns a set of all particle ids for which a decay is defined"""
1510
1511 decay_ids = []
1512
1513
1514 for amp in sum([dc.get('amplitudes') for dc \
1515 in self['decay_chains']], []):
1516
1517 decay_ids.append(amp.get('process').get_initial_ids()[0])
1518
1519
1520 return list(set(decay_ids))
1521
1523 """ Returns wether this amplitude has a loop process."""
1524 return self['amplitudes'].has_any_loop_process()
1525
1527 """Recursive function to extract all amplitudes for this process"""
1528
1529 amplitudes = AmplitudeList()
1530
1531 amplitudes.extend(self.get('amplitudes'))
1532 for decay in self.get('decay_chains'):
1533 amplitudes.extend(decay.get_amplitudes())
1534
1535 return amplitudes
1536
1542 """List of DecayChainAmplitude objects
1543 """
1544
1546 """Test if object obj is a valid DecayChainAmplitude for the list."""
1547
1548 return isinstance(obj, DecayChainAmplitude)
1549
1550
1551
1552
1553
1554 -class MultiProcess(base_objects.PhysicsObject):
1555 """MultiProcess: list of process definitions
1556 list of processes (after cleaning)
1557 list of amplitudes (after generation)
1558 """
1559
1561 """Default values for all properties"""
1562
1563 self['process_definitions'] = base_objects.ProcessDefinitionList()
1564
1565
1566
1567 self['amplitudes'] = AmplitudeList()
1568
1569 self['collect_mirror_procs'] = False
1570
1571
1572 self['ignore_six_quark_processes'] = []
1573
1574
1575 self['use_numerical'] = False
1576
1577 - def __init__(self, argument=None, collect_mirror_procs = False,
1578 ignore_six_quark_processes = [], optimize=False,
1579 loop_filter=None, diagram_filter=None):
1607
1608
1609 - def filter(self, name, value):
1610 """Filter for valid process property values."""
1611
1612 if name == 'process_definitions':
1613 if not isinstance(value, base_objects.ProcessDefinitionList):
1614 raise self.PhysicsObjectError("%s is not a valid ProcessDefinitionList object" % str(value))
1615
1616 if name == 'amplitudes':
1617 if not isinstance(value, AmplitudeList):
1618 raise self.PhysicsObjectError("%s is not a valid AmplitudeList object" % str(value))
1619
1620 if name in ['collect_mirror_procs']:
1621 if not isinstance(value, bool):
1622 raise self.PhysicsObjectError("%s is not a valid boolean" % str(value))
1623
1624 if name == 'ignore_six_quark_processes':
1625 if not isinstance(value, list):
1626 raise self.PhysicsObjectError("%s is not a valid list" % str(value))
1627
1628 return True
1629
    def get(self, name):
        """Get the value of the property name.

        Overloaded so that 'amplitudes' is generated lazily on first
        access, from the stored process definitions.
        """

        if (name == 'amplitudes') and not self[name]:
            for process_def in self.get('process_definitions'):
                if process_def.get('decay_chains'):
                    # This is a decay chain process: wrap the generated
                    # amplitudes in a DecayChainAmplitude
                    # NOTE(review): 'diagram_filter'/'loop_filter' keys
                    # are presumably set in __init__ (not visible in this
                    # extract) -- confirm.
                    self['amplitudes'].append(\
                        DecayChainAmplitude(process_def,
                                            self.get('collect_mirror_procs'),
                                            self.get('ignore_six_quark_processes'),
                                            diagram_filter=self['diagram_filter']))
                else:
                    # Plain process: generate amplitudes directly
                    self['amplitudes'].extend(\
                        self.generate_multi_amplitudes(process_def,
                                            self.get('collect_mirror_procs'),
                                            self.get('ignore_six_quark_processes'),
                                            self['use_numerical'],
                                            loop_filter=self['loop_filter'],
                                            diagram_filter=self['diagram_filter']))

        # Delegate the actual lookup to the parent class
        return MultiProcess.__bases__[0].get(self, name)
1653
1655 """Return process property names as a nicely sorted list."""
1656
1657 return ['process_definitions', 'amplitudes']
1658
1660
1661 return self['process_definitions'][0]['model']
1662
    @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False,
                                  loop_filter=None,
                                  diagram_filter=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.
        """
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

        # Set automatic coupling orders for the process definition
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition,
                                                           diagram_filter))
        # Check for maximum orders from the model
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs and success_procs are sorted leg-id arrays for
        # processes that have already failed/succeeded, used to exploit
        # crossing symmetry
        failed_procs = []
        success_procs = []
        # Complete (unsorted) processes, for identifying mirror processes
        non_permuted_procs = []
        # Permutations of the successfully crossed processes
        permutations = []

        model = process_definition['model']

        # Split legs/ids by initial vs final state
        islegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == False]
        fslegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == True]

        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]
        polids = [tuple(leg['polarization']) for leg in process_definition['legs'] \
                  if leg['state'] == True]

        # Generate all id combinations for the initial state
        for prod in itertools.product(*isids):
            islegs = [\
                base_objects.Leg({'id':id, 'state': False,
                                  'polarization': islegs[i]['polarization']})
                for i,id in enumerate(prod)]

            # Generate all combinations for the final state, and make
            # sure to remove double counting (same ids+polarizations in
            # a different order)

            red_fsidlist = set()

            for prod in itertools.product(*fsids):
                tag = zip(prod, polids)
                tag = sorted(tag)
                # Remove double counting between final states
                if tuple(tag) in red_fsidlist:
                    continue

                red_fsidlist.add(tuple(tag))

                # Build the leg list for this concrete process
                leg_list = [copy.copy(leg) for leg in islegs]
                leg_list.extend([\
                    base_objects.Leg({'id':id, 'state': True, 'polarization': fslegs[i]['polarization']}) \
                    for i,id in enumerate(prod)])

                legs = base_objects.LegList(leg_list)

                # Canonical (sorted) outgoing-id signature, plus the
                # permutation that maps back to the original order --
                # this is the key used for crossing detection
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                      enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]

                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Skip processes with six or more ignored quarks
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # If a crossing of this process has already failed,
                # this process fails too
                if sorted_legs in failed_procs:
                    continue

                # Numerical check: skip decays closed by phase space
                # (initial mass not above the sum of final masses)
                if use_numerical:
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Setup the concrete process
                process = process_definition.get_process_with_legs(legs)

                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                       process_definition.get_ninitial() == 2:
                    # Check if the initial-state-mirrored process was
                    # already generated
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                                   amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # Didn't find any mirror process
                        pass
                    else:
                        # Mirror process found: flag it and skip this one
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Look for a successful crossing to reuse, unless the
                # process has properties that break crossing symmetry
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain') and not diagram_filter:
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                        # Leg relabeling is not implemented for loop
                        # amplitudes, so treat as "no crossing found"
                        if 'loop_diagrams' in amplitudes[crossed_index]:
                            raise ValueError
                    except ValueError:
                        # No crossing found, generate from scratch below
                        pass
                    else:
                        # Found a crossing -- reuse the diagrams
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Create a new amplitude of the appropriate type
                amplitude = cls.get_amplitude_from_proc(process,
                                                        loop_filter=loop_filter)

                try:
                    result = amplitude.generate_diagrams(diagram_filter=diagram_filter)
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    if amplitude.get('diagrams'):
                        # Valid amplitude: record it for crossing reuse
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Diagram generation failed for all crossings
                        failed_procs.append(sorted_legs)

        # Raise exception if there are no amplitudes for this process
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                raise error
            else:
                raise NoDiagramException("No amplitudes generated from process %s. Please enter a valid process" % \
                                         process_definition.nice_string())

        return amplitudes
1859
1860 @classmethod
1862 """ Return the correct amplitude type according to the characteristics of
1863 the process proc. The only option that could be specified here is
1864 loop_filter and it is of course not relevant for a tree amplitude."""
1865
1866 return Amplitude({"process": proc})
1867
1868
1869 @staticmethod
1871 """Find the minimal WEIGHTED order for this set of processes.
1872
1873 The algorithm:
1874
1875 1) Check the coupling hierarchy of the model. Assign all
1876 particles to the different coupling hierarchies so that a
1877 particle is considered to be in the highest hierarchy (i.e.,
1878 with lowest value) where it has an interaction.
1879
1880 2) Pick out the legs in the multiprocess according to the
1881 highest hierarchy represented (so don't mix particles from
1882 different hierarchy classes in the same multiparticles!)
1883
1884 3) Find the starting maximum WEIGHTED order as the sum of the
1885 highest n-2 weighted orders
1886
1887 4) Pick out required s-channel particle hierarchies, and use
1888 the highest of the maximum WEIGHTED order from the legs and
1889 the minimum WEIGHTED order extracted from 2*s-channel
1890 hierarchys plus the n-2-2*(number of s-channels) lowest
1891 leg weighted orders.
1892
1893 5) Run process generation with the WEIGHTED order determined
1894 in 3)-4) - # final state gluons, with all gluons removed from
1895 the final state
1896
1897 6) If no process is found, increase WEIGHTED order by 1 and go
1898 back to 5), until we find a process which passes. Return that
1899 order.
1900
1901 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1.
1902 If still no process has passed, return
1903 WEIGHTED = (n-2)*(highest hierarchy)
1904 """
1905
1906 assert isinstance(process_definition, base_objects.ProcessDefinition), \
1907 "%s not valid ProcessDefinition object" % \
1908 repr(process_definition)
1909
1910 processes = base_objects.ProcessList()
1911 amplitudes = AmplitudeList()
1912
1913
1914 if process_definition.get('orders') or \
1915 process_definition.get('overall_orders') or \
1916 process_definition.get('NLO_mode')=='virt':
1917 return process_definition.get('orders')
1918
1919
1920 if process_definition.get_ninitial() == 1 and not \
1921 process_definition.get('is_decay_chain'):
1922 return process_definition.get('orders')
1923
1924 logger.info("Checking for minimal orders which gives processes.")
1925 logger.info("Please specify coupling orders to bypass this step.")
1926
1927
1928 max_order_now, particles, hierarchy = \
1929 process_definition.get_minimum_WEIGHTED()
1930 coupling = 'WEIGHTED'
1931
1932 model = process_definition.get('model')
1933
1934
1935 isids = [leg['ids'] for leg in \
1936 [leg for leg in process_definition['legs'] if leg['state'] == False]]
1937 fsids = [leg['ids'] for leg in \
1938 [leg for leg in process_definition['legs'] if leg['state'] == True]]
1939
1940 max_WEIGHTED_order = \
1941 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED())
1942
1943 hierarchydef = process_definition['model'].get('order_hierarchy')
1944 tmp = []
1945 hierarchy = list(hierarchydef.items())
1946 hierarchy.sort()
1947 for key, value in hierarchydef.items():
1948 if value>1:
1949 tmp.append('%s*%s' % (value,key))
1950 else:
1951 tmp.append('%s' % key)
1952 wgtdef = '+'.join(tmp)
1953
1954
1955 while max_order_now < max_WEIGHTED_order:
1956 logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef))
1957
1958 oldloglevel = logger.level
1959 logger.setLevel(logging.WARNING)
1960
1961
1962
1963 failed_procs = []
1964
1965 for prod in itertools.product(*isids):
1966 islegs = [ base_objects.Leg({'id':id, 'state': False}) \
1967 for id in prod]
1968
1969
1970
1971
1972 red_fsidlist = []
1973
1974 for prod in itertools.product(*fsids):
1975
1976
1977 if tuple(sorted(prod)) in red_fsidlist:
1978 continue
1979
1980 red_fsidlist.append(tuple(sorted(prod)));
1981
1982
1983
1984 nglue = 0
1985 if 21 in particles[0]:
1986 nglue = len([id for id in prod if id == 21])
1987 prod = [id for id in prod if id != 21]
1988
1989
1990 leg_list = [copy.copy(leg) for leg in islegs]
1991
1992 leg_list.extend([\
1993 base_objects.Leg({'id':id, 'state': True}) \
1994 for id in prod])
1995
1996 legs = base_objects.LegList(leg_list)
1997
1998
1999
2000 coupling_orders_now = {coupling: max_order_now - \
2001 nglue * model['order_hierarchy']['QCD']}
2002
2003
2004 process = base_objects.Process({\
2005 'legs':legs,
2006 'model':model,
2007 'id': process_definition.get('id'),
2008 'orders': coupling_orders_now,
2009 'required_s_channels': \
2010 process_definition.get('required_s_channels'),
2011 'forbidden_onsh_s_channels': \
2012 process_definition.get('forbidden_onsh_s_channels'),
2013 'sqorders_types': \
2014 process_definition.get('sqorders_types'),
2015 'squared_orders': \
2016 process_definition.get('squared_orders'),
2017 'split_orders': \
2018 process_definition.get('split_orders'),
2019 'forbidden_s_channels': \
2020 process_definition.get('forbidden_s_channels'),
2021 'forbidden_particles': \
2022 process_definition.get('forbidden_particles'),
2023 'is_decay_chain': \
2024 process_definition.get('is_decay_chain'),
2025 'overall_orders': \
2026 process_definition.get('overall_orders'),
2027 'split_orders': \
2028 process_definition.get('split_orders')})
2029
2030
2031 process.check_expansion_orders()
2032
2033
2034 sorted_legs = sorted(legs.get_outgoing_id_list(model))
2035
2036
2037 if tuple(sorted_legs) in failed_procs and not process_definition.get('forbidden_s_channels'):
2038 continue
2039
2040 amplitude = Amplitude({'process': process})
2041 try:
2042 amplitude.generate_diagrams(diagram_filter=diagram_filter)
2043 except InvalidCmd as error:
2044 failed_procs.append(tuple(sorted_legs))
2045 else:
2046 if amplitude.get('diagrams'):
2047
2048 logger.setLevel(oldloglevel)
2049 return {coupling: max_order_now}
2050 else:
2051 failed_procs.append(tuple(sorted_legs))
2052
2053 max_order_now += 1
2054 logger.setLevel(oldloglevel)
2055
2056
2057 return {coupling: max_order_now}
2058
2059 @staticmethod
2061 """Return the amplitude crossed with the permutation new_perm"""
2062
2063 perm_map = dict(list(zip(org_perm, new_perm)))
2064
2065 new_amp = copy.copy(amplitude)
2066
2067 for i, leg in enumerate(process.get('legs')):
2068 leg.set('number', i+1)
2069
2070 new_amp.set('process', process)
2071
2072 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map,
2073 process.get('legs'),) for \
2074 d in new_amp.get('diagrams')])
2075 new_amp.set('diagrams', diagrams)
2076 new_amp.trim_diagrams()
2077
2078
2079 new_amp.set('has_mirror_process', False)
2080
2081 return new_amp
2082
2088 """Takes a list of lists and elements and returns a list of flat lists.
2089 Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
2090 """
2091
2092
2093 assert isinstance(mylist, list), "Expand_list argument must be a list"
2094
2095 res = []
2096
2097 tmplist = []
2098 for item in mylist:
2099 if isinstance(item, list):
2100 tmplist.append(item)
2101 else:
2102 tmplist.append([item])
2103
2104 for item in itertools.product(*tmplist):
2105 res.append(list(item))
2106
2107 return res
2108
2110 """Recursive function. Takes a list of lists and lists of lists
2111 and returns a list of flat lists.
2112 Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
2113 """
2114
2115 res = []
2116
2117 if not mylist or len(mylist) == 1 and not mylist[0]:
2118 return [[]]
2119
2120
2121 assert isinstance(mylist[0], list), \
2122 "Expand_list_list needs a list of lists and lists of lists"
2123
2124
2125 if len(mylist) == 1:
2126 if isinstance(mylist[0][0], list):
2127 return mylist[0]
2128 else:
2129 return mylist
2130
2131 if isinstance(mylist[0][0], list):
2132 for item in mylist[0]:
2133
2134
2135
2136 for rest in expand_list_list(mylist[1:]):
2137 reslist = copy.copy(item)
2138 reslist.extend(rest)
2139 res.append(reslist)
2140 else:
2141 for rest in expand_list_list(mylist[1:]):
2142 reslist = copy.copy(mylist[0])
2143 reslist.extend(rest)
2144 res.append(reslist)
2145
2146
2147 return res
2148