1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """Classes for diagram generation. Amplitude performs the diagram
16 generation, DecayChainAmplitude keeps track of processes with decay
17 chains, and MultiProcess allows generation of processes with
18 multiparticle definitions. DiagramTag allows to identify diagrams
19 based on relevant properties.
20 """
21
22 import array
23 import copy
24 import itertools
25 import logging
26
27 import madgraph.core.base_objects as base_objects
28 import madgraph.various.misc as misc
29 from madgraph import InvalidCmd, MadGraph5Error
30
31 logger = logging.getLogger('madgraph.diagram_generation')
35
41 """Class to tag diagrams based on objects with some __lt__ measure, e.g.
42 PDG code/interaction id (for comparing diagrams from the same amplitude),
43 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs).
44 Algorithm: Create chains starting from external particles:
45 1 \ / 6
46 2 /\______/\ 7
47 3_ / | \_ 8
48 4 / 5 \_ 9
49 \ 10
50 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867)
51 (((1,2,id12),(3,4,id34)),id1234),
52 5,id91086712345)
53 where idN is the id of the corresponding interaction. The ordering within
54 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has
55 depth 4, 5 has depth 0), and if equal on the ordering of the chain elements.
56 The determination of central vertex is based on minimizing the chain length
57 for the longest subchain.
58 This gives a unique tag which can be used to identify diagrams
59 (instead of symmetry), as well as identify identical matrix elements from
60 different processes."""
61
63 """Exception for any problems in DiagramTags"""
64 pass
65
def __init__(self, diagram, model=None, ninitial=2):
    """Initialize with a diagram. Create DiagramTagChainLinks according to
    the diagram, and figure out if we need to shift the central vertex.

    diagram  -- base_objects.Diagram whose vertices are traversed in order
    model    -- passed through to link_from_leg/vertex_id_from_vertex
                (may be None for the default PDG-based tagging)
    ninitial -- number of initial-state legs, forwarded to
                vertex_id_from_vertex
    """

    # Map leg number -> DiagramTagChainLink built so far, so an internal
    # leg produced by an earlier vertex is represented by the link of its
    # whole subtree rather than by a fresh end link.
    leg_dict = {}

    for vertex in diagram.get('vertices'):
        # For all but the last vertex, the final leg is the one produced
        # by the vertex, so it is not a daughter of this link.
        legs = vertex.get('legs')[:-1]
        lastvx = vertex == diagram.get('vertices')[-1]
        if lastvx:
            # The last vertex is the central one: all legs are daughters.
            legs = vertex.get('legs')

        # Build the link for this vertex: already-seen legs are looked up
        # in leg_dict, unseen (external) legs get a new end link.
        link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                    DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                    for leg in legs],
                                   self.vertex_id_from_vertex(vertex,
                                                              lastvx,
                                                              model,
                                                              ninitial))

        if not lastvx:
            # Register the produced leg so later vertices can pick it up.
            leg_dict[vertex.get('legs')[-1].get('number')] = link

    # The link built for the last vertex is the preliminary tag.
    self.tag = link

    # Shift the central vertex to minimize the longest subchain: try to
    # re-root the tag one step into its deepest subchain (links[0], since
    # links are sorted in decreasing order) and keep the move only while
    # it improves the ordering measure.
    done = max([l.depth for l in self.tag.links]) == 0
    while not done:

        longest_chain = self.tag.links[0]

        # Candidate subchain: the current root without its deepest
        # daughter, with a correspondingly flipped vertex id.
        new_link = DiagramTagChainLink(self.tag.links[1:],
                                       self.flip_vertex(\
                                           self.tag.vertex_id,
                                           longest_chain.vertex_id,
                                           self.tag.links[1:]))

        # Candidate new root: the deepest daughter's links plus new_link.
        other_links = list(longest_chain.links) + [new_link]
        other_link = DiagramTagChainLink(other_links,
                                         self.flip_vertex(\
                                             longest_chain.vertex_id,
                                             self.tag.vertex_id,
                                             other_links))

        if other_link.links[0] < self.tag.links[0]:
            # Shifted tag has a shorter longest subchain; accept and
            # try to shift further.
            self.tag = other_link
        else:
            # No further improvement: the central vertex is settled.
            done = True
121
126
128 """Output a diagram from a DiagramTag. Note that each daughter
129 class must implement the static functions id_from_vertex_id
130 (if the vertex id is something else than an integer) and
131 leg_from_link (to pass the correct info from an end link to a
132 leg)."""
133
134
135 diagram = base_objects.Diagram({'vertices': \
136 self.vertices_from_link(self.tag,
137 model,
138 True)})
139 diagram.calculate_orders(model)
140 return diagram
141
142 @classmethod
144 """Recursively return the leg corresponding to this link and
145 the list of all vertices from all previous links"""
146
147 if link.end_link:
148
149 return cls.leg_from_link(link), []
150
151
152 leg_vertices = [cls.vertices_from_link(l, model) for l in link.links]
153
154 legs = base_objects.LegList(sorted([l for l,v in leg_vertices],
155 lambda l1,l2: l2.get('number') - \
156 l1.get('number')))
157
158 vertices = base_objects.VertexList(sum([v for l, v in leg_vertices],
159 []))
160
161 if not first_vertex:
162
163
164 last_leg = cls.leg_from_legs(legs,link.vertex_id,model)
165 legs.append(last_leg)
166
167
168 vertices.append(cls.vertex_from_link(legs,
169 link.vertex_id,
170 model))
171 if first_vertex:
172
173 return vertices
174 else:
175
176 return last_leg, vertices
177
178 @classmethod
180 """Returns the list of external PDGs of the interaction corresponding
181 to this vertex_id."""
182
183
184
185
186 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]):
187 return vertex_id[2]['PDGs']
188 else:
189 return [part.get_pdg_code() for part in model.get_interaction(
190 cls.id_from_vertex_id(vertex_id)).get('particles')]
191
192 @classmethod
194 """Return a leg from a leg list and the model info"""
195
196 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model))
197
198
199 for pdg in [leg.get('id') for leg in legs]:
200 pdgs.remove(pdg)
201
202 assert len(pdgs) == 1
203
204 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code()
205 number = min([l.get('number') for l in legs])
206
207 state = (len([l for l in legs if l.get('state') == False]) != 1)
208
209 onshell= False
210
211 return base_objects.Leg({'id': pdg,
212 'number': number,
213 'state': state,
214 'onshell': onshell})
215
216 @classmethod
229
230 @staticmethod
232 """Return a leg from a link"""
233
234 if link.end_link:
235
236 return base_objects.Leg({'number':link.links[0][1],
237 'id':link.links[0][0][0],
238 'state':(link.links[0][0][1] == 0),
239 'onshell':False})
240
241
242 assert False
243
244 @staticmethod
246 """Return the numerical vertex id from a link.vertex_id"""
247
248 return vertex_id[0][0]
249
250 @staticmethod
252 """Return the loop_info stored in this vertex id. Notice that the
253 IdentifyME tag does not store the loop_info, but should normally never
254 need access to it."""
255
256 return vertex_id[2]
257
258 @staticmethod
260 """Reorder a permutation with respect to start_perm. Note that
261 both need to start from 1."""
262 if perm == start_perm:
263 return range(len(perm))
264 order = [i for (p,i) in \
265 sorted([(p,i) for (i,p) in enumerate(perm)])]
266 return [start_perm[i]-1 for i in order]
267
268 @staticmethod
270 """Returns the default end link for a leg: ((id, state), number).
271 Note that the number is not taken into account if tag comparison,
272 but is used only to extract leg permutations."""
273 if leg.get('state'):
274
275 return [((leg.get('id'), 0), leg.get('number'))]
276 else:
277
278 return [((leg.get('id'), leg.get('number')), leg.get('number'))]
279
280 @staticmethod
282 """Returns the default vertex id: just the interaction id
283 Note that in the vertex id, like the leg, only the first entry is
284 taken into account in the tag comparison, while the second is for
285 storing information that is not to be used in comparisons and the
286 third for additional info regarding the shrunk loop vertex."""
287
288 if isinstance(vertex,base_objects.ContractedVertex):
289
290 return ((vertex.get('id'),vertex.get('loop_tag')),(),
291 {'PDGs':vertex.get('PDGs')})
292 else:
293 return ((vertex.get('id'),()),(),{})
294
295 @staticmethod
297 """Returns the default vertex flip: just the new_vertex"""
298 return new_vertex
299
301 """Equal if same tag"""
302 if type(self) != type(other):
303 return False
304 return self.tag == other.tag
305
307 return not self.__eq__(other)
308
311
313 return self.tag < other.tag
314
316 return self.tag > other.tag
317
318 __repr__ = __str__
319
321 """Chain link for a DiagramTag. A link is a tuple + vertex id + depth,
322 with a comparison operator defined"""
323
def __init__(self, objects, vertex_id = None):
    """Initialize, either with a tuple of DiagramTagChainLinks and
    a vertex_id (defined by DiagramTag.vertex_id_from_vertex), or
    with an external leg object (end link) defined by
    DiagramTag.link_from_leg.

    objects   -- an iterable of DiagramTagChainLinks (internal link),
                 or the list returned by DiagramTag.link_from_leg
                 (end link)
    vertex_id -- id from DiagramTag.vertex_id_from_vertex for an
                 internal link; None marks an end link
    """

    # Use an identity check for the None sentinel (PEP 8); the old
    # "== None" comparison relied on equality semantics.
    if vertex_id is None:
        # End link: wrap the external-leg info; zero depth, dummy id.
        self.links = tuple(objects)
        self.vertex_id = (0,)
        self.depth = 0
        self.end_link = True
        return

    # Internal link: sort daughters in decreasing order, so the tag is
    # independent of the order in which the legs were supplied.
    # (sorted() accepts any iterable; no need to wrap in list/tuple.)
    self.links = tuple(sorted(objects, reverse=True))
    self.vertex_id = vertex_id

    # Depth counts all daughter depths plus (n-1) for this vertex
    # (at least 1), so longer chains compare as deeper.
    self.depth = sum([l.depth for l in self.links],
                     max(1, len(self.links) - 1))
    self.end_link = False
345
347 """Get the permutation of external numbers (assumed to be the
348 second entry in the end link tuples)"""
349
350 if self.end_link:
351 return [self.links[0][1]]
352
353 return sum([l.get_external_numbers() for l in self.links], [])
354
356 """Compare self with other in the order:
357 1. depth 2. len(links) 3. vertex id 4. measure of links"""
358
359 if self == other:
360 return False
361
362 if self.depth != other.depth:
363 return self.depth < other.depth
364
365 if len(self.links) != len(other.links):
366 return len(self.links) < len(other.links)
367
368 if self.vertex_id[0] != other.vertex_id[0]:
369 return self.vertex_id[0] < other.vertex_id[0]
370
371 for i, link in enumerate(self.links):
372 if i > len(other.links) - 1:
373 return False
374 if link != other.links[i]:
375 return link < other.links[i]
376
378 return self != other and not self.__lt__(other)
379
381 """For end link,
382 consider equal if self.links[0][0] == other.links[0][0],
383 i.e., ignore the leg number (in links[0][1])."""
384
385 if self.end_link and other.end_link and self.depth == other.depth \
386 and self.vertex_id == other.vertex_id:
387 return self.links[0][0] == other.links[0][0]
388
389 return self.end_link == other.end_link and self.depth == other.depth \
390 and self.vertex_id[0] == other.vertex_id[0] \
391 and self.links == other.links
392
394 return not self.__eq__(other)
395
396
398 if self.end_link:
399 return str(self.links)
400 return "%s, %s; %d" % (str(self.links),
401 str(self.vertex_id),
402 self.depth)
403
404 __repr__ = __str__
405
406
407
408
409 -class Amplitude(base_objects.PhysicsObject):
410 """Amplitude: process + list of diagrams (ordered)
411 Initialize with a process, then call generate_diagrams() to
412 generate the diagrams for the amplitude
413 """
414
416 """Default values for all properties"""
417
418 self['process'] = base_objects.Process()
419 self['diagrams'] = None
420
421
422 self['has_mirror_process'] = False
423
436
437 - def filter(self, name, value):
453
454 - def get(self, name):
463
464
465
467 """Return diagram property names as a nicely sorted list."""
468
469 return ['process', 'diagrams', 'has_mirror_process']
470
472 """Returns number of diagrams for this amplitude"""
473 return len(self.get('diagrams'))
474
476 """Return an AmplitudeList with just this amplitude.
477 Needed for DecayChainAmplitude."""
478
479 return AmplitudeList([self])
480
482 """Returns a nicely formatted string of the amplitude content."""
483 return self.get('process').nice_string(indent) + "\n" + \
484 self.get('diagrams').nice_string(indent)
485
487 """Returns a nicely formatted string of the amplitude process."""
488 return self.get('process').nice_string(indent)
489
491 """Returns the number of initial state particles in the process."""
492 return self.get('process').get_ninitial()
493
495 """ Returns wether this amplitude has a loop process."""
496
497 return self.get('process').get('perturbation_couplings')
498
500 """Generate diagrams. Algorithm:
501
502 1. Define interaction dictionaries:
503 * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
504 * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1
505
506 2. Set flag from_group=true for all external particles.
507 Flip particle/anti particle for incoming particles.
508
509 3. If there is a dictionary n->0 with n=number of external
510 particles, create if possible the combination [(1,2,3,4,...)]
511 with *at least two* from_group==true. This will give a
512 finished (set of) diagram(s) (done by reduce_leglist)
513
514 4. Create all allowed groupings of particles with at least one
515 from_group==true (according to dictionaries n->1):
516 [(1,2),3,4...],[1,(2,3),4,...],...,
517 [(1,2),(3,4),...],...,[(1,2,3),4,...],...
518 (done by combine_legs)
519
520 5. Replace each group with a (list of) new particle(s) with number
521 n = min(group numbers). Set from_group true for these
522 particles and false for all other particles. Store vertex info.
523 (done by merge_comb_legs)
524
525 6. Stop algorithm when at most 2 particles remain.
526 Return all diagrams (lists of vertices).
527
528 7. Repeat from 3 (recursion done by reduce_leglist)
529
530 8. Replace final p=p vertex
531
532 Be aware that the resulting vertices have all particles outgoing,
533 so need to flip for incoming particles when used.
534
535 SPECIAL CASE: For A>BC... processes which are legs in decay
536 chains, we need to ensure that BC... combine first, giving A=A
537 as a final vertex. This case is defined by the Process
538 property is_decay_chain = True.
539 This function can also be called by the generate_diagram function
540 of LoopAmplitudes, in which case the generated diagrams here must not
541 be directly assigned to the 'diagrams' attributed but returned as a
542 DiagramList by the function. This is controlled by the argument
543 returndiag.
544 """
545
546 process = self.get('process')
547 model = process.get('model')
548 legs = process.get('legs')
549
550 for key in process.get('overall_orders').keys():
551 try:
552 process.get('orders')[key] = \
553 min(process.get('orders')[key],
554 process.get('overall_orders')[key])
555 except KeyError:
556 process.get('orders')[key] = process.get('overall_orders')[key]
557
558 assert model.get('particles'), \
559 "particles are missing in model: %s" % model.get('particles')
560
561 assert model.get('interactions'), \
562 "interactions are missing in model"
563
564
565 res = base_objects.DiagramList()
566
567 if len(filter(lambda leg: model.get('particle_dict')[\
568 leg.get('id')].is_fermion(), legs)) % 2 == 1:
569 if not returndiag:
570 self['diagrams'] = res
571 raise InvalidCmd, 'The number of fermion is odd'
572 else:
573 return False, res
574
575
576
577 if not model.get('got_majoranas') and \
578 len(filter(lambda leg: leg.is_incoming_fermion(model), legs)) != \
579 len(filter(lambda leg: leg.is_outgoing_fermion(model), legs)):
580 if not returndiag:
581 self['diagrams'] = res
582 raise InvalidCmd, 'The number of of incoming/outcoming fermions are different'
583 else:
584 return False, res
585
586
587
588 for charge in model.get('conserved_charge'):
589 total = 0
590 for leg in legs:
591 part = model.get('particle_dict')[leg.get('id')]
592 try:
593 value = part.get(charge)
594 except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
595 try:
596 value = getattr(part, charge)
597 except AttributeError:
598 value = 0
599
600 if (leg.get('id') != part['pdg_code']) != leg['state']:
601 total -= value
602 else:
603 total += value
604
605 if abs(total) > 1e-10:
606 if not returndiag:
607 self['diagrams'] = res
608 raise InvalidCmd, 'No %s conservation for this process ' % charge
609 return res
610 else:
611 raise InvalidCmd, 'No %s conservation for this process ' % charge
612 return res, res
613
614 if not returndiag:
615 logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))
616
617
618 for i in range(0, len(process.get('legs'))):
619
620 leg = copy.copy(process.get('legs')[i])
621 process.get('legs')[i] = leg
622 if leg.get('number') == 0:
623 leg.set('number', i + 1)
624
625
626
627 leglist = self.copy_leglist(process.get('legs'))
628
629 for leg in leglist:
630
631
632
633 leg.set('from_group', True)
634
635
636
637 if leg.get('state') == False:
638 part = model.get('particle_dict')[leg.get('id')]
639 leg.set('id', part.get_anti_pdg_code())
640
641
642
643 max_multi_to1 = max([len(key) for key in \
644 model.get('ref_dict_to1').keys()])
645
646
647
648
649
650
651
652
653 is_decay_proc = process.get_ninitial() == 1
654 if is_decay_proc:
655 part = model.get('particle_dict')[leglist[0].get('id')]
656
657
658
659 ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
660 (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
661
662
663 leglist[0].set('from_group', None)
664 reduced_leglist = self.reduce_leglist(leglist,
665 max_multi_to1,
666 ref_dict_to0,
667 is_decay_proc,
668 process.get('orders'))
669 else:
670 reduced_leglist = self.reduce_leglist(leglist,
671 max_multi_to1,
672 model.get('ref_dict_to0'),
673 is_decay_proc,
674 process.get('orders'))
675
676
677
678
679 self.convert_dgleg_to_leg(reduced_leglist)
680
681 if reduced_leglist:
682 for vertex_list in reduced_leglist:
683 res.append(self.create_diagram(base_objects.VertexList(vertex_list)))
684
685
686
687 failed_crossing = not res
688
689
690
691
692
693
694 if process.get('required_s_channels') and \
695 process.get('required_s_channels')[0]:
696
697
698 lastvx = -1
699
700
701
702 if is_decay_proc: lastvx = -2
703 ninitial = len(filter(lambda leg: leg.get('state') == False,
704 process.get('legs')))
705
706 old_res = res
707 res = base_objects.DiagramList()
708 for id_list in process.get('required_s_channels'):
709 res_diags = filter(lambda diagram: \
710 all([req_s_channel in \
711 [vertex.get_s_channel_id(\
712 process.get('model'), ninitial) \
713 for vertex in diagram.get('vertices')[:lastvx]] \
714 for req_s_channel in \
715 id_list]), old_res)
716
717 res.extend([diag for diag in res_diags if diag not in res])
718
719
720
721
722
723 if process.get('forbidden_s_channels'):
724 ninitial = len(filter(lambda leg: leg.get('state') == False,
725 process.get('legs')))
726 if ninitial == 2:
727 res = base_objects.DiagramList(\
728 filter(lambda diagram: \
729 not any([vertex.get_s_channel_id(\
730 process.get('model'), ninitial) \
731 in process.get('forbidden_s_channels')
732 for vertex in diagram.get('vertices')[:-1]]),
733 res))
734 else:
735
736
737 newres= []
738 for diagram in res:
739 leg1 = 1
740
741
742
743 vertex = diagram.get('vertices')[-1]
744 if any([l['number'] ==1 for l in vertex.get('legs')]):
745 leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
746 to_loop = range(len(diagram.get('vertices'))-1)
747 if leg1 >1:
748 to_loop.reverse()
749 for i in to_loop:
750 vertex = diagram.get('vertices')[i]
751 if leg1:
752 if any([l['number'] ==leg1 for l in vertex.get('legs')]):
753 leg1 = 0
754 continue
755 if vertex.get_s_channel_id(process.get('model'), ninitial)\
756 in process.get('forbidden_s_channels'):
757 break
758 else:
759 newres.append(diagram)
760 res = base_objects.DiagramList(newres)
761
762
763
764
765 if process.get('forbidden_onsh_s_channels'):
766 ninitial = len(filter(lambda leg: leg.get('state') == False,
767 process.get('legs')))
768
769 verts = base_objects.VertexList(sum([[vertex for vertex \
770 in diagram.get('vertices')[:-1]
771 if vertex.get_s_channel_id(\
772 process.get('model'), ninitial) \
773 in process.get('forbidden_onsh_s_channels')] \
774 for diagram in res], []))
775 for vert in verts:
776
777 newleg = copy.copy(vert.get('legs').pop(-1))
778 newleg.set('onshell', False)
779 vert.get('legs').append(newleg)
780
781
782 for diagram in res:
783 diagram.calculate_orders(model)
784
785
786
787
788
789
790
791
792 if not returndiag and len(res)>0:
793 res = self.apply_squared_order_constraints(res)
794
795
796 if not process.get('is_decay_chain'):
797 for diagram in res:
798 vertices = diagram.get('vertices')
799 if len(vertices) > 1 and vertices[-1].get('id') == 0:
800
801
802
803
804 vertices = copy.copy(vertices)
805 lastvx = vertices.pop()
806 nexttolastvertex = copy.copy(vertices.pop())
807 legs = copy.copy(nexttolastvertex.get('legs'))
808 ntlnumber = legs[-1].get('number')
809 lastleg = filter(lambda leg: leg.get('number') != ntlnumber,
810 lastvx.get('legs'))[0]
811
812 if lastleg.get('onshell') == False:
813 lastleg.set('onshell', None)
814
815 legs[-1] = lastleg
816 nexttolastvertex.set('legs', legs)
817 vertices.append(nexttolastvertex)
818 diagram.set('vertices', vertices)
819
820 if res and not returndiag:
821 logger.info("Process has %d diagrams" % len(res))
822
823
824 self.trim_diagrams(diaglist=res)
825
826
827 pertur = 'QCD'
828 if self.get('process')['perturbation_couplings']:
829 pertur = sorted(self.get('process')['perturbation_couplings'])[0]
830 self.get('process').get('legs').sort(pert=pertur)
831
832
833 if not returndiag:
834 self['diagrams'] = res
835 return not failed_crossing
836 else:
837 return not failed_crossing, res
838
840 """Applies the user specified squared order constraints on the diagram
841 list in argument."""
842
843 res = copy.copy(diag_list)
844
845
846
847
848 while True:
849 new_res = res.apply_positive_sq_orders(res,
850 self['process'].get('squared_orders'),
851 self['process']['sqorders_types'])
852
853 if len(res)==len(new_res):
854 break
855 elif (len(new_res)>len(res)):
856 raise MadGraph5Error(
857 'Inconsistency in function apply_squared_order_constraints().')
858
859 res = new_res
860
861
862 neg_orders = [(order, value) for order, value in \
863 self['process'].get('squared_orders').items() if value<0]
864 if len(neg_orders)==1:
865 neg_order, neg_value = neg_orders[0]
866
867 res, target_order = res.apply_negative_sq_order(res, neg_order,\
868 neg_value, self['process']['sqorders_types'][neg_order])
869
870
871
872
873 self['process']['squared_orders'][neg_order]=target_order
874 elif len(neg_orders)>1:
875 raise InvalidCmd('At most one negative squared order constraint'+\
876 ' can be specified, not %s.'%str(neg_orders))
877
878 return res
879
881 """ Return a Diagram created from the vertex list. This function can be
882 overloaded by daughter classes."""
883 return base_objects.Diagram({'vertices':vertexlist})
884
886 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs.
887 In Amplitude, there is nothing to do. """
888
889 return True
890
892 """ Simply returns a copy of the leg list. This function is
893 overloaded in LoopAmplitude so that a DGLoopLeg list is returned.
894 The DGLoopLeg has some additional parameters only useful during
895 loop diagram generation"""
896
897 return base_objects.LegList(\
898 [ copy.copy(leg) for leg in legs ])
899
def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                   is_decay_proc = False, coupling_orders = None):
    """Recursive function to reduce N LegList to N-1.
    For the algorithm, see doc for generate_diagrams.

    curr_leglist    -- LegList still to be combined (None propagates
                       failure from a previous step)
    max_multi_to1   -- largest n for which an n->1 vertex exists
    ref_dict_to0    -- dictionary of leg-id tuples that can combine to
                       nothing (i.e. close the diagram)
    is_decay_proc   -- True for 1 -> N decay processes
    coupling_orders -- remaining coupling-order budget, or None

    Returns a list of vertex lists (one per diagram), or None if no
    diagram could be completed from this leg list.
    """

    # Result: list of lists of vertices, one inner list per diagram.
    res = []

    # Stop condition: a previous step failed to produce a valid leg list.
    if curr_leglist is None:
        return None

    # Extract ref dict information
    model = self.get('process').get('model')
    ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

    # If all legs can be combined to a single closing vertex, this branch
    # of the recursion terminates here with one finished diagram per
    # allowed interaction id.
    if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):

        # Hook for daughter classes to veto/select the closing vertex ids.
        vertex_ids = self.get_combined_vertices(curr_leglist,
                    copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                                leg in curr_leglist]))]))

        final_vertices = [base_objects.Vertex({'legs':curr_leglist,
                                               'id':vertex_id}) for \
                          vertex_id in vertex_ids]

        # Only keep closing vertices that respect the coupling orders.
        for final_vertex in final_vertices:
            if self.reduce_orders(coupling_orders, model,
                                  [final_vertex.get('id')]) != False:
                res.append([final_vertex])

    # Stop condition: 2 legs but no combination to 0 was possible.
    if len(curr_leglist) == 2:
        if res:
            return res
        else:
            return None

    # Create a list of all valid n->1 groupings of the current legs.
    comb_lists = self.combine_legs(curr_leglist,
                                   ref_dict_to1, max_multi_to1)

    # Replace each grouping by the possible merged legs + the vertices
    # performing the merge (one entry per intermediate particle).
    leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

    # Recurse on each (reduced leg list, vertex list) alternative.
    for leg_vertex_tuple in leg_vertex_list:

        # Skip alternatives that produce a forbidden particle.
        if self.get('process').get('forbidden_particles') and \
            any([abs(vertex.get('legs')[-1].get('id')) in \
                self.get('process').get('forbidden_particles') \
                for vertex in leg_vertex_tuple[1]]):
            continue

        # Subtract the orders used by the new vertices from the budget.
        new_coupling_orders = self.reduce_orders(coupling_orders,
                                                 model,
                                                 [vertex.get('id') for vertex in \
                                                  leg_vertex_tuple[1]])
        if new_coupling_orders == False:
            # Some coupling order exceeded its budget: prune this branch.
            continue

        # Recursion: reduce the new leg list by one more step.
        reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                              max_multi_to1,
                                              ref_dict_to0,
                                              is_decay_proc,
                                              new_coupling_orders)

        # Combine the new vertices with each sub-diagram found.
        if reduced_diagram:
            vertex_list_list = [list(leg_vertex_tuple[1])]
            vertex_list_list.append(reduced_diagram)
            expanded_list = expand_list_list(vertex_list_list)
            res.extend(expanded_list)

    return res
988
def reduce_orders(self, coupling_orders, model, vertex_id_list):
    """Subtract the coupling orders of the listed vertices from the
    given budget and return the remainder.

    Returns False as soon as any tracked (non-negative) order would
    drop below zero, None when no coupling_orders are given (which
    counts as success), and otherwise the dictionary of remaining
    orders. WEIGHTED is a special order: the sum of the hierarchy
    weights of all couplings. Negative order constraints cannot be
    applied on the fly and are ignored here (handled after
    generation).
    """

    if not coupling_orders:
        return None

    remaining = copy.copy(coupling_orders)

    for vertex_id in vertex_id_list:
        # id 0 is the identity vertex, which carries no couplings.
        if not vertex_id:
            continue
        interaction = model.get("interaction_dict")[vertex_id]
        orders = interaction.get('orders')

        for coupling, power in orders.items():
            # Entries that are already negative encode "forbidden"
            # constraints handled elsewhere; leave them untouched.
            if coupling not in remaining or remaining[coupling] < 0:
                continue
            remaining[coupling] -= power
            if remaining[coupling] < 0:
                # Budget for this coupling exhausted.
                return False

        if 'WEIGHTED' in remaining and remaining['WEIGHTED'] >= 0:
            hierarchy = model.get('order_hierarchy')
            remaining['WEIGHTED'] -= sum(hierarchy[c] * n
                                         for c, n in orders.items())
            if remaining['WEIGHTED'] < 0:
                # Total weighted budget exhausted.
                return False

    return remaining
1030
def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
    """Recursive function. Take a list of legs as an input, with
    the reference dictionary n-1->1, and output a list of list of
    tuples of Legs (allowed combinations) and Legs (rest). Algorithm:

    1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

    2. For each combination, say [34]. Check if combination is valid.
       If so:

       a. Append [12[34]56] to result array

       b. Split [123456] at index(first element in combination+1),
          i.e. [12],[456] and subtract combination from second half,
          i.e.: [456]-[34]=[56]. Repeat from 1. with this array

    3. Take result array from call to 1. (here, [[56]]) and append
       (first half in step b - combination) + combination + (result
       from 1.) = [12[34][56]] to result array

    4. After appending results from all n-combinations, return
       resulting array. Example, if [13] and [45] are valid
       combinations:
       [[[13]2456],[[13]2[45]6],[123[45]6]]
    """

    res = []

    # Loop over all valid combination sizes (the n in n->1 vertices).
    for comb_length in range(2, max_multi_to1 + 1):

        # Not enough legs left for this (or any larger) size: done.
        if comb_length > len(list_legs):
            return res

        # Itertools gives all unordered comb_length-subsets, preserving
        # the relative order of the legs within each combination.
        for comb in itertools.combinations(list_legs, comb_length):

            # Check if the combination is valid (i.e. an n->1 vertex
            # exists for these leg ids, with from_group constraints met).
            if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):

                # Step 2a: append the list with the combination inserted
                # at the position of its first member, e.g. [12[34]56].
                res_list = copy.copy(list_legs)
                for leg in comb:
                    res_list.remove(leg)
                res_list.insert(list_legs.index(comb[0]), comb)
                res.append(res_list)

                # Step 2b: split the list at the first member of the
                # combination and remove the remaining members from the
                # second half, e.g. [12] and [456]-[4]=[56].
                res_list1 = list_legs[0:list_legs.index(comb[0])]
                res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
                for leg in comb[1:]:
                    res_list2.remove(leg)

                # Step 3: prefix = first half + the combination itself.
                res_list = res_list1
                res_list.append(comb)

                # Recurse on the remainder and append prefix + each
                # recursive result, e.g. [12[34]] + [[56]] = [12[34][56]].
                for item in self.combine_legs(res_list2,
                                              ref_dict_to1,
                                              max_multi_to1):
                    final_res_list = copy.copy(res_list)
                    final_res_list.extend(item)
                    res.append(final_res_list)

    return res
1104
1105
1107 """Takes a list of allowed leg combinations as an input and returns
1108 a set of lists where combinations have been properly replaced
1109 (one list per element in the ref_dict, so that all possible intermediate
1110 particles are included). For each list, give the list of vertices
1111 corresponding to the executed merging, group the two as a tuple.
1112 """
1113
1114 res = []
1115
1116 for comb_list in comb_lists:
1117
1118 reduced_list = []
1119 vertex_list = []
1120
1121 for entry in comb_list:
1122
1123
1124 if isinstance(entry, tuple):
1125
1126
1127
1128 leg_vert_ids = copy.copy(ref_dict_to1[\
1129 tuple(sorted([leg.get('id') for leg in entry]))])
1130
1131
1132 number = min([leg.get('number') for leg in entry])
1133
1134
1135 if len(filter(lambda leg: leg.get('state') == False,
1136 entry)) == 1:
1137 state = False
1138 else:
1139 state = True
1140
1141
1142
1143
1144
1145 new_leg_vert_ids = []
1146 if leg_vert_ids:
1147 new_leg_vert_ids = self.get_combined_legs(entry,
1148 leg_vert_ids,
1149 number,
1150 state)
1151
1152 reduced_list.append([l[0] for l in new_leg_vert_ids])
1153
1154
1155
1156
1157
1158 vlist = base_objects.VertexList()
1159 for (myleg, vert_id) in new_leg_vert_ids:
1160
1161 myleglist = base_objects.LegList(list(entry))
1162
1163 myleglist.append(myleg)
1164
1165 vlist.append(base_objects.Vertex(
1166 {'legs':myleglist,
1167 'id':vert_id}))
1168
1169 vertex_list.append(vlist)
1170
1171
1172
1173 else:
1174 cp_entry = copy.copy(entry)
1175
1176
1177
1178 if cp_entry.get('from_group') != None:
1179 cp_entry.set('from_group', False)
1180 reduced_list.append(cp_entry)
1181
1182
1183 flat_red_lists = expand_list(reduced_list)
1184 flat_vx_lists = expand_list(vertex_list)
1185
1186
1187 for i in range(0, len(flat_vx_lists)):
1188 res.append((base_objects.LegList(flat_red_lists[i]), \
1189 base_objects.VertexList(flat_vx_lists[i])))
1190
1191 return res
1192
1194 """Create a set of new legs from the info given. This can be
1195 overloaded by daughter classes."""
1196
1197 mylegs = [(base_objects.Leg({'id':leg_id,
1198 'number':number,
1199 'state':state,
1200 'from_group':True}),
1201 vert_id)\
1202 for leg_id, vert_id in leg_vert_ids]
1203
1204 return mylegs
1205
1207 """Allow for selection of vertex ids. This can be
1208 overloaded by daughter classes."""
1209
1210 return vert_ids
1211
1213 """Reduce the number of legs and vertices used in memory.
1214 When called by a diagram generation initiated by LoopAmplitude,
1215 this function should not trim the diagrams in the attribute 'diagrams'
1216 but rather a given list in the 'diaglist' argument."""
1217
1218 legs = []
1219 vertices = []
1220
1221 if diaglist is None:
1222 diaglist=self.get('diagrams')
1223
1224
1225 process = self.get('process')
1226 for leg in process.get('legs'):
1227 if leg.get('state') and leg.get('id') in decay_ids:
1228 leg.set('onshell', True)
1229
1230 for diagram in diaglist:
1231
1232 leg_external = set()
1233 for ivx, vertex in enumerate(diagram.get('vertices')):
1234 for ileg, leg in enumerate(vertex.get('legs')):
1235
1236 if leg.get('state') and leg.get('id') in decay_ids and \
1237 leg.get('number') not in leg_external:
1238
1239
1240 leg = copy.copy(leg)
1241 leg.set('onshell', True)
1242 try:
1243 index = legs.index(leg)
1244 except ValueError:
1245 vertex.get('legs')[ileg] = leg
1246 legs.append(leg)
1247 else:
1248 vertex.get('legs')[ileg] = legs[index]
1249 leg_external.add(leg.get('number'))
1250 try:
1251 index = vertices.index(vertex)
1252 diagram.get('vertices')[ivx] = vertices[index]
1253 except ValueError:
1254 vertices.append(vertex)
1255
1256
1257
1258
1259 -class AmplitudeList(base_objects.PhysicsObjectList):
1260 """List of Amplitude objects
1261 """
1262
1264 """ Check the content of all processes of the amplitudes in this list to
1265 see if there is any which defines perturbation couplings. """
1266
1267 for amp in self:
1268 if amp.has_loop_process():
1269 return True
1270
1272 """Test if object obj is a valid Amplitude for the list."""
1273
1274 return isinstance(obj, Amplitude)
1275
1280 """A list of amplitudes + a list of decay chain amplitude lists;
1281 corresponding to a ProcessDefinition with a list of decay chains
1282 """
1283
1289
    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False):
        """Allow initialization with Process and with ProcessDefinition"""

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Imported here (not at module level) to avoid a circular import
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            if isinstance(argument, base_objects.ProcessDefinition):
                # A ProcessDefinition may expand into several amplitudes
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                collect_mirror_procs,
                                                ignore_six_quark_processes))
            else:
                # A plain Process gives exactly one amplitude
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument))
                # Clean the decay chains from the core process, since they
                # have not been combined with the core process yet
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            # Recursively build a DecayChainAmplitude for every decay chain
            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    raise MadGraph5Error,\
                          "Decay processes can not be perturbed"
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd,\
                          "Decay chain process must have exactly one" + \
                          " incoming particle"
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes))

            # Collect the pdg ids of all decaying particles and flag the
            # matching legs in the core diagrams (via trim_diagrams)
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Remove from decay_ids every id that actually occurs in a core
            # process; what remains are decays with no core particle
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            if decay_ids:
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
                  "$RED Decay without corresponding particle in core process found.\n" + \
                  "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
                  "Please check your process definition carefully. \n" + \
                  "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
                  "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

            # Drop the unused decays; iterate in reverse so removing
            # elements does not disturb the remaining iteration
            for dc in reversed(self['decay_chains']):
                for a in reversed(dc.get('amplitudes')):
                    # decay_ids now holds only the unused ids (see above)
                    if a.get('process').get('legs')[0].get('id') in decay_ids:
                        dc.get('amplitudes').remove(a)
                if not dc.get('amplitudes'):
                    # No amplitudes left in this decay chain: drop it
                    self['decay_chains'].remove(dc)

            # Warn about decays where the decaying particle reappears in
            # its own final state
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
                  "$RED Decay(s) with particle decaying to itself:\n" + \
                  '\n'.join([p.nice_string() for p in bad_procs]) + \
                  "\nPlease check your process definition carefully. \n")

        elif argument != None:
            # Initialize with the given argument (e.g. a dictionary)
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # Default initialization
            super(DecayChainAmplitude, self).__init__()
    def filter(self, name, value):
        """Filter for valid amplitude property values.

        Presumably invoked by the PhysicsObject property machinery when a
        property is set (see base_objects) -- TODO confirm."""

        if name == 'amplitudes':
            if not isinstance(value, AmplitudeList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid AmplitudeList" % str(value)
        if name == 'decay_chains':
            if not isinstance(value, DecayChainAmplitudeList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid DecayChainAmplitudeList object" % \
                      str(value)
        # Properties not checked above are accepted unchanged
        return True
1405
1407 """Return diagram property names as a nicely sorted list."""
1408
1409 return ['amplitudes', 'decay_chains']
1410
1411
1412
1414 """Returns number of diagrams for this amplitude"""
1415 return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
1416 + sum(d.get_number_of_diagrams() for d in \
1417 self.get('decay_chains'))
1418
1420 """Returns a nicely formatted string of the amplitude content."""
1421 mystr = ""
1422 for amplitude in self.get('amplitudes'):
1423 mystr = mystr + amplitude.nice_string(indent) + "\n"
1424
1425 if self.get('decay_chains'):
1426 mystr = mystr + " " * indent + "Decays:\n"
1427 for dec in self.get('decay_chains'):
1428 mystr = mystr + dec.nice_string(indent + 2) + "\n"
1429
1430 return mystr[:-1]
1431
1433 """Returns a nicely formatted string of the amplitude processes."""
1434 mystr = ""
1435 for amplitude in self.get('amplitudes'):
1436 mystr = mystr + amplitude.nice_string_processes(indent) + "\n"
1437
1438 if self.get('decay_chains'):
1439 mystr = mystr + " " * indent + "Decays:\n"
1440 for dec in self.get('decay_chains'):
1441 mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"
1442
1443 return mystr[:-1]
1444
1446 """Returns the number of initial state particles in the process."""
1447 return self.get('amplitudes')[0].get('process').get_ninitial()
1448
1450 """Returns a set of all particle ids for which a decay is defined"""
1451
1452 decay_ids = []
1453
1454
1455 for amp in sum([dc.get('amplitudes') for dc \
1456 in self['decay_chains']], []):
1457
1458 decay_ids.append(amp.get('process').get_initial_ids()[0])
1459
1460
1461 return list(set(decay_ids))
1462
1464 """ Returns wether this amplitude has a loop process."""
1465 return self['amplitudes'].has_any_loop_process()
1466
1468 """Recursive function to extract all amplitudes for this process"""
1469
1470 amplitudes = AmplitudeList()
1471
1472 amplitudes.extend(self.get('amplitudes'))
1473 for decay in self.get('decay_chains'):
1474 amplitudes.extend(decay.get_amplitudes())
1475
1476 return amplitudes
1477
1483 """List of DecayChainAmplitude objects
1484 """
1485
1487 """Test if object obj is a valid DecayChainAmplitude for the list."""
1488
1489 return isinstance(obj, DecayChainAmplitude)
1490
1491
1492
1493
1494
class MultiProcess(base_objects.PhysicsObject):
    """MultiProcess: list of process definitions
    list of processes (after cleaning)
    list of amplitudes (after generation)
    """

    def default_setup(self):
        """Default values for all properties"""

        self['process_definitions'] = base_objects.ProcessDefinitionList()

        # 'amplitudes' is filled lazily on first access (see get())
        self['amplitudes'] = AmplitudeList()

        # When True (and 2 initial states), q q~ > X and q~ q > X are
        # combined as mirror processes instead of generated twice
        self['collect_mirror_procs'] = False

        # List of quark pdg codes: processes with >= 6 such external
        # (anti)quarks are skipped during generation
        self['ignore_six_quark_processes'] = []

        # When True, 1->N decays that are kinematically closed (final-state
        # mass sum >= initial mass) are skipped numerically
        self['use_numerical'] = False
1517
1518 - def __init__(self, argument=None, collect_mirror_procs = False,
1519 ignore_six_quark_processes = [], optimize=False):
1545
1546
    def filter(self, name, value):
        """Filter for valid process property values.

        Presumably invoked by the PhysicsObject property machinery when a
        property is set (see base_objects) -- TODO confirm."""

        if name == 'process_definitions':
            if not isinstance(value, base_objects.ProcessDefinitionList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid ProcessDefinitionList object" % str(value)

        if name == 'amplitudes':
            if not isinstance(value, AmplitudeList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid AmplitudeList object" % str(value)

        if name in ['collect_mirror_procs']:
            if not isinstance(value, bool):
                raise self.PhysicsObjectError, \
                      "%s is not a valid boolean" % str(value)

        if name == 'ignore_six_quark_processes':
            if not isinstance(value, list):
                raise self.PhysicsObjectError, \
                      "%s is not a valid list" % str(value)

        # Properties not checked above are accepted unchanged
        return True
1571
    def get(self, name):
        """Get the value of the property name.

        'amplitudes' is generated lazily: the first access triggers
        diagram generation for all process definitions."""

        if (name == 'amplitudes') and not self[name]:
            for process_def in self.get('process_definitions'):
                if process_def.get('decay_chains'):
                    # A decay chain process: delegate the recursive
                    # generation to DecayChainAmplitude
                    self['amplitudes'].append(\
                        DecayChainAmplitude(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes')))
                else:
                    # Plain multiprocess: expand into individual amplitudes
                    self['amplitudes'].extend(\
                        self.generate_multi_amplitudes(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes'),
                                        self['use_numerical']))

        # Delegate the actual lookup to the parent class
        return MultiProcess.__bases__[0].get(self, name)
1592
1594 """Return process property names as a nicely sorted list."""
1595
1596 return ['process_definitions', 'amplitudes']
1597
    @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.
        """
        # NOTE(review): ignore_six_quark_processes has a mutable default
        # argument; it is only read here, never mutated, so it is harmless
        # as long as that stays true.
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

        # Determine and set the minimal coupling orders if none were given
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition))
        # Check the expansion orders against the model maxima
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs: sorted outgoing-id signatures of processes which
        # already failed diagram generation (reused via crossing symmetry)
        failed_procs = []
        success_procs = []
        # non_permuted_procs: leg-id arrays of generated processes, used to
        # identify mirror processes
        non_permuted_procs = []
        # permutations[i]: leg permutation corresponding to success_procs[i]
        permutations = []

        model = process_definition['model']

        # Lists of allowed ids per initial/final-state leg
        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]

        # Loop over all combinations of initial-state ids
        for prod in itertools.product(*isids):
            islegs = [\
                    base_objects.Leg({'id':id, 'state': False}) \
                    for id in prod]

            # Loop over final-state combinations; final states identical up
            # to ordering are generated only once
            red_fsidlist = []

            for prod in itertools.product(*fsids):

                # Remove double counting between final states
                if tuple(sorted(prod)) in red_fsidlist:
                    continue

                red_fsidlist.append(tuple(sorted(prod)));

                # Generate the leg list for this particular process
                leg_list = [copy.copy(leg) for leg in islegs]

                leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True}) \
                        for id in prod])

                legs = base_objects.LegList(leg_list)

                # Sorted outgoing ids (paired with their original positions)
                # give a crossing-invariant signature for this process
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]
                # Compact 'i' array: cheap to compare and store
                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Skip processes with six or more ignored (anti)quarks
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # If a crossing of this process already failed, skip it
                if sorted_legs in failed_procs:
                    continue

                # Numerical check: skip decays that are closed by mass
                if use_numerical:
                    # Skip massless initial states
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    # Subtract the final-state masses; if nothing is left,
                    # the decay is kinematically forbidden
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Set up the process based on the selected legs
                process = process_definition.get_process_with_legs(legs)

                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                        process_definition.get_ninitial() == 2:
                    # Check if the mirrored process has been generated
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                               amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # No mirror process found; generate below.
                        # NOTE(review): list.index signals a miss with
                        # ValueError; this broad except also hides unrelated
                        # errors and could be narrowed.
                        pass
                    else:
                        # Mirror process found: flag it and skip generation
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Look for a successfully generated crossing of this process
                # (only allowed when no s-channel restrictions apply)
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain'):
                    try:
                        crossed_index = success_procs.index(sorted_legs)

                        # Loop amplitudes cannot be reused by crossing;
                        # force the "not found" branch below
                        if 'loop_diagrams' in amplitudes[crossed_index]:
                            raise ValueError
                    except ValueError:
                        # No crossing available; generate from scratch
                        pass
                    else:
                        # Reuse the diagrams of the crossed process
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Create a new amplitude and generate its diagrams
                amplitude = cls.get_amplitude_from_proc(process)

                try:
                    result = amplitude.generate_diagrams()
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    if amplitude.get('diagrams'):
                        # Succeeded: bookkeep for crossing/mirror reuse
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Diagram generation failed for a physics reason
                        failed_procs.append(sorted_legs)

        # Raise an error if no amplitudes were generated at all
        if not amplitudes:
            # 'error' in locals(): an InvalidCmd was caught above; if it was
            # the only failure, re-raise the original exception
            if len(failed_procs) == 1 and 'error' in locals():
                raise error
            else:
                raise NoDiagramException, \
                   "No amplitudes generated from process %s. Please enter a valid process" % \
                      process_definition.nice_string()

        return amplitudes
1784
1785 @classmethod
1787 """ Return the correct amplitude type according to the characteristics of
1788 the process proc """
1789 return Amplitude({"process": proc})
1790
1791
1792 @staticmethod
1794 """Find the minimal WEIGHTED order for this set of processes.
1795
1796 The algorithm:
1797
1798 1) Check the coupling hierarchy of the model. Assign all
1799 particles to the different coupling hierarchies so that a
1800 particle is considered to be in the highest hierarchy (i.e.,
1801 with lowest value) where it has an interaction.
1802
1803 2) Pick out the legs in the multiprocess according to the
1804 highest hierarchy represented (so don't mix particles from
1805 different hierarchy classes in the same multiparticles!)
1806
1807 3) Find the starting maximum WEIGHTED order as the sum of the
1808 highest n-2 weighted orders
1809
1810 4) Pick out required s-channel particle hierarchies, and use
1811 the highest of the maximum WEIGHTED order from the legs and
1812 the minimum WEIGHTED order extracted from 2*s-channel
1813 hierarchys plus the n-2-2*(number of s-channels) lowest
1814 leg weighted orders.
1815
1816 5) Run process generation with the WEIGHTED order determined
1817 in 3)-4) - # final state gluons, with all gluons removed from
1818 the final state
1819
1820 6) If no process is found, increase WEIGHTED order by 1 and go
1821 back to 5), until we find a process which passes. Return that
1822 order.
1823
1824 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1.
1825 If still no process has passed, return
1826 WEIGHTED = (n-2)*(highest hierarchy)
1827 """
1828
1829 assert isinstance(process_definition, base_objects.ProcessDefinition), \
1830 "%s not valid ProcessDefinition object" % \
1831 repr(process_definition)
1832
1833 processes = base_objects.ProcessList()
1834 amplitudes = AmplitudeList()
1835
1836
1837 if process_definition.get('orders') or \
1838 process_definition.get('overall_orders') or \
1839 process_definition.get('NLO_mode')=='virt':
1840 return process_definition.get('orders')
1841
1842
1843 if process_definition.get_ninitial() == 1 and not \
1844 process_definition.get('is_decay_chain'):
1845 return process_definition.get('orders')
1846
1847 logger.info("Checking for minimal orders which gives processes.")
1848 logger.info("Please specify coupling orders to bypass this step.")
1849
1850
1851 max_order_now, particles, hierarchy = \
1852 process_definition.get_minimum_WEIGHTED()
1853 coupling = 'WEIGHTED'
1854
1855 model = process_definition.get('model')
1856
1857
1858 isids = [leg['ids'] for leg in \
1859 filter(lambda leg: leg['state'] == False, process_definition['legs'])]
1860 fsids = [leg['ids'] for leg in \
1861 filter(lambda leg: leg['state'] == True, process_definition['legs'])]
1862
1863 max_WEIGHTED_order = \
1864 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED())
1865
1866
1867
1868 while max_order_now < max_WEIGHTED_order:
1869
1870 logger.info("Trying coupling order WEIGHTED=%d" % max_order_now)
1871
1872 oldloglevel = logger.level
1873 logger.setLevel(logging.WARNING)
1874
1875
1876
1877 failed_procs = []
1878
1879
1880 for prod in apply(itertools.product, isids):
1881 islegs = [ base_objects.Leg({'id':id, 'state': False}) \
1882 for id in prod]
1883
1884
1885
1886
1887 red_fsidlist = []
1888
1889 for prod in apply(itertools.product, fsids):
1890
1891
1892 if tuple(sorted(prod)) in red_fsidlist:
1893 continue
1894
1895 red_fsidlist.append(tuple(sorted(prod)));
1896
1897
1898
1899 nglue = 0
1900 if 21 in particles[0]:
1901 nglue = len([id for id in prod if id == 21])
1902 prod = [id for id in prod if id != 21]
1903
1904
1905 leg_list = [copy.copy(leg) for leg in islegs]
1906
1907 leg_list.extend([\
1908 base_objects.Leg({'id':id, 'state': True}) \
1909 for id in prod])
1910
1911 legs = base_objects.LegList(leg_list)
1912
1913
1914
1915 coupling_orders_now = {coupling: max_order_now - \
1916 nglue * model['order_hierarchy']['QCD']}
1917
1918
1919 process = base_objects.Process({\
1920 'legs':legs,
1921 'model':model,
1922 'id': process_definition.get('id'),
1923 'orders': coupling_orders_now,
1924 'required_s_channels': \
1925 process_definition.get('required_s_channels'),
1926 'forbidden_onsh_s_channels': \
1927 process_definition.get('forbidden_onsh_s_channels'),
1928 'sqorders_types': \
1929 process_definition.get('sqorders_types'),
1930 'squared_orders': \
1931 process_definition.get('squared_orders'),
1932 'split_orders': \
1933 process_definition.get('split_orders'),
1934 'forbidden_s_channels': \
1935 process_definition.get('forbidden_s_channels'),
1936 'forbidden_particles': \
1937 process_definition.get('forbidden_particles'),
1938 'is_decay_chain': \
1939 process_definition.get('is_decay_chain'),
1940 'overall_orders': \
1941 process_definition.get('overall_orders'),
1942 'split_orders': \
1943 process_definition.get('split_orders')})
1944
1945
1946 process.check_expansion_orders()
1947
1948
1949 sorted_legs = sorted(legs.get_outgoing_id_list(model))
1950
1951
1952 if tuple(sorted_legs) in failed_procs:
1953 continue
1954
1955 amplitude = Amplitude({'process': process})
1956 try:
1957 amplitude.generate_diagrams()
1958 except InvalidCmd:
1959 failed_procs.append(tuple(sorted_legs))
1960 else:
1961 if amplitude.get('diagrams'):
1962
1963 logger.setLevel(oldloglevel)
1964 return {coupling: max_order_now}
1965 else:
1966 failed_procs.append(tuple(sorted_legs))
1967
1968
1969 max_order_now += 1
1970 logger.setLevel(oldloglevel)
1971
1972
1973 return {coupling: max_order_now}
1974
1975 @staticmethod
1977 """Return the amplitude crossed with the permutation new_perm"""
1978
1979 perm_map = dict(zip(org_perm, new_perm))
1980
1981 new_amp = copy.copy(amplitude)
1982
1983 for i, leg in enumerate(process.get('legs')):
1984 leg.set('number', i+1)
1985
1986 new_amp.set('process', process)
1987
1988 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map,
1989 process.get('legs'),) for \
1990 d in new_amp.get('diagrams')])
1991 new_amp.set('diagrams', diagrams)
1992 new_amp.trim_diagrams()
1993
1994
1995 new_amp.set('has_mirror_process', False)
1996
1997 return new_amp
1998
2004 """Takes a list of lists and elements and returns a list of flat lists.
2005 Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
2006 """
2007
2008
2009 assert isinstance(mylist, list), "Expand_list argument must be a list"
2010
2011 res = []
2012
2013 tmplist = []
2014 for item in mylist:
2015 if isinstance(item, list):
2016 tmplist.append(item)
2017 else:
2018 tmplist.append([item])
2019
2020 for item in apply(itertools.product, tmplist):
2021 res.append(list(item))
2022
2023 return res
2024
2026 """Recursive function. Takes a list of lists and lists of lists
2027 and returns a list of flat lists.
2028 Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
2029 """
2030
2031 res = []
2032
2033 if not mylist or len(mylist) == 1 and not mylist[0]:
2034 return [[]]
2035
2036
2037 assert isinstance(mylist[0], list), \
2038 "Expand_list_list needs a list of lists and lists of lists"
2039
2040
2041 if len(mylist) == 1:
2042 if isinstance(mylist[0][0], list):
2043 return mylist[0]
2044 else:
2045 return mylist
2046
2047 if isinstance(mylist[0][0], list):
2048 for item in mylist[0]:
2049
2050
2051
2052 for rest in expand_list_list(mylist[1:]):
2053 reslist = copy.copy(item)
2054 reslist.extend(rest)
2055 res.append(reslist)
2056 else:
2057 for rest in expand_list_list(mylist[1:]):
2058 reslist = copy.copy(mylist[0])
2059 reslist.extend(rest)
2060 res.append(reslist)
2061
2062
2063 return res
2064