1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """Classes for diagram generation. Amplitude performs the diagram
16 generation, DecayChainAmplitude keeps track of processes with decay
17 chains, and MultiProcess allows generation of processes with
18 multiparticle definitions. DiagramTag allows to identify diagrams
19 based on relevant properties.
20 """
21
22 import array
23 import copy
24 import itertools
25 import logging
26
27 import madgraph.core.base_objects as base_objects
28 import madgraph.various.misc as misc
29 from madgraph import InvalidCmd
30 logger = logging.getLogger('madgraph.diagram_generation')
34
40 """Class to tag diagrams based on objects with some __lt__ measure, e.g.
41 PDG code/interaction id (for comparing diagrams from the same amplitude),
42 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs).
43 Algorithm: Create chains starting from external particles:
44 1 \ / 6
45 2 /\______/\ 7
46 3_ / | \_ 8
47 4 / 5 \_ 9
48 \ 10
49 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867)
50 (((1,2,id12),(3,4,id34)),id1234),
51 5,id91086712345)
52 where idN is the id of the corresponding interaction. The ordering within
53 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has
depth 4, 5 has depth 0), and if equal on the ordering of the chain elements.
55 The determination of central vertex is based on minimizing the chain length
56 for the longest subchain.
57 This gives a unique tag which can be used to identify diagrams
58 (instead of symmetry), as well as identify identical matrix elements from
59 different processes."""
60
62 """Exception for any problems in DiagramTags"""
63 pass
64
    def __init__(self, diagram, model=None, ninitial=2):
        """Initialize with a diagram. Create DiagramTagChainLinks according to
        the diagram, and figure out if we need to shift the central vertex.

        diagram:  the base_objects.Diagram to be tagged
        model:    passed through to link_from_leg/vertex_id_from_vertex
                  (daughter classes decide what, if anything, they need it for)
        ninitial: number of initial-state particles (default 2)
        """

        # Map from leg number -> DiagramTagChainLink for the chain that
        # ends in that internal (propagator) leg
        leg_dict = {}

        for vertex in diagram.get('vertices'):

            # For a normal vertex, the last leg is the outgoing propagator
            # and only the other legs are daughters of the new link
            legs = vertex.get('legs')[:-1]
            lastvx = vertex == diagram.get('vertices')[-1]
            if lastvx:
                # For the final vertex, all legs are incoming
                legs = vertex.get('legs')

            # Build the new chain link from the links of the incoming legs.
            # External legs (numbers not yet in leg_dict) become end links
            # created from link_from_leg.
            link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                        DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                        for leg in legs],
                                       self.vertex_id_from_vertex(vertex,
                                                                  lastvx,
                                                                  model,
                                                                  ninitial))

            if not lastvx:
                # Register this chain under its propagator leg number
                leg_dict[vertex.get('legs')[-1].get('number')] = link

        # The link built from the final vertex is the preliminary tag
        self.tag = link

        # Shift the central vertex to minimize the length of the longest
        # subchain (see class docstring): repeatedly try to re-root the
        # tree one step down into the deepest daughter, keeping the move
        # only while it improves the link ordering.
        done = max([l.depth for l in self.tag.links]) == 0
        while not done:
            # Links are kept sorted, so the deepest chain comes first
            longest_chain = self.tag.links[0]
            # Link made from the remaining daughters of the current root
            new_link = DiagramTagChainLink(self.tag.links[1:],
                                           self.flip_vertex(\
                                               self.tag.vertex_id,
                                               longest_chain.vertex_id,
                                               self.tag.links[1:]))

            # Candidate new root: the daughters of the longest chain plus
            # the link replacing the old root
            other_links = list(longest_chain.links) + [new_link]
            other_link = DiagramTagChainLink(other_links,
                                             self.flip_vertex(\
                                                 longest_chain.vertex_id,
                                                 self.tag.vertex_id,
                                                 other_links))

            if other_link.links[0] < self.tag.links[0]:
                # The re-rooted tree compares smaller: accept and iterate
                self.tag = other_link
            else:
                # No further improvement possible
                done = True
125
127 """Output a diagram from a DiagramTag. Note that each daughter
128 class must implement the static functions id_from_vertex_id
129 (if the vertex id is something else than an integer) and
130 leg_from_link (to pass the correct info from an end link to a
131 leg)."""
132
133
134 diagram = base_objects.Diagram({'vertices': \
135 self.vertices_from_link(self.tag,
136 model,
137 True)})
138 diagram.calculate_orders(model)
139 return diagram
140
141 @classmethod
143 """Recursively return the leg corresponding to this link and
144 the list of all vertices from all previous links"""
145
146 if link.end_link:
147
148 return cls.leg_from_link(link), []
149
150
151 leg_vertices = [cls.vertices_from_link(l, model) for l in link.links]
152
153 legs = base_objects.LegList(sorted([l for l,v in leg_vertices],
154 lambda l1,l2: l2.get('number') - \
155 l1.get('number')))
156
157 vertices = base_objects.VertexList(sum([v for l, v in leg_vertices],
158 []))
159
160 if not first_vertex:
161
162
163 last_leg = cls.leg_from_legs(legs,link.vertex_id,model)
164 legs.append(last_leg)
165
166
167 vertices.append(cls.vertex_from_link(legs,
168 link.vertex_id,
169 model))
170 if first_vertex:
171
172 return vertices
173 else:
174
175 return last_leg, vertices
176
177 @classmethod
179 """Returns the list of external PDGs of the interaction corresponding
180 to this vertex_id."""
181
182
183
184
185 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]):
186 return vertex_id[2]['PDGs']
187 else:
188 return [part.get_pdg_code() for part in model.get_interaction(
189 cls.id_from_vertex_id(vertex_id)).get('particles')]
190
191 @classmethod
193 """Return a leg from a leg list and the model info"""
194
195 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model))
196
197
198 for pdg in [leg.get('id') for leg in legs]:
199 pdgs.remove(pdg)
200
201 assert len(pdgs) == 1
202
203 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code()
204 number = min([l.get('number') for l in legs])
205
206 state = (len([l for l in legs if l.get('state') == False]) != 1)
207
208 onshell= False
209
210 return base_objects.Leg({'id': pdg,
211 'number': number,
212 'state': state,
213 'onshell': onshell})
214
215 @classmethod
228
229 @staticmethod
231 """Return a leg from a link"""
232
233 if link.end_link:
234
235 return base_objects.Leg({'number':link.links[0][1],
236 'id':link.links[0][0][0],
237 'state':(link.links[0][0][1] == 0),
238 'onshell':False})
239
240
241 assert False
242
243 @staticmethod
245 """Return the numerical vertex id from a link.vertex_id"""
246
247 return vertex_id[0][0]
248
249 @staticmethod
251 """Return the loop_info stored in this vertex id. Notice that the
252 IdentifyME tag does not store the loop_info, but should normally never
253 need access to it."""
254
255 return vertex_id[2]
256
257 @staticmethod
259 """Reorder a permutation with respect to start_perm. Note that
260 both need to start from 1."""
261 if perm == start_perm:
262 return range(len(perm))
263 order = [i for (p,i) in \
264 sorted([(p,i) for (i,p) in enumerate(perm)])]
265 return [start_perm[i]-1 for i in order]
266
267 @staticmethod
269 """Returns the default end link for a leg: ((id, state), number).
270 Note that the number is not taken into account if tag comparison,
271 but is used only to extract leg permutations."""
272 if leg.get('state'):
273
274 return [((leg.get('id'), 0), leg.get('number'))]
275 else:
276
277 return [((leg.get('id'), leg.get('number')), leg.get('number'))]
278
279 @staticmethod
281 """Returns the default vertex id: just the interaction id
282 Note that in the vertex id, like the leg, only the first entry is
283 taken into account in the tag comparison, while the second is for
284 storing information that is not to be used in comparisons and the
285 third for additional info regarding the shrunk loop vertex."""
286
287 if isinstance(vertex,base_objects.ContractedVertex):
288
289 return ((vertex.get('id'),vertex.get('loop_tag')),(),
290 {'PDGs':vertex.get('PDGs')})
291 else:
292 return ((vertex.get('id'),()),(),{})
293
294 @staticmethod
296 """Returns the default vertex flip: just the new_vertex"""
297 return new_vertex
298
300 """Equal if same tag"""
301 if type(self) != type(other):
302 return False
303 return self.tag == other.tag
304
306 return not self.__eq__(other)
307
310
312 return self.tag < other.tag
313
315 return self.tag > other.tag
316
317 __repr__ = __str__
318
320 """Chain link for a DiagramTag. A link is a tuple + vertex id + depth,
321 with a comparison operator defined"""
322
323 - def __init__(self, objects, vertex_id = None):
324 """Initialize, either with a tuple of DiagramTagChainLinks and
325 a vertex_id (defined by DiagramTag.vertex_id_from_vertex), or
326 with an external leg object (end link) defined by
327 DiagramTag.link_from_leg"""
328
329 if vertex_id == None:
330
331 self.links = tuple(objects)
332 self.vertex_id = (0,)
333 self.depth = 0
334 self.end_link = True
335 return
336
337 self.links = tuple(sorted(list(tuple(objects)), reverse=True))
338 self.vertex_id = vertex_id
339
340
341 self.depth = sum([l.depth for l in self.links],
342 max(1, len(self.links)-1))
343 self.end_link = False
344
346 """Get the permutation of external numbers (assumed to be the
347 second entry in the end link tuples)"""
348
349 if self.end_link:
350 return [self.links[0][1]]
351
352 return sum([l.get_external_numbers() for l in self.links], [])
353
355 """Compare self with other in the order:
356 1. depth 2. len(links) 3. vertex id 4. measure of links"""
357
358 if self == other:
359 return False
360
361 if self.depth != other.depth:
362 return self.depth < other.depth
363
364 if len(self.links) != len(other.links):
365 return len(self.links) < len(other.links)
366
367 if self.vertex_id[0] != other.vertex_id[0]:
368 return self.vertex_id[0] < other.vertex_id[0]
369
370 for i, link in enumerate(self.links):
371 if i > len(other.links) - 1:
372 return False
373 if link != other.links[i]:
374 return link < other.links[i]
375
377 return self != other and not self.__lt__(other)
378
380 """For end link,
381 consider equal if self.links[0][0] == other.links[0][0],
382 i.e., ignore the leg number (in links[0][1])."""
383
384 if self.end_link and other.end_link and self.depth == other.depth \
385 and self.vertex_id == other.vertex_id:
386 return self.links[0][0] == other.links[0][0]
387
388 return self.end_link == other.end_link and self.depth == other.depth \
389 and self.vertex_id[0] == other.vertex_id[0] \
390 and self.links == other.links
391
393 return not self.__eq__(other)
394
395
397 if self.end_link:
398 return str(self.links)
399 return "%s, %s; %d" % (str(self.links),
400 str(self.vertex_id),
401 self.depth)
402
403 __repr__ = __str__
404
405
406
407
408 -class Amplitude(base_objects.PhysicsObject):
409 """Amplitude: process + list of diagrams (ordered)
410 Initialize with a process, then call generate_diagrams() to
411 generate the diagrams for the amplitude
412 """
413
415 """Default values for all properties"""
416
417 self['process'] = base_objects.Process()
418 self['diagrams'] = None
419
420
421 self['has_mirror_process'] = False
422
435
436 - def filter(self, name, value):
452
453 - def get(self, name):
462
463
464
466 """Return diagram property names as a nicely sorted list."""
467
468 return ['process', 'diagrams', 'has_mirror_process']
469
471 """Returns number of diagrams for this amplitude"""
472 return len(self.get('diagrams'))
473
475 """Return an AmplitudeList with just this amplitude.
476 Needed for DecayChainAmplitude."""
477
478 return AmplitudeList([self])
479
481 """Returns a nicely formatted string of the amplitude content."""
482 return self.get('process').nice_string(indent) + "\n" + \
483 self.get('diagrams').nice_string(indent)
484
486 """Returns a nicely formatted string of the amplitude process."""
487 return self.get('process').nice_string(indent)
488
490 """Returns the number of initial state particles in the process."""
491 return self.get('process').get_ninitial()
492
494 """ Returns wether this amplitude has a loop process."""
495
496 return self.get('process').get('perturbation_couplings')
497
499 """Generate diagrams. Algorithm:
500
501 1. Define interaction dictionaries:
502 * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
503 * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1
504
505 2. Set flag from_group=true for all external particles.
506 Flip particle/anti particle for incoming particles.
507
508 3. If there is a dictionary n->0 with n=number of external
509 particles, create if possible the combination [(1,2,3,4,...)]
510 with *at least two* from_group==true. This will give a
511 finished (set of) diagram(s) (done by reduce_leglist)
512
513 4. Create all allowed groupings of particles with at least one
514 from_group==true (according to dictionaries n->1):
515 [(1,2),3,4...],[1,(2,3),4,...],...,
516 [(1,2),(3,4),...],...,[(1,2,3),4,...],...
517 (done by combine_legs)
518
519 5. Replace each group with a (list of) new particle(s) with number
520 n = min(group numbers). Set from_group true for these
521 particles and false for all other particles. Store vertex info.
522 (done by merge_comb_legs)
523
524 6. Stop algorithm when at most 2 particles remain.
525 Return all diagrams (lists of vertices).
526
527 7. Repeat from 3 (recursion done by reduce_leglist)
528
529 8. Replace final p=p vertex
530
531 Be aware that the resulting vertices have all particles outgoing,
532 so need to flip for incoming particles when used.
533
534 SPECIAL CASE: For A>BC... processes which are legs in decay
535 chains, we need to ensure that BC... combine first, giving A=A
536 as a final vertex. This case is defined by the Process
537 property is_decay_chain = True.
538 This function can also be called by the generate_diagram function
539 of LoopAmplitudes, in which case the generated diagrams here must not
540 be directly assigned to the 'diagrams' attributed but returned as a
541 DiagramList by the function. This is controlled by the argument
542 returndiag.
543 """
544
545 process = self.get('process')
546 model = process.get('model')
547 legs = process.get('legs')
548
549 for key in process.get('overall_orders').keys():
550 try:
551 process.get('orders')[key] = \
552 min(process.get('orders')[key],
553 process.get('overall_orders')[key])
554 except KeyError:
555 process.get('orders')[key] = process.get('overall_orders')[key]
556
557 assert model.get('particles'), \
558 "particles are missing in model: %s" % model.get('particles')
559
560 assert model.get('interactions'), \
561 "interactions are missing in model"
562
563
564 res = base_objects.DiagramList()
565
566 if len(filter(lambda leg: model.get('particle_dict')[\
567 leg.get('id')].is_fermion(), legs)) % 2 == 1:
568 if not returndiag:
569 self['diagrams'] = res
570 raise InvalidCmd, 'The number of fermion is odd'
571 else:
572 return False, res
573
574
575
576 if not model.get('got_majoranas') and \
577 len(filter(lambda leg: leg.is_incoming_fermion(model), legs)) != \
578 len(filter(lambda leg: leg.is_outgoing_fermion(model), legs)):
579 if not returndiag:
580 self['diagrams'] = res
581 raise InvalidCmd, 'The number of of incoming/outcoming fermions are different'
582 else:
583 return False, res
584
585
586
587 for charge in model.get('conserved_charge'):
588 total = 0
589 for leg in legs:
590 part = model.get('particle_dict')[leg.get('id')]
591 try:
592 value = part.get(charge)
593 except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
594 try:
595 value = getattr(part, charge)
596 except AttributeError:
597 value = 0
598
599 if (leg.get('id') != part['pdg_code']) != leg['state']:
600 total -= value
601 else:
602 total += value
603
604 if abs(total) > 1e-10:
605 if not returndiag:
606 self['diagrams'] = res
607 raise InvalidCmd, 'No %s conservation for this process ' % charge
608 return res
609 else:
610 raise InvalidCmd, 'No %s conservation for this process ' % charge
611 return res, res
612
613 if not returndiag:
614 logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))
615
616
617 for i in range(0, len(process.get('legs'))):
618
619 leg = copy.copy(process.get('legs')[i])
620 process.get('legs')[i] = leg
621 if leg.get('number') == 0:
622 leg.set('number', i + 1)
623
624
625
626 leglist = self.copy_leglist(process.get('legs'))
627
628 for leg in leglist:
629
630
631
632 leg.set('from_group', True)
633
634
635
636 if leg.get('state') == False:
637 part = model.get('particle_dict')[leg.get('id')]
638 leg.set('id', part.get_anti_pdg_code())
639
640
641
642 max_multi_to1 = max([len(key) for key in \
643 model.get('ref_dict_to1').keys()])
644
645
646
647
648
649
650
651
652 is_decay_proc = process.get_ninitial() == 1
653 if is_decay_proc:
654 part = model.get('particle_dict')[leglist[0].get('id')]
655
656
657
658 ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
659 (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
660
661
662 leglist[0].set('from_group', None)
663 reduced_leglist = self.reduce_leglist(leglist,
664 max_multi_to1,
665 ref_dict_to0,
666 is_decay_proc,
667 process.get('orders'))
668 else:
669 reduced_leglist = self.reduce_leglist(leglist,
670 max_multi_to1,
671 model.get('ref_dict_to0'),
672 is_decay_proc,
673 process.get('orders'))
674
675
676
677
678 self.convert_dgleg_to_leg(reduced_leglist)
679
680 if reduced_leglist:
681 for vertex_list in reduced_leglist:
682 res.append(self.create_diagram(base_objects.VertexList(vertex_list)))
683
684
685
686 failed_crossing = not res
687
688
689
690
691
692
693 if process.get('required_s_channels') and \
694 process.get('required_s_channels')[0]:
695
696
697 lastvx = -1
698
699
700
701 if is_decay_proc: lastvx = -2
702 ninitial = len(filter(lambda leg: leg.get('state') == False,
703 process.get('legs')))
704
705 old_res = res
706 res = base_objects.DiagramList()
707 for id_list in process.get('required_s_channels'):
708 res_diags = filter(lambda diagram: \
709 all([req_s_channel in \
710 [vertex.get_s_channel_id(\
711 process.get('model'), ninitial) \
712 for vertex in diagram.get('vertices')[:lastvx]] \
713 for req_s_channel in \
714 id_list]), old_res)
715
716 res.extend([diag for diag in res_diags if diag not in res])
717
718
719
720
721
722 if process.get('forbidden_s_channels'):
723 ninitial = len(filter(lambda leg: leg.get('state') == False,
724 process.get('legs')))
725 if ninitial == 2:
726 res = base_objects.DiagramList(\
727 filter(lambda diagram: \
728 not any([vertex.get_s_channel_id(\
729 process.get('model'), ninitial) \
730 in process.get('forbidden_s_channels')
731 for vertex in diagram.get('vertices')[:-1]]),
732 res))
733 else:
734
735
736 newres= []
737 for diagram in res:
738 leg1 = 1
739
740
741
742 vertex = diagram.get('vertices')[-1]
743 if any([l['number'] ==1 for l in vertex.get('legs')]):
744 leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
745 to_loop = range(len(diagram.get('vertices'))-1)
746 if leg1 >1:
747 to_loop.reverse()
748 for i in to_loop:
749 vertex = diagram.get('vertices')[i]
750 if leg1:
751 if any([l['number'] ==leg1 for l in vertex.get('legs')]):
752 leg1 = 0
753 continue
754 if vertex.get_s_channel_id(process.get('model'), ninitial)\
755 in process.get('forbidden_s_channels'):
756 break
757 else:
758 newres.append(diagram)
759 res = base_objects.DiagramList(newres)
760
761
762
763
764 if process.get('forbidden_onsh_s_channels'):
765 ninitial = len(filter(lambda leg: leg.get('state') == False,
766 process.get('legs')))
767
768 verts = base_objects.VertexList(sum([[vertex for vertex \
769 in diagram.get('vertices')[:-1]
770 if vertex.get_s_channel_id(\
771 process.get('model'), ninitial) \
772 in process.get('forbidden_onsh_s_channels')] \
773 for diagram in res], []))
774 for vert in verts:
775
776 newleg = copy.copy(vert.get('legs').pop(-1))
777 newleg.set('onshell', False)
778 vert.get('legs').append(newleg)
779
780
781 for diagram in res:
782 diagram.calculate_orders(model)
783
784
785
786
787
788
789
790
791 if not returndiag and len(res)>0:
792 res = self.apply_squared_order_constraints(res)
793
794
795 if not process.get('is_decay_chain'):
796 for diagram in res:
797 vertices = diagram.get('vertices')
798 if len(vertices) > 1 and vertices[-1].get('id') == 0:
799
800
801
802
803 vertices = copy.copy(vertices)
804 lastvx = vertices.pop()
805 nexttolastvertex = copy.copy(vertices.pop())
806 legs = copy.copy(nexttolastvertex.get('legs'))
807 ntlnumber = legs[-1].get('number')
808 lastleg = filter(lambda leg: leg.get('number') != ntlnumber,
809 lastvx.get('legs'))[0]
810
811 if lastleg.get('onshell') == False:
812 lastleg.set('onshell', None)
813
814 legs[-1] = lastleg
815 nexttolastvertex.set('legs', legs)
816 vertices.append(nexttolastvertex)
817 diagram.set('vertices', vertices)
818
819 if res and not returndiag:
820 logger.info("Process has %d diagrams" % len(res))
821
822
823 self.trim_diagrams(diaglist=res)
824
825
826 pertur = 'QCD'
827 if self.get('process')['perturbation_couplings']:
828 pertur = sorted(self.get('process')['perturbation_couplings'])[0]
829 self.get('process').get('legs').sort(pert=pertur)
830
831
832 if not returndiag:
833 self['diagrams'] = res
834 return not failed_crossing
835 else:
836 return not failed_crossing, res
837
839 """Applies the user specified squared order constraints on the diagram
840 list in argument."""
841
842 res = copy.copy(diag_list)
843
844
845
846
847 while True:
848 new_res = res.apply_positive_sq_orders(res,
849 self['process'].get('squared_orders'),
850 self['process']['sqorders_types'])
851
852 if len(res)==len(new_res):
853 break
854 elif (len(new_res)>len(res)):
855 raise MadGraph5Error(
856 'Inconsistency in function apply_squared_order_constraints().')
857
858 res = new_res
859
860
861 neg_orders = [(order, value) for order, value in \
862 self['process'].get('squared_orders').items() if value<0]
863 if len(neg_orders)==1:
864 neg_order, neg_value = neg_orders[0]
865
866 res, target_order = res.apply_negative_sq_order(res, neg_order,\
867 neg_value, self['process']['sqorders_types'][neg_order])
868
869
870
871
872 self['process']['squared_orders'][neg_order]=target_order
873 elif len(neg_orders)>1:
874 raise InvalidCmd('At most one negative squared order constraint'+\
875 ' can be specified, not %s.'%str(neg_orders))
876
877 return res
878
880 """ Return a Diagram created from the vertex list. This function can be
881 overloaded by daughter classes."""
882 return base_objects.Diagram({'vertices':vertexlist})
883
885 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs.
886 In Amplitude, there is nothing to do. """
887
888 return True
889
891 """ Simply returns a copy of the leg list. This function is
892 overloaded in LoopAmplitude so that a DGLoopLeg list is returned.
893 The DGLoopLeg has some additional parameters only useful during
894 loop diagram generation"""
895
896 return base_objects.LegList(\
897 [ copy.copy(leg) for leg in legs ])
898
    def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                       is_decay_proc = False, coupling_orders = None):
        """Recursive function to reduce N LegList to N-1
        For algorithm, see doc for generate_diagrams.

        Returns a list of vertex lists (one per complete diagram found from
        this leg configuration), or None if this branch is a dead end.
        """

        # Result variable: a list of lists of vertices
        res = []

        # Stop condition: a None leg list means an earlier combination
        # step failed, so this branch cannot give a diagram
        if curr_leglist is None:
            return None

        # Extract ref dict information
        model = self.get('process').get('model')
        ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

        # If all legs can be combined to a single vertex (n->0), this
        # closes one or more complete diagrams
        if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):

            # Keep the combination only if allowed by daughter-class
            # selection (get_combined_vertices may filter vertex ids)
            vertex_ids = self.get_combined_vertices(curr_leglist,
                      copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                                  leg in curr_leglist]))]))

            final_vertices = [base_objects.Vertex({'legs':curr_leglist,
                                                   'id':vertex_id}) for \
                              vertex_id in vertex_ids]

            # Only keep final vertices whose couplings fit the remaining
            # coupling orders (reduce_orders returns None for "no orders
            # requested", which also counts as success — hence '!= False')
            for final_vertex in final_vertices:
                if self.reduce_orders(coupling_orders, model,
                                      [final_vertex.get('id')]) != False:
                    res.append([final_vertex])

        # Stop condition: two legs that could not combine to 0 above
        # cannot be reduced further
        if len(curr_leglist) == 2:
            if res:
                return res
            else:
                return None

        # Create a list of all valid n->1 combinations of the current legs
        comb_lists = self.combine_legs(curr_leglist,
                                       ref_dict_to1, max_multi_to1)

        # Replace each combination with the corresponding merged leg(s)
        # and the vertex (or vertices) performing the merge
        leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

        # Consider each merged configuration in turn
        for leg_vertex_tuple in leg_vertex_list:

            # Remove all configurations that produce a forbidden particle
            # as intermediate state
            if self.get('process').get('forbidden_particles') and \
                any([abs(vertex.get('legs')[-1].get('id')) in \
                self.get('process').get('forbidden_particles') \
                for vertex in leg_vertex_tuple[1]]):
                continue

            # Subtract the coupling orders used by the new vertices;
            # False means some order went negative
            new_coupling_orders = self.reduce_orders(coupling_orders,
                                                     model,
                                                     [vertex.get('id') for vertex in \
                                                      leg_vertex_tuple[1]])
            if new_coupling_orders == False:
                # Some coupling order was exceeded: prune this branch
                continue

            # Recurse with the reduced leg list and remaining orders
            reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  new_coupling_orders)

            if reduced_diagram:
                # Combine the vertices from this step with every completion
                # returned by the recursion (cartesian expansion)
                vertex_list_list = [list(leg_vertex_tuple[1])]
                vertex_list_list.append(reduced_diagram)
                expanded_list = expand_list_list(vertex_list_list)
                res.extend(expanded_list)

        return res
987
988 - def reduce_orders(self, coupling_orders, model, vertex_id_list):
989 """Return False if the coupling orders for any coupling is <
990 0, otherwise return the new coupling orders with the vertex
991 orders subtracted. If coupling_orders is not given, return
992 None (which counts as success).
993 WEIGHTED is a special order, which corresponds to the sum of
994 order hierarchies for the couplings.
995 We ignore negative constraints as these cannot be taken into
996 account on the fly but only after generation."""
997
998 if not coupling_orders:
999 return None
1000
1001 present_couplings = copy.copy(coupling_orders)
1002 for id in vertex_id_list:
1003
1004 if not id:
1005 continue
1006 inter = model.get("interaction_dict")[id]
1007 for coupling in inter.get('orders').keys():
1008
1009
1010 if coupling in present_couplings and \
1011 present_couplings[coupling]>=0:
1012
1013 present_couplings[coupling] -= \
1014 inter.get('orders')[coupling]
1015 if present_couplings[coupling] < 0:
1016
1017 return False
1018
1019 if 'WEIGHTED' in present_couplings and \
1020 present_couplings['WEIGHTED']>=0:
1021 weight = sum([model.get('order_hierarchy')[c]*n for \
1022 (c,n) in inter.get('orders').items()])
1023 present_couplings['WEIGHTED'] -= weight
1024 if present_couplings['WEIGHTED'] < 0:
1025
1026 return False
1027
1028 return present_couplings
1029
    def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
        """Recursive function. Take a list of legs as an input, with
        the reference dictionary n-1->1, and output a list of list of
        tuples of Legs (allowed combinations) and Legs (rest). Algorithm:

        1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

        2. For each combination, say [34]. Check if combination is valid.
        If so:

        a. Append [12[34]56] to result array

        b. Split [123456] at index(first element in combination+1),
        i.e. [12],[456] and subtract combination from second half,
        i.e.: [456]-[34]=[56]. Repeat from 1. with this array

        3. Take result array from call to 1. (here, [[56]]) and append
        (first half in step b - combination) + combination + (result
        from 1.) = [12[34][56]] to result array

        4. After appending results from all n-combinations, return
        resulting array. Example, if [13] and [45] are valid
        combinations:
        [[[13]2456],[[13]2[45]6],[123[45]6]]
        """

        res = []

        # Loop over possible combination lengths, from 2 up to the
        # largest multiplicity for which an (n-1)->1 interaction exists
        for comb_length in range(2, max_multi_to1 + 1):

            # Not enough legs left for a combination of this length;
            # longer combinations are impossible too, so stop here
            if comb_length > len(list_legs):
                return res

            # itertools.combinations gives all unordered n-tuples of legs
            for comb in itertools.combinations(list_legs, comb_length):

                # Check if the combination is valid, i.e. if there is an
                # interaction merging exactly these legs into one
                if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):

                    # Step 2a: append the full leg list with the
                    # combination inserted at the position of its first leg
                    res_list = copy.copy(list_legs)
                    for leg in comb:
                        res_list.remove(leg)
                    res_list.insert(list_legs.index(comb[0]), comb)
                    res.append(res_list)

                    # Step 2b: split the leg list just after the first
                    # combined leg, and drop the other combined legs from
                    # the second half
                    res_list1 = list_legs[0:list_legs.index(comb[0])]
                    res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
                    for leg in comb[1:]:
                        res_list2.remove(leg)

                    # Prefix for step 3: first half followed by the
                    # combination itself (res_list aliases res_list1 on
                    # purpose; it is recopied before each append below)
                    res_list = res_list1
                    res_list.append(comb)

                    # Step 3: recurse on the remaining legs and append
                    # prefix + each recursive result to the output
                    for item in self.combine_legs(res_list2,
                                                  ref_dict_to1,
                                                  max_multi_to1):
                        final_res_list = copy.copy(res_list)
                        final_res_list.extend(item)
                        res.append(final_res_list)

        return res
1103
1104
1106 """Takes a list of allowed leg combinations as an input and returns
1107 a set of lists where combinations have been properly replaced
1108 (one list per element in the ref_dict, so that all possible intermediate
1109 particles are included). For each list, give the list of vertices
1110 corresponding to the executed merging, group the two as a tuple.
1111 """
1112
1113 res = []
1114
1115 for comb_list in comb_lists:
1116
1117 reduced_list = []
1118 vertex_list = []
1119
1120 for entry in comb_list:
1121
1122
1123 if isinstance(entry, tuple):
1124
1125
1126
1127 leg_vert_ids = copy.copy(ref_dict_to1[\
1128 tuple(sorted([leg.get('id') for leg in entry]))])
1129
1130
1131 number = min([leg.get('number') for leg in entry])
1132
1133
1134 if len(filter(lambda leg: leg.get('state') == False,
1135 entry)) == 1:
1136 state = False
1137 else:
1138 state = True
1139
1140
1141
1142
1143
1144 new_leg_vert_ids = []
1145 if leg_vert_ids:
1146 new_leg_vert_ids = self.get_combined_legs(entry,
1147 leg_vert_ids,
1148 number,
1149 state)
1150
1151 reduced_list.append([l[0] for l in new_leg_vert_ids])
1152
1153
1154
1155
1156
1157 vlist = base_objects.VertexList()
1158 for (myleg, vert_id) in new_leg_vert_ids:
1159
1160 myleglist = base_objects.LegList(list(entry))
1161
1162 myleglist.append(myleg)
1163
1164 vlist.append(base_objects.Vertex(
1165 {'legs':myleglist,
1166 'id':vert_id}))
1167
1168 vertex_list.append(vlist)
1169
1170
1171
1172 else:
1173 cp_entry = copy.copy(entry)
1174
1175
1176
1177 if cp_entry.get('from_group') != None:
1178 cp_entry.set('from_group', False)
1179 reduced_list.append(cp_entry)
1180
1181
1182 flat_red_lists = expand_list(reduced_list)
1183 flat_vx_lists = expand_list(vertex_list)
1184
1185
1186 for i in range(0, len(flat_vx_lists)):
1187 res.append((base_objects.LegList(flat_red_lists[i]), \
1188 base_objects.VertexList(flat_vx_lists[i])))
1189
1190 return res
1191
1193 """Create a set of new legs from the info given. This can be
1194 overloaded by daughter classes."""
1195
1196 mylegs = [(base_objects.Leg({'id':leg_id,
1197 'number':number,
1198 'state':state,
1199 'from_group':True}),
1200 vert_id)\
1201 for leg_id, vert_id in leg_vert_ids]
1202
1203 return mylegs
1204
1206 """Allow for selection of vertex ids. This can be
1207 overloaded by daughter classes."""
1208
1209 return vert_ids
1210
1212 """Reduce the number of legs and vertices used in memory.
1213 When called by a diagram generation initiated by LoopAmplitude,
1214 this function should not trim the diagrams in the attribute 'diagrams'
1215 but rather a given list in the 'diaglist' argument."""
1216
1217 legs = []
1218 vertices = []
1219
1220 if diaglist is None:
1221 diaglist=self.get('diagrams')
1222
1223
1224 process = self.get('process')
1225 for leg in process.get('legs'):
1226 if leg.get('state') and leg.get('id') in decay_ids:
1227 leg.set('onshell', True)
1228
1229 for diagram in diaglist:
1230
1231 leg_external = set()
1232 for ivx, vertex in enumerate(diagram.get('vertices')):
1233 for ileg, leg in enumerate(vertex.get('legs')):
1234
1235 if leg.get('state') and leg.get('id') in decay_ids and \
1236 leg.get('number') not in leg_external:
1237
1238
1239 leg = copy.copy(leg)
1240 leg.set('onshell', True)
1241 try:
1242 index = legs.index(leg)
1243 except ValueError:
1244 vertex.get('legs')[ileg] = leg
1245 legs.append(leg)
1246 else:
1247 vertex.get('legs')[ileg] = legs[index]
1248 leg_external.add(leg.get('number'))
1249 try:
1250 index = vertices.index(vertex)
1251 diagram.get('vertices')[ivx] = vertices[index]
1252 except ValueError:
1253 vertices.append(vertex)
1254
1255
1256
1257
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects
    """

    def has_any_loop_process(self):
        """Check the content of all processes of the amplitudes in this list
        to see if there is any which defines perturbation couplings."""

        for amp in self:
            if amp.has_loop_process():
                return True
        # Explicitly return False instead of falling off the end (the
        # original implicitly returned None); both are falsy, so callers
        # are unaffected, but the boolean contract is now explicit.
        return False

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
1274
1279 """A list of amplitudes + a list of decay chain amplitude lists;
1280 corresponding to a ProcessDefinition with a list of decay chains
1281 """
1282
1288
1289 - def __init__(self, argument = None, collect_mirror_procs = False,
1290 ignore_six_quark_processes = False):
1291 """Allow initialization with Process and with ProcessDefinition"""
1292
1293 if isinstance(argument, base_objects.Process):
1294 super(DecayChainAmplitude, self).__init__()
1295 from madgraph.loop.loop_diagram_generation import LoopMultiProcess
1296 if argument['perturbation_couplings']:
1297 MultiProcessClass=LoopMultiProcess
1298 else:
1299 MultiProcessClass=MultiProcess
1300 if isinstance(argument, base_objects.ProcessDefinition):
1301 self['amplitudes'].extend(\
1302 MultiProcessClass.generate_multi_amplitudes(argument,
1303 collect_mirror_procs,
1304 ignore_six_quark_processes))
1305 else:
1306 self['amplitudes'].append(\
1307 MultiProcessClass.get_amplitude_from_proc(argument))
1308
1309
1310 process = copy.copy(self.get('amplitudes')[0].get('process'))
1311 process.set('decay_chains', base_objects.ProcessList())
1312 self['amplitudes'][0].set('process', process)
1313
1314 for process in argument.get('decay_chains'):
1315 if process.get('perturbation_couplings'):
1316 raise MadGraph5Error,\
1317 "Decay processes can not be perturbed"
1318 process.set('overall_orders', argument.get('overall_orders'))
1319 if not process.get('is_decay_chain'):
1320 process.set('is_decay_chain',True)
1321 if not process.get_ninitial() == 1:
1322 raise InvalidCmd,\
1323 "Decay chain process must have exactly one" + \
1324 " incoming particle"
1325 self['decay_chains'].append(\
1326 DecayChainAmplitude(process, collect_mirror_procs,
1327 ignore_six_quark_processes))
1328
1329
1330 decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
1331 for a in dec.get('amplitudes')] for dec in \
1332 self['decay_chains']], [])
1333 decay_ids = set(decay_ids)
1334 for amp in self['amplitudes']:
1335 amp.trim_diagrams(decay_ids)
1336
1337
1338 for amp in self['amplitudes']:
1339 for l in amp.get('process').get('legs'):
1340 if l.get('id') in decay_ids:
1341 decay_ids.remove(l.get('id'))
1342
1343 if decay_ids:
1344 model = amp.get('process').get('model')
1345 names = [model.get_particle(id).get('name') for id in decay_ids]
1346
1347 logger.warning(
1348 "$RED Decay without corresponding particle in core process found.\n" + \
1349 "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
1350 "Please check your process definition carefully. \n" + \
1351 "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
1352 "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")
1353
1354
1355 for dc in reversed(self['decay_chains']):
1356 for a in reversed(dc.get('amplitudes')):
1357
1358 if a.get('process').get('legs')[0].get('id') in decay_ids:
1359 dc.get('amplitudes').remove(a)
1360 if not dc.get('amplitudes'):
1361
1362 self['decay_chains'].remove(dc)
1363
1364
1365
1366 bad_procs = []
1367 for dc in self['decay_chains']:
1368 for amp in dc.get('amplitudes'):
1369 legs = amp.get('process').get('legs')
1370 fs_parts = [abs(l.get('id')) for l in legs if
1371 l.get('state')]
1372 is_part = [l.get('id') for l in legs if not
1373 l.get('state')][0]
1374 if abs(is_part) in fs_parts:
1375 bad_procs.append(amp.get('process'))
1376
1377 if bad_procs:
1378 logger.warning(
1379 "$RED Decay(s) with particle decaying to itself:\n" + \
1380 '\n'.join([p.nice_string() for p in bad_procs]) + \
1381 "\nPlease check your process definition carefully. \n")
1382
1383
1384 elif argument != None:
1385
1386 super(DecayChainAmplitude, self).__init__(argument)
1387 else:
1388
1389 super(DecayChainAmplitude, self).__init__()
1390
1391 - def filter(self, name, value):
1392 """Filter for valid amplitude property values."""
1393
1394 if name == 'amplitudes':
1395 if not isinstance(value, AmplitudeList):
1396 raise self.PhysicsObjectError, \
1397 "%s is not a valid AmplitudeList" % str(value)
1398 if name == 'decay_chains':
1399 if not isinstance(value, DecayChainAmplitudeList):
1400 raise self.PhysicsObjectError, \
1401 "%s is not a valid DecayChainAmplitudeList object" % \
1402 str(value)
1403 return True
1404
1406 """Return diagram property names as a nicely sorted list."""
1407
1408 return ['amplitudes', 'decay_chains']
1409
1410
1411
1413 """Returns number of diagrams for this amplitude"""
1414 return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
1415 + sum(d.get_number_of_diagrams() for d in \
1416 self.get('decay_chains'))
1417
1419 """Returns a nicely formatted string of the amplitude content."""
1420 mystr = ""
1421 for amplitude in self.get('amplitudes'):
1422 mystr = mystr + amplitude.nice_string(indent) + "\n"
1423
1424 if self.get('decay_chains'):
1425 mystr = mystr + " " * indent + "Decays:\n"
1426 for dec in self.get('decay_chains'):
1427 mystr = mystr + dec.nice_string(indent + 2) + "\n"
1428
1429 return mystr[:-1]
1430
1432 """Returns a nicely formatted string of the amplitude processes."""
1433 mystr = ""
1434 for amplitude in self.get('amplitudes'):
1435 mystr = mystr + amplitude.nice_string_processes(indent) + "\n"
1436
1437 if self.get('decay_chains'):
1438 mystr = mystr + " " * indent + "Decays:\n"
1439 for dec in self.get('decay_chains'):
1440 mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"
1441
1442 return mystr[:-1]
1443
1445 """Returns the number of initial state particles in the process."""
1446 return self.get('amplitudes')[0].get('process').get_ninitial()
1447
1449 """Returns a set of all particle ids for which a decay is defined"""
1450
1451 decay_ids = []
1452
1453
1454 for amp in sum([dc.get('amplitudes') for dc \
1455 in self['decay_chains']], []):
1456
1457 decay_ids.append(amp.get('process').get_initial_ids()[0])
1458
1459
1460 return list(set(decay_ids))
1461
1463 """ Returns wether this amplitude has a loop process."""
1464 return self['amplitudes'].has_any_loop_process()
1465
1467 """Recursive function to extract all amplitudes for this process"""
1468
1469 amplitudes = AmplitudeList()
1470
1471 amplitudes.extend(self.get('amplitudes'))
1472 for decay in self.get('decay_chains'):
1473 amplitudes.extend(decay.get_amplitudes())
1474
1475 return amplitudes
1476
class DecayChainAmplitudeList(base_objects.PhysicsObjectList):
    """List of DecayChainAmplitude objects
    """

    def is_valid_element(self, obj):
        """Test if object obj is a valid DecayChainAmplitude for the list."""

        return isinstance(obj, DecayChainAmplitude)
1489
1490
1491
1492
1493
1494 -class MultiProcess(base_objects.PhysicsObject):
1495 """MultiProcess: list of process definitions
1496 list of processes (after cleaning)
1497 list of amplitudes (after generation)
1498 """
1499
1501 """Default values for all properties"""
1502
1503 self['process_definitions'] = base_objects.ProcessDefinitionList()
1504
1505
1506
1507 self['amplitudes'] = AmplitudeList()
1508
1509 self['collect_mirror_procs'] = False
1510
1511
1512 self['ignore_six_quark_processes'] = []
1513
1514
1515 self['use_numerical'] = False
1516
1517 - def __init__(self, argument=None, collect_mirror_procs = False,
1518 ignore_six_quark_processes = [], optimize=False):
1544
1545
1546 - def filter(self, name, value):
1547 """Filter for valid process property values."""
1548
1549 if name == 'process_definitions':
1550 if not isinstance(value, base_objects.ProcessDefinitionList):
1551 raise self.PhysicsObjectError, \
1552 "%s is not a valid ProcessDefinitionList object" % str(value)
1553
1554 if name == 'amplitudes':
1555 if not isinstance(value, diagram_generation.AmplitudeList):
1556 raise self.PhysicsObjectError, \
1557 "%s is not a valid AmplitudeList object" % str(value)
1558
1559 if name in ['collect_mirror_procs']:
1560 if not isinstance(value, bool):
1561 raise self.PhysicsObjectError, \
1562 "%s is not a valid boolean" % str(value)
1563
1564 if name == 'ignore_six_quark_processes':
1565 if not isinstance(value, list):
1566 raise self.PhysicsObjectError, \
1567 "%s is not a valid list" % str(value)
1568
1569 return True
1570
1571 - def get(self, name):
1572 """Get the value of the property name."""
1573
1574 if (name == 'amplitudes') and not self[name]:
1575 for process_def in self.get('process_definitions'):
1576 if process_def.get('decay_chains'):
1577
1578
1579 self['amplitudes'].append(\
1580 DecayChainAmplitude(process_def,
1581 self.get('collect_mirror_procs'),
1582 self.get('ignore_six_quark_processes')))
1583 else:
1584 self['amplitudes'].extend(\
1585 self.generate_multi_amplitudes(process_def,
1586 self.get('collect_mirror_procs'),
1587 self.get('ignore_six_quark_processes'),
1588 self['use_numerical']))
1589
1590 return MultiProcess.__bases__[0].get(self, name)
1591
1593 """Return process property names as a nicely sorted list."""
1594
1595 return ['process_definitions', 'amplitudes']
1596
    @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.
        """
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

        # Determine and set the coupling orders for this definition
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition))
        # Check consistency of the requested expansion orders
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs / success_procs hold sorted leg-id signatures of
        # processes that failed / succeeded diagram generation, so that
        # crossings of earlier processes can be recognized
        failed_procs = []
        success_procs = []
        # Leg-id arrays in the original (non-sorted) order, used to
        # detect mirror processes
        non_permuted_procs = []
        # Permutations relating each success_procs entry to its
        # original leg order, needed to reuse diagrams via crossing
        permutations = []

        model = process_definition['model']

        # Allowed particle ids per (multi-)leg, split by state
        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]

        # Loop over all initial-state id combinations
        for prod in itertools.product(*isids):
            islegs = [\
                    base_objects.Leg({'id':id, 'state': False}) \
                    for id in prod]

            # Final-state combinations already treated (up to ordering)
            red_fsidlist = []

            for prod in itertools.product(*fsids):

                # Remove double counting between final states
                if tuple(sorted(prod)) in red_fsidlist:
                    continue

                red_fsidlist.append(tuple(sorted(prod)));

                # Build the leg list for this particular process
                leg_list = [copy.copy(leg) for leg in islegs]

                leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True}) \
                        for id in prod])

                legs = base_objects.LegList(leg_list)

                # Sorted outgoing-id signature, plus the permutation
                # mapping it back to the original leg positions
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]
                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Skip processes with six or more of the flagged quarks
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # If a crossing of this process already failed,
                # this one will fail too
                if sorted_legs in failed_procs:
                    continue

                if use_numerical:
                    # Numerical mass filter: keep only decays where the
                    # initial mass exceeds the sum of final-state masses
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Setup the process for this id combination
                process = process_definition.get_process_with_legs(legs)

                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                                     process_definition.get_ninitial() == 2:
                    # Check if the mirrored process (initial legs
                    # swapped) has already been generated
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                               amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # No mirror process found; generate as usual
                        pass
                    else:
                        # Mirror process found: flag it and skip
                        # generating this process separately
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Try to reuse diagrams from a successful crossing,
                # unless s-channel restrictions forbid it
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain'):
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                        # Amplitudes with loop diagrams are deliberately
                        # not reused through crossing
                        if 'loop_diagrams' in amplitudes[crossed_index]:
                            raise ValueError
                    except ValueError:
                        # No reusable crossing found; generate below
                        pass
                    else:
                        # Found a crossing: renumber its diagrams
                        # instead of regenerating them
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Create a fresh amplitude of the appropriate type
                amplitude = cls.get_amplitude_from_proc(process)

                try:
                    result = amplitude.generate_diagrams()
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    if amplitude.get('diagrams'):
                        # Diagram generation succeeded: record process
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Generation failed in a way that also rules
                        # out all crossings of this process
                        failed_procs.append(sorted_legs)

        # Raise exception if there are no amplitudes for this process
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                raise error
            else:
                raise NoDiagramException, \
            "No amplitudes generated from process %s. Please enter a valid process" % \
                  process_definition.nice_string()

        return amplitudes
1783
1784 @classmethod
1786 """ Return the correct amplitude type according to the characteristics of
1787 the process proc """
1788 return Amplitude({"process": proc})
1789
1790
1791 @staticmethod
1793 """Find the minimal WEIGHTED order for this set of processes.
1794
1795 The algorithm:
1796
1797 1) Check the coupling hierarchy of the model. Assign all
1798 particles to the different coupling hierarchies so that a
1799 particle is considered to be in the highest hierarchy (i.e.,
1800 with lowest value) where it has an interaction.
1801
1802 2) Pick out the legs in the multiprocess according to the
1803 highest hierarchy represented (so don't mix particles from
1804 different hierarchy classes in the same multiparticles!)
1805
1806 3) Find the starting maximum WEIGHTED order as the sum of the
1807 highest n-2 weighted orders
1808
1809 4) Pick out required s-channel particle hierarchies, and use
1810 the highest of the maximum WEIGHTED order from the legs and
1811 the minimum WEIGHTED order extracted from 2*s-channel
1812 hierarchys plus the n-2-2*(number of s-channels) lowest
1813 leg weighted orders.
1814
1815 5) Run process generation with the WEIGHTED order determined
1816 in 3)-4) - # final state gluons, with all gluons removed from
1817 the final state
1818
1819 6) If no process is found, increase WEIGHTED order by 1 and go
1820 back to 5), until we find a process which passes. Return that
1821 order.
1822
1823 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1.
1824 If still no process has passed, return
1825 WEIGHTED = (n-2)*(highest hierarchy)
1826 """
1827
1828 assert isinstance(process_definition, base_objects.ProcessDefinition), \
1829 "%s not valid ProcessDefinition object" % \
1830 repr(process_definition)
1831
1832 processes = base_objects.ProcessList()
1833 amplitudes = AmplitudeList()
1834
1835
1836 if process_definition.get('orders') or \
1837 process_definition.get('overall_orders') or \
1838 process_definition.get('NLO_mode')=='virt':
1839 return process_definition.get('orders')
1840
1841
1842 if process_definition.get_ninitial() == 1 and not \
1843 process_definition.get('is_decay_chain'):
1844 return process_definition.get('orders')
1845
1846 logger.info("Checking for minimal orders which gives processes.")
1847 logger.info("Please specify coupling orders to bypass this step.")
1848
1849
1850 max_order_now, particles, hierarchy = \
1851 process_definition.get_minimum_WEIGHTED()
1852 coupling = 'WEIGHTED'
1853
1854 model = process_definition.get('model')
1855
1856
1857 isids = [leg['ids'] for leg in \
1858 filter(lambda leg: leg['state'] == False, process_definition['legs'])]
1859 fsids = [leg['ids'] for leg in \
1860 filter(lambda leg: leg['state'] == True, process_definition['legs'])]
1861
1862 max_WEIGHTED_order = \
1863 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED())
1864
1865
1866
1867 while max_order_now < max_WEIGHTED_order:
1868
1869 logger.info("Trying coupling order WEIGHTED=%d" % max_order_now)
1870
1871 oldloglevel = logger.level
1872 logger.setLevel(logging.WARNING)
1873
1874
1875
1876 failed_procs = []
1877
1878
1879 for prod in apply(itertools.product, isids):
1880 islegs = [ base_objects.Leg({'id':id, 'state': False}) \
1881 for id in prod]
1882
1883
1884
1885
1886 red_fsidlist = []
1887
1888 for prod in apply(itertools.product, fsids):
1889
1890
1891 if tuple(sorted(prod)) in red_fsidlist:
1892 continue
1893
1894 red_fsidlist.append(tuple(sorted(prod)));
1895
1896
1897
1898 nglue = 0
1899 if 21 in particles[0]:
1900 nglue = len([id for id in prod if id == 21])
1901 prod = [id for id in prod if id != 21]
1902
1903
1904 leg_list = [copy.copy(leg) for leg in islegs]
1905
1906 leg_list.extend([\
1907 base_objects.Leg({'id':id, 'state': True}) \
1908 for id in prod])
1909
1910 legs = base_objects.LegList(leg_list)
1911
1912
1913
1914 coupling_orders_now = {coupling: max_order_now - \
1915 nglue * model['order_hierarchy']['QCD']}
1916
1917
1918 process = base_objects.Process({\
1919 'legs':legs,
1920 'model':model,
1921 'id': process_definition.get('id'),
1922 'orders': coupling_orders_now,
1923 'required_s_channels': \
1924 process_definition.get('required_s_channels'),
1925 'forbidden_onsh_s_channels': \
1926 process_definition.get('forbidden_onsh_s_channels'),
1927 'sqorders_types': \
1928 process_definition.get('sqorders_types'),
1929 'squared_orders': \
1930 process_definition.get('squared_orders'),
1931 'split_orders': \
1932 process_definition.get('split_orders'),
1933 'forbidden_s_channels': \
1934 process_definition.get('forbidden_s_channels'),
1935 'forbidden_particles': \
1936 process_definition.get('forbidden_particles'),
1937 'is_decay_chain': \
1938 process_definition.get('is_decay_chain'),
1939 'overall_orders': \
1940 process_definition.get('overall_orders'),
1941 'split_orders': \
1942 process_definition.get('split_orders')})
1943
1944
1945 process.check_expansion_orders()
1946
1947
1948 sorted_legs = sorted(legs.get_outgoing_id_list(model))
1949
1950
1951 if tuple(sorted_legs) in failed_procs:
1952 continue
1953
1954 amplitude = Amplitude({'process': process})
1955 try:
1956 amplitude.generate_diagrams()
1957 except InvalidCmd:
1958 failed_procs.append(tuple(sorted_legs))
1959 else:
1960 if amplitude.get('diagrams'):
1961
1962 logger.setLevel(oldloglevel)
1963 return {coupling: max_order_now}
1964 else:
1965 failed_procs.append(tuple(sorted_legs))
1966
1967
1968 max_order_now += 1
1969 logger.setLevel(oldloglevel)
1970
1971
1972 return {coupling: max_order_now}
1973
1974 @staticmethod
1976 """Return the amplitude crossed with the permutation new_perm"""
1977
1978 perm_map = dict(zip(org_perm, new_perm))
1979
1980 new_amp = copy.copy(amplitude)
1981
1982 for i, leg in enumerate(process.get('legs')):
1983 leg.set('number', i+1)
1984
1985 new_amp.set('process', process)
1986
1987 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map,
1988 process.get('legs'),) for \
1989 d in new_amp.get('diagrams')])
1990 new_amp.set('diagrams', diagrams)
1991 new_amp.trim_diagrams()
1992
1993
1994 new_amp.set('has_mirror_process', False)
1995
1996 return new_amp
1997
def expand_list(mylist):
    """Takes a list of lists and elements and returns a list of flat lists.
    Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
    """

    # Keep the original assertion-based precondition
    assert isinstance(mylist, list), "Expand_list argument must be a list"

    # Wrap bare elements in singleton lists so every entry is iterable
    wrapped = [item if isinstance(item, list) else [item] for item in mylist]

    # itertools.product(*wrapped) replaces the removed py2 builtin
    # apply(itertools.product, wrapped)
    return [list(combo) for combo in itertools.product(*wrapped)]
2023
def expand_list_list(mylist):
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
    """

    # Base case: empty input, or a single empty entry
    if not mylist or (len(mylist) == 1 and not mylist[0]):
        return [[]]

    assert isinstance(mylist[0], list), \
           "Expand_list_list needs a list of lists and lists of lists"

    head = mylist[0]

    # Single entry: a list of lists is returned as-is, a flat list
    # stays wrapped
    if len(mylist) == 1:
        return head if isinstance(head[0], list) else mylist

    # Recurse on the remainder, then prepend each head alternative
    # (a flat head counts as a single alternative)
    alternatives = head if isinstance(head[0], list) else [head]
    tails = expand_list_list(mylist[1:])
    return [list(alt) + list(tail) for alt in alternatives for tail in tails]