1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """Classes for diagram generation. Amplitude performs the diagram
16 generation, DecayChainAmplitude keeps track of processes with decay
17 chains, and MultiProcess allows generation of processes with
18 multiparticle definitions. DiagramTag allows to identify diagrams
19 based on relevant properties.
20 """
21
22 import array
23 import copy
24 import itertools
25 import logging
26
27 import madgraph.core.base_objects as base_objects
28 import madgraph.various.misc as misc
29 from madgraph import InvalidCmd, MadGraph5Error
30
31 logger = logging.getLogger('madgraph.diagram_generation')
35
41 """Class to tag diagrams based on objects with some __lt__ measure, e.g.
42 PDG code/interaction id (for comparing diagrams from the same amplitude),
43 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs).
44 Algorithm: Create chains starting from external particles:
45 1 \ / 6
46 2 /\______/\ 7
47 3_ / | \_ 8
48 4 / 5 \_ 9
49 \ 10
50 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867)
51 (((1,2,id12),(3,4,id34)),id1234),
52 5,id91086712345)
53 where idN is the id of the corresponding interaction. The ordering within
54 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has
55 depth 4, 5 has depth 0), and if equal on the ordering of the chain elements.
56 The determination of central vertex is based on minimizing the chain length
57 for the longest subchain.
58 This gives a unique tag which can be used to identify diagrams
59 (instead of symmetry), as well as identify identical matrix elements from
60 different processes."""
61
63 """Exception for any problems in DiagramTags"""
64 pass
65
def __init__(self, diagram, model=None, ninitial=2):
    """Initialize with a diagram. Create DiagramTagChainLinks according to
    the diagram, and figure out if we need to shift the central vertex.

    diagram: a base_objects.Diagram whose vertices are traversed in order
    model: passed through to link_from_leg/vertex_id_from_vertex (may be
        None for tags that do not need model information)
    ninitial: number of initial-state particles (default 2)
    """

    # Map from leg number -> the chain link that produces that leg.
    # External legs get end links created on the fly via setdefault.
    leg_dict = {}

    for vertex in diagram.get('vertices'):
        # All but the last leg of a vertex are inputs to the vertex
        legs = vertex.get('legs')[:-1]
        lastvx = vertex == diagram.get('vertices')[-1]
        if lastvx:
            # For the last vertex there is no propagator leg:
            # all legs are inputs
            legs = vertex.get('legs')

        # Build the link for this vertex from the links of its legs.
        # setdefault creates an end link for legs not produced by an
        # earlier vertex (i.e. external legs).
        link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                    DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                    for leg in legs],
                                   self.vertex_id_from_vertex(vertex,
                                                              lastvx,
                                                              model,
                                                              ninitial))

        if not lastvx:
            # Register the produced (propagator) leg so later vertices
            # can pick up this link
            leg_dict[vertex.get('legs')[-1].get('number')] = link

    # The link of the last vertex is the initial tag for the diagram
    self.tag = link

    # Rebalance: move the central vertex towards the longest chain
    # until doing so no longer gives a "smaller" leading subchain.
    # Nothing to do if all subchains are end links (depth 0).
    done = max([l.depth for l in self.tag.links]) == 0
    while not done:
        # links are sorted in decreasing order, so links[0] is the
        # longest subchain
        longest_chain = self.tag.links[0]

        # Candidate link combining the remaining subchains; the vertex
        # id may need flipping since the propagator direction changes
        # (see flip_vertex)
        new_link = DiagramTagChainLink(self.tag.links[1:],
                                       self.flip_vertex(\
                                           self.tag.vertex_id,
                                           longest_chain.vertex_id,
                                           self.tag.links[1:]))

        # New central vertex: the longest chain's sublinks plus the
        # combined remainder
        other_links = list(longest_chain.links) + [new_link]
        other_link = DiagramTagChainLink(other_links,
                                         self.flip_vertex(\
                                             longest_chain.vertex_id,
                                             self.tag.vertex_id,
                                             other_links))

        if other_link.links[0] < self.tag.links[0]:
            # The new leading subchain is shorter/smaller: accept the
            # shifted central vertex and keep going
            self.tag = other_link
        else:
            # No improvement possible: the tag is final
            done = True
126
128 """Output a diagram from a DiagramTag. Note that each daughter
129 class must implement the static functions id_from_vertex_id
130 (if the vertex id is something else than an integer) and
131 leg_from_link (to pass the correct info from an end link to a
132 leg)."""
133
134
135 diagram = base_objects.Diagram({'vertices': \
136 self.vertices_from_link(self.tag,
137 model,
138 True)})
139 diagram.calculate_orders(model)
140 return diagram
141
142 @classmethod
144 """Recursively return the leg corresponding to this link and
145 the list of all vertices from all previous links"""
146
147 if link.end_link:
148
149 return cls.leg_from_link(link), []
150
151
152 leg_vertices = [cls.vertices_from_link(l, model) for l in link.links]
153
154 legs = base_objects.LegList(sorted([l for l,v in leg_vertices],
155 lambda l1,l2: l2.get('number') - \
156 l1.get('number')))
157
158 vertices = base_objects.VertexList(sum([v for l, v in leg_vertices],
159 []))
160
161 if not first_vertex:
162
163
164 last_leg = cls.leg_from_legs(legs,link.vertex_id,model)
165 legs.append(last_leg)
166
167
168 vertices.append(cls.vertex_from_link(legs,
169 link.vertex_id,
170 model))
171 if first_vertex:
172
173 return vertices
174 else:
175
176 return last_leg, vertices
177
178 @classmethod
180 """Returns the list of external PDGs of the interaction corresponding
181 to this vertex_id."""
182
183
184
185
186 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]):
187 return vertex_id[2]['PDGs']
188 else:
189 return [part.get_pdg_code() for part in model.get_interaction(
190 cls.id_from_vertex_id(vertex_id)).get('particles')]
191
192 @classmethod
194 """Return a leg from a leg list and the model info"""
195
196 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model))
197
198
199 for pdg in [leg.get('id') for leg in legs]:
200 pdgs.remove(pdg)
201
202 assert len(pdgs) == 1
203
204 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code()
205 number = min([l.get('number') for l in legs])
206
207 state = (len([l for l in legs if l.get('state') == False]) != 1)
208
209 onshell= False
210
211 return base_objects.Leg({'id': pdg,
212 'number': number,
213 'state': state,
214 'onshell': onshell})
215
216 @classmethod
229
230 @staticmethod
232 """Return a leg from a link"""
233
234 if link.end_link:
235
236 return base_objects.Leg({'number':link.links[0][1],
237 'id':link.links[0][0][0],
238 'state':(link.links[0][0][1] == 0),
239 'onshell':False})
240
241
242 assert False
243
244 @staticmethod
246 """Return the numerical vertex id from a link.vertex_id"""
247
248 return vertex_id[0][0]
249
250 @staticmethod
252 """Return the loop_info stored in this vertex id. Notice that the
253 IdentifyME tag does not store the loop_info, but should normally never
254 need access to it."""
255
256 return vertex_id[2]
257
258 @staticmethod
260 """Reorder a permutation with respect to start_perm. Note that
261 both need to start from 1."""
262 if perm == start_perm:
263 return range(len(perm))
264 order = [i for (p,i) in \
265 sorted([(p,i) for (i,p) in enumerate(perm)])]
266 return [start_perm[i]-1 for i in order]
267
268 @staticmethod
270 """Returns the default end link for a leg: ((id, state), number).
271 Note that the number is not taken into account if tag comparison,
272 but is used only to extract leg permutations."""
273 if leg.get('state'):
274
275 return [((leg.get('id'), 0), leg.get('number'))]
276 else:
277
278 return [((leg.get('id'), leg.get('number')), leg.get('number'))]
279
280 @staticmethod
282 """Returns the default vertex id: just the interaction id
283 Note that in the vertex id, like the leg, only the first entry is
284 taken into account in the tag comparison, while the second is for
285 storing information that is not to be used in comparisons and the
286 third for additional info regarding the shrunk loop vertex."""
287
288 if isinstance(vertex,base_objects.ContractedVertex):
289
290 return ((vertex.get('id'),vertex.get('loop_tag')),(),
291 {'PDGs':vertex.get('PDGs')})
292 else:
293 return ((vertex.get('id'),()),(),{})
294
295 @staticmethod
297 """Returns the default vertex flip: just the new_vertex"""
298 return new_vertex
299
301 """Equal if same tag"""
302 if type(self) != type(other):
303 return False
304 return self.tag == other.tag
305
307 return not self.__eq__(other)
308
311
313 return self.tag < other.tag
314
316 return self.tag > other.tag
317
318 __repr__ = __str__
319
321 """Chain link for a DiagramTag. A link is a tuple + vertex id + depth,
322 with a comparison operator defined"""
323
def __init__(self, objects, vertex_id = None):
    """Initialize, either with a tuple of DiagramTagChainLinks and
    a vertex_id (defined by DiagramTag.vertex_id_from_vertex), or
    with an external leg object (end link) defined by
    DiagramTag.link_from_leg.

    objects: for an internal link, an iterable of DiagramTagChainLinks;
        for an end link, the result of DiagramTag.link_from_leg
    vertex_id: identifier from DiagramTag.vertex_id_from_vertex;
        None (the default) flags an end link
    """

    if vertex_id is None:  # 'is None' (was '== None'): identity test is the correct idiom
        # End link: wrap the external-leg info directly
        self.links = tuple(objects)
        self.vertex_id = (0,)
        self.depth = 0
        self.end_link = True
        return

    # Internal link: store sublinks sorted in decreasing order so the
    # longest/"largest" chain always comes first (used by DiagramTag).
    # sorted() accepts any iterable, so the former list(tuple(...))
    # round-trip was redundant.
    self.links = tuple(sorted(objects, reverse=True))
    self.vertex_id = vertex_id

    # Depth counts vertices in this chain: each sublink's depth plus
    # this vertex (at least 1, or len-1 for multi-particle vertices)
    self.depth = sum([l.depth for l in self.links],
                     max(1, len(self.links)-1))
    self.end_link = False
345
347 """Get the permutation of external numbers (assumed to be the
348 second entry in the end link tuples)"""
349
350 if self.end_link:
351 return [self.links[0][1]]
352
353 return sum([l.get_external_numbers() for l in self.links], [])
354
356 """Compare self with other in the order:
357 1. depth 2. len(links) 3. vertex id 4. measure of links"""
358
359 if self == other:
360 return False
361
362 if self.depth != other.depth:
363 return self.depth < other.depth
364
365 if len(self.links) != len(other.links):
366 return len(self.links) < len(other.links)
367
368 if self.vertex_id[0] != other.vertex_id[0]:
369 return self.vertex_id[0] < other.vertex_id[0]
370
371 for i, link in enumerate(self.links):
372 if i > len(other.links) - 1:
373 return False
374 if link != other.links[i]:
375 return link < other.links[i]
376
378 return self != other and not self.__lt__(other)
379
381 """For end link,
382 consider equal if self.links[0][0] == other.links[0][0],
383 i.e., ignore the leg number (in links[0][1])."""
384
385 if self.end_link and other.end_link and self.depth == other.depth \
386 and self.vertex_id == other.vertex_id:
387 return self.links[0][0] == other.links[0][0]
388
389 return self.end_link == other.end_link and self.depth == other.depth \
390 and self.vertex_id[0] == other.vertex_id[0] \
391 and self.links == other.links
392
394 return not self.__eq__(other)
395
396
398 if self.end_link:
399 return str(self.links)
400 return "%s, %s; %d" % (str(self.links),
401 str(self.vertex_id),
402 self.depth)
403
404 __repr__ = __str__
405
406
407
408
409 -class Amplitude(base_objects.PhysicsObject):
410 """Amplitude: process + list of diagrams (ordered)
411 Initialize with a process, then call generate_diagrams() to
412 generate the diagrams for the amplitude
413 """
414
416 """Default values for all properties"""
417
418 self['process'] = base_objects.Process()
419 self['diagrams'] = None
420
421
422 self['has_mirror_process'] = False
423
436
437 - def filter(self, name, value):
453
454 - def get(self, name):
463
464
465
467 """Return diagram property names as a nicely sorted list."""
468
469 return ['process', 'diagrams', 'has_mirror_process']
470
472 """Returns number of diagrams for this amplitude"""
473 return len(self.get('diagrams'))
474
476 """Return an AmplitudeList with just this amplitude.
477 Needed for DecayChainAmplitude."""
478
479 return AmplitudeList([self])
480
482 """Returns a nicely formatted string of the amplitude content."""
483 return self.get('process').nice_string(indent) + "\n" + \
484 self.get('diagrams').nice_string(indent)
485
487 """Returns a nicely formatted string of the amplitude process."""
488 return self.get('process').nice_string(indent)
489
491 """Returns the number of initial state particles in the process."""
492 return self.get('process').get_ninitial()
493
495 """ Returns wether this amplitude has a loop process."""
496
497 return self.get('process').get('perturbation_couplings')
498
500 """Generate diagrams. Algorithm:
501
502 1. Define interaction dictionaries:
503 * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
504 * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1
505
506 2. Set flag from_group=true for all external particles.
507 Flip particle/anti particle for incoming particles.
508
509 3. If there is a dictionary n->0 with n=number of external
510 particles, create if possible the combination [(1,2,3,4,...)]
511 with *at least two* from_group==true. This will give a
512 finished (set of) diagram(s) (done by reduce_leglist)
513
514 4. Create all allowed groupings of particles with at least one
515 from_group==true (according to dictionaries n->1):
516 [(1,2),3,4...],[1,(2,3),4,...],...,
517 [(1,2),(3,4),...],...,[(1,2,3),4,...],...
518 (done by combine_legs)
519
520 5. Replace each group with a (list of) new particle(s) with number
521 n = min(group numbers). Set from_group true for these
522 particles and false for all other particles. Store vertex info.
523 (done by merge_comb_legs)
524
525 6. Stop algorithm when at most 2 particles remain.
526 Return all diagrams (lists of vertices).
527
528 7. Repeat from 3 (recursion done by reduce_leglist)
529
530 8. Replace final p=p vertex
531
532 Be aware that the resulting vertices have all particles outgoing,
533 so need to flip for incoming particles when used.
534
535 SPECIAL CASE: For A>BC... processes which are legs in decay
536 chains, we need to ensure that BC... combine first, giving A=A
537 as a final vertex. This case is defined by the Process
538 property is_decay_chain = True.
539 This function can also be called by the generate_diagram function
540 of LoopAmplitudes, in which case the generated diagrams here must not
541 be directly assigned to the 'diagrams' attributed but returned as a
542 DiagramList by the function. This is controlled by the argument
543 returndiag.
544 """
545
546 process = self.get('process')
547 model = process.get('model')
548 legs = process.get('legs')
549
550 for key in process.get('overall_orders').keys():
551 try:
552 process.get('orders')[key] = \
553 min(process.get('orders')[key],
554 process.get('overall_orders')[key])
555 except KeyError:
556 process.get('orders')[key] = process.get('overall_orders')[key]
557
558 assert model.get('particles'), \
559 "particles are missing in model: %s" % model.get('particles')
560
561 assert model.get('interactions'), \
562 "interactions are missing in model"
563
564
565 res = base_objects.DiagramList()
566
567 if len(filter(lambda leg: model.get('particle_dict')[\
568 leg.get('id')].is_fermion(), legs)) % 2 == 1:
569 if not returndiag:
570 self['diagrams'] = res
571 raise InvalidCmd, 'The number of fermion is odd'
572 else:
573 return False, res
574
575
576
577 if not model.get('got_majoranas') and \
578 len(filter(lambda leg: leg.is_incoming_fermion(model), legs)) != \
579 len(filter(lambda leg: leg.is_outgoing_fermion(model), legs)):
580 if not returndiag:
581 self['diagrams'] = res
582 raise InvalidCmd, 'The number of of incoming/outcoming fermions are different'
583 else:
584 return False, res
585
586
587
588 for charge in model.get('conserved_charge'):
589 total = 0
590 for leg in legs:
591 part = model.get('particle_dict')[leg.get('id')]
592 try:
593 value = part.get(charge)
594 except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
595 try:
596 value = getattr(part, charge)
597 except AttributeError:
598 value = 0
599
600 if (leg.get('id') != part['pdg_code']) != leg['state']:
601 total -= value
602 else:
603 total += value
604
605 if abs(total) > 1e-10:
606 if not returndiag:
607 self['diagrams'] = res
608 raise InvalidCmd, 'No %s conservation for this process ' % charge
609 return res
610 else:
611 raise InvalidCmd, 'No %s conservation for this process ' % charge
612 return res, res
613
614 if not returndiag:
615 logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))
616
617
618 for i in range(0, len(process.get('legs'))):
619
620 leg = copy.copy(process.get('legs')[i])
621 process.get('legs')[i] = leg
622 if leg.get('number') == 0:
623 leg.set('number', i + 1)
624
625
626
627 leglist = self.copy_leglist(process.get('legs'))
628
629 for leg in leglist:
630
631
632 leg.set('from_group', True)
633
634
635
636 if leg.get('state') == False:
637 part = model.get('particle_dict')[leg.get('id')]
638 leg.set('id', part.get_anti_pdg_code())
639
640
641
642 max_multi_to1 = max([len(key) for key in \
643 model.get('ref_dict_to1').keys()])
644
645
646
647
648
649
650
651
652 is_decay_proc = process.get_ninitial() == 1
653 if is_decay_proc:
654 part = model.get('particle_dict')[leglist[0].get('id')]
655
656
657
658 ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
659 (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
660
661
662 leglist[0].set('from_group', None)
663 reduced_leglist = self.reduce_leglist(leglist,
664 max_multi_to1,
665 ref_dict_to0,
666 is_decay_proc,
667 process.get('orders'))
668 else:
669 reduced_leglist = self.reduce_leglist(leglist,
670 max_multi_to1,
671 model.get('ref_dict_to0'),
672 is_decay_proc,
673 process.get('orders'))
674
675
676
677
678 self.convert_dgleg_to_leg(reduced_leglist)
679
680 if reduced_leglist:
681 for vertex_list in reduced_leglist:
682 res.append(self.create_diagram(base_objects.VertexList(vertex_list)))
683
684
685
686 failed_crossing = not res
687
688
689
690
691
692
693 if process.get('required_s_channels') and \
694 process.get('required_s_channels')[0]:
695
696
697 lastvx = -1
698
699
700
701 if is_decay_proc: lastvx = -2
702 ninitial = len(filter(lambda leg: leg.get('state') == False,
703 process.get('legs')))
704
705 old_res = res
706 res = base_objects.DiagramList()
707 for id_list in process.get('required_s_channels'):
708 res_diags = filter(lambda diagram: \
709 all([req_s_channel in \
710 [vertex.get_s_channel_id(\
711 process.get('model'), ninitial) \
712 for vertex in diagram.get('vertices')[:lastvx]] \
713 for req_s_channel in \
714 id_list]), old_res)
715
716 res.extend([diag for diag in res_diags if diag not in res])
717
718
719
720
721
722 if process.get('forbidden_s_channels'):
723 ninitial = len(filter(lambda leg: leg.get('state') == False,
724 process.get('legs')))
725 if ninitial == 2:
726 res = base_objects.DiagramList(\
727 filter(lambda diagram: \
728 not any([vertex.get_s_channel_id(\
729 process.get('model'), ninitial) \
730 in process.get('forbidden_s_channels')
731 for vertex in diagram.get('vertices')[:-1]]),
732 res))
733 else:
734
735
736 newres= []
737 for diagram in res:
738 leg1 = 1
739
740
741
742 vertex = diagram.get('vertices')[-1]
743 if any([l['number'] ==1 for l in vertex.get('legs')]):
744 leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
745 to_loop = range(len(diagram.get('vertices'))-1)
746 if leg1 >1:
747 to_loop.reverse()
748 for i in to_loop:
749 vertex = diagram.get('vertices')[i]
750 if leg1:
751 if any([l['number'] ==leg1 for l in vertex.get('legs')]):
752 leg1 = 0
753 continue
754 if vertex.get_s_channel_id(process.get('model'), ninitial)\
755 in process.get('forbidden_s_channels'):
756 break
757 else:
758 newres.append(diagram)
759 res = base_objects.DiagramList(newres)
760
761
762
763
764 if process.get('forbidden_onsh_s_channels'):
765 ninitial = len(filter(lambda leg: leg.get('state') == False,
766 process.get('legs')))
767
768 verts = base_objects.VertexList(sum([[vertex for vertex \
769 in diagram.get('vertices')[:-1]
770 if vertex.get_s_channel_id(\
771 process.get('model'), ninitial) \
772 in process.get('forbidden_onsh_s_channels')] \
773 for diagram in res], []))
774 for vert in verts:
775
776 newleg = copy.copy(vert.get('legs').pop(-1))
777 newleg.set('onshell', False)
778 vert.get('legs').append(newleg)
779
780
781 for diagram in res:
782 diagram.calculate_orders(model)
783
784
785
786
787
788
789
790
791 if not returndiag and len(res)>0:
792 res = self.apply_squared_order_constraints(res)
793
794 if diagram_filter:
795 res = self.apply_user_filter(res)
796
797
798 if not process.get('is_decay_chain'):
799 for diagram in res:
800 vertices = diagram.get('vertices')
801 if len(vertices) > 1 and vertices[-1].get('id') == 0:
802
803
804
805
806 vertices = copy.copy(vertices)
807 lastvx = vertices.pop()
808 nexttolastvertex = copy.copy(vertices.pop())
809 legs = copy.copy(nexttolastvertex.get('legs'))
810 ntlnumber = legs[-1].get('number')
811 lastleg = filter(lambda leg: leg.get('number') != ntlnumber,
812 lastvx.get('legs'))[0]
813
814 if lastleg.get('onshell') == False:
815 lastleg.set('onshell', None)
816
817 legs[-1] = lastleg
818 nexttolastvertex.set('legs', legs)
819 vertices.append(nexttolastvertex)
820 diagram.set('vertices', vertices)
821
822 if res and not returndiag:
823 logger.info("Process has %d diagrams" % len(res))
824
825
826 self.trim_diagrams(diaglist=res)
827
828
829 pertur = 'QCD'
830 if self.get('process')['perturbation_couplings']:
831 pertur = sorted(self.get('process')['perturbation_couplings'])[0]
832 self.get('process').get('legs').sort(pert=pertur)
833
834
835 if not returndiag:
836 self['diagrams'] = res
837 return not failed_crossing
838 else:
839 return not failed_crossing, res
840
842 """Applies the user specified squared order constraints on the diagram
843 list in argument."""
844
845 res = copy.copy(diag_list)
846
847
848
849 for name, (value, operator) in self['process'].get('constrained_orders').items():
850 res.filter_constrained_orders(name, value, operator)
851
852
853
854
855 while True:
856 new_res = res.apply_positive_sq_orders(res,
857 self['process'].get('squared_orders'),
858 self['process']['sqorders_types'])
859
860 if len(res)==len(new_res):
861 break
862 elif (len(new_res)>len(res)):
863 raise MadGraph5Error(
864 'Inconsistency in function apply_squared_order_constraints().')
865
866 res = new_res
867
868
869
870
871 neg_orders = [(order, value) for order, value in \
872 self['process'].get('squared_orders').items() if value<0]
873 if len(neg_orders)==1:
874 neg_order, neg_value = neg_orders[0]
875
876 res, target_order = res.apply_negative_sq_order(res, neg_order,\
877 neg_value, self['process']['sqorders_types'][neg_order])
878
879
880
881
882 self['process']['squared_orders'][neg_order]=target_order
883 elif len(neg_orders)>1:
884 raise InvalidCmd('At most one negative squared order constraint'+\
885 ' can be specified, not %s.'%str(neg_orders))
886
887 return res
888
890 """Applies the user specified squared order constraints on the diagram
891 list in argument."""
892
893 if True:
894 remove_diag = misc.plugin_import('user_filter',
895 'user filter required to be defined in PLUGIN/user_filter.py with the function remove_diag(ONEDIAG) which returns True if the diagram has to be removed',
896 fcts=['remove_diag'])
897 else:
898
899 def remove_diag(diag):
900 for vertex in diag['vertices']:
901 if vertex['id'] == 0:
902 continue
903 if vertex['legs'][-1]['number'] < 3:
904 if abs(vertex['legs'][-1]['id']) <6:
905 return True
906 return False
907
908 res = diag_list.__class__()
909 nb_removed = 0
910 for diag in diag_list:
911 if remove_diag(diag):
912 nb_removed +=1
913 else:
914 res.append(diag)
915
916 if nb_removed:
917 logger.warning('Diagram filter is ON and removed %s diagrams for this subprocess.' % nb_removed)
918
919 return res
920
921
922
924 """ Return a Diagram created from the vertex list. This function can be
925 overloaded by daughter classes."""
926 return base_objects.Diagram({'vertices':vertexlist})
927
929 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs.
930 In Amplitude, there is nothing to do. """
931
932 return True
933
935 """ Simply returns a copy of the leg list. This function is
936 overloaded in LoopAmplitude so that a DGLoopLeg list is returned.
937 The DGLoopLeg has some additional parameters only useful during
938 loop diagram generation"""
939
940 return base_objects.LegList(\
941 [ copy.copy(leg) for leg in legs ])
942
def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                   is_decay_proc = False, coupling_orders = None):
    """Recursive function to reduce N LegList to N-1.
    For algorithm, see doc for generate_diagrams.

    curr_leglist: current list of legs still to be combined
    max_multi_to1: maximum multiplicity n of the model's n->1 vertices
    ref_dict_to0: dictionary of leg-id combinations that can couple to
        nothing, i.e. close the diagram with a final vertex
    is_decay_proc: True for a 1 -> N decay process
    coupling_orders: remaining coupling orders, or None for no limit

    Returns a list of vertex lists (one per diagram), or None if this
    leg list cannot be reduced.
    """

    # Result: list of lists of vertices which corresponds to the
    # diagrams generated from this leg list
    res = []

    # Stop condition: a failed previous recursion step passes None
    if curr_leglist is None:
        return None

    # Extract ref dict information
    model = self.get('process').get('model')
    ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

    # If all legs can be combined in one single vertex, add this
    # possibility to the result (with all allowed interaction ids)
    if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):

        # Extract the interaction id (or ids) corresponding to the
        # vertex; get_combined_vertices allows daughter classes to veto
        vertex_ids = self.get_combined_vertices(curr_leglist,
                    copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                                leg in curr_leglist]))]))

        final_vertices = [base_objects.Vertex({'legs':curr_leglist,
                                               'id':vertex_id}) for \
                          vertex_id in vertex_ids]

        # Check for coupling orders; '!= False' because reduce_orders
        # returns None when no orders are imposed, which counts as success
        for final_vertex in final_vertices:
            if self.reduce_orders(coupling_orders, model,
                                  [final_vertex.get('id')]) != False:
                res.append([final_vertex])

    # Stop condition 2: if the leglist contained exactly two legs,
    # there is no further combination to do
    if len(curr_leglist) == 2:
        if res:
            return res
        else:
            return None

    # Create a list of all valid combinations of legs
    comb_lists = self.combine_legs(curr_leglist,
                                   ref_dict_to1, max_multi_to1)

    # Create a list of leglists/vertices by merging combinations
    leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

    # Consider all the pairs (reduced leg list, vertices that were used)
    for leg_vertex_tuple in leg_vertex_list:

        # Remove forbidden particles: skip this combination if any of
        # its produced legs is a forbidden particle
        if self.get('process').get('forbidden_particles') and \
            any([abs(vertex.get('legs')[-1].get('id')) in \
            self.get('process').get('forbidden_particles') \
            for vertex in leg_vertex_tuple[1]]):
                continue

        # Check for coupling orders; skip if any order is exceeded
        new_coupling_orders = self.reduce_orders(coupling_orders,
                                                 model,
                                                 [vertex.get('id') for vertex in \
                                                  leg_vertex_tuple[1]])
        if new_coupling_orders == False:
            # Some coupling order limit was passed
            continue

        # Recursion: reduce the leg list further, keeping track of the
        # remaining coupling orders
        reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                              max_multi_to1,
                                              ref_dict_to0,
                                              is_decay_proc,
                                              new_coupling_orders)

        # If there is a reduced diagram, combine the vertices used at
        # this step with every possible continuation
        if reduced_diagram:
            vertex_list_list = [list(leg_vertex_tuple[1])]
            vertex_list_list.append(reduced_diagram)
            expanded_list = expand_list_list(vertex_list_list)
            res.extend(expanded_list)

    return res
1031
def reduce_orders(self, coupling_orders, model, vertex_id_list):
    """Return False if the coupling orders for any coupling is <
    0, otherwise return the new coupling orders with the vertex
    orders subtracted. If coupling_orders is not given, return
    None (which counts as success).
    WEIGHTED is a special order, which corresponds to the sum of
    order hierarchies for the couplings.
    We ignore negative constraints as these cannot be taken into
    account on the fly but only after generation."""

    # No restriction given: nothing to subtract, report success
    if not coupling_orders:
        return None

    remaining = copy.copy(coupling_orders)

    for vertex_id in vertex_id_list:
        # Vertex id 0 is the identity vertex and carries no couplings
        if not vertex_id:
            continue

        interaction = model.get("interaction_dict")[vertex_id]
        vertex_orders = interaction.get('orders')

        for coupling, power in vertex_orders.items():
            # A remaining order that already went negative encodes an
            # ignored (negative) constraint, so leave it alone
            if coupling not in remaining or remaining[coupling] < 0:
                continue
            remaining[coupling] -= power
            if remaining[coupling] < 0:
                # This combination exceeds the allowed order
                return False

        if 'WEIGHTED' in remaining and remaining['WEIGHTED'] >= 0:
            # WEIGHTED is the hierarchy-weighted sum over all couplings
            # of this vertex
            hierarchy = model.get('order_hierarchy')
            remaining['WEIGHTED'] -= sum([hierarchy[c] * n for (c, n) in
                                          vertex_orders.items()])
            if remaining['WEIGHTED'] < 0:
                return False

    return remaining
1073
def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
    """Recursive function. Take a list of legs as an input, with
    the reference dictionary n-1->1, and output a list of list of
    tuples of Legs (allowed combinations) and Legs (rest). Algorithm:

    1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

    2. For each combination, say [34]. Check if combination is valid.
       If so:

       a. Append [12[34]56] to result array

       b. Split [123456] at index(first element in combination+1),
          i.e. [12],[456] and subtract combination from second half,
          i.e.: [456]-[34]=[56]. Repeat from 1. with this array

    3. Take result array from call to 1. (here, [[56]]) and append
       (first half in step b - combination) + combination + (result
       from 1.) = [12[34][56]] to result array

    4. After appending results from all n-combinations, return
       resulting array. Example, if [13] and [45] are valid
       combinations:
       [[[13]2456],[[13]2[45]6],[123[45]6]]
    """

    results = []

    # Loop over all sizes of groups allowed by the n->1 dictionary
    for size in range(2, max_multi_to1 + 1):

        # No group can be larger than the number of available legs
        if size > len(list_legs):
            return results

        # Consider every subset of 'size' legs as a candidate group
        for group in itertools.combinations(list_legs, size):

            # Only keep groups that the model can merge into one leg
            if not base_objects.LegList(group).can_combine_to_1(ref_dict_to1):
                continue

            # a. Replace the grouped legs by the group itself, placed
            #    at the position of the group's first leg
            replaced = copy.copy(list_legs)
            for leg in group:
                replaced.remove(leg)
            replaced.insert(list_legs.index(group[0]), group)
            results.append(replaced)

            # b. Split the list at the first grouped leg: the head is
            #    kept as-is (plus the group), the tail (minus the other
            #    grouped legs) is recursively combined further
            anchor = list_legs.index(group[0])
            head = list_legs[0:anchor]
            tail = list_legs[anchor + 1:]
            for leg in group[1:]:
                tail.remove(leg)

            head.append(group)

            # 3. Stitch head+group onto every combination of the tail
            for tail_combination in self.combine_legs(tail,
                                                      ref_dict_to1,
                                                      max_multi_to1):
                stitched = copy.copy(head)
                stitched.extend(tail_combination)
                results.append(stitched)

    return results
1147
1148
1150 """Takes a list of allowed leg combinations as an input and returns
1151 a set of lists where combinations have been properly replaced
1152 (one list per element in the ref_dict, so that all possible intermediate
1153 particles are included). For each list, give the list of vertices
1154 corresponding to the executed merging, group the two as a tuple.
1155 """
1156
1157 res = []
1158
1159 for comb_list in comb_lists:
1160
1161 reduced_list = []
1162 vertex_list = []
1163
1164 for entry in comb_list:
1165
1166
1167 if isinstance(entry, tuple):
1168
1169
1170
1171 leg_vert_ids = copy.copy(ref_dict_to1[\
1172 tuple(sorted([leg.get('id') for leg in entry]))])
1173
1174
1175 number = min([leg.get('number') for leg in entry])
1176
1177
1178 if len(filter(lambda leg: leg.get('state') == False,
1179 entry)) == 1:
1180 state = False
1181 else:
1182 state = True
1183
1184
1185
1186
1187
1188 new_leg_vert_ids = []
1189 if leg_vert_ids:
1190 new_leg_vert_ids = self.get_combined_legs(entry,
1191 leg_vert_ids,
1192 number,
1193 state)
1194
1195 reduced_list.append([l[0] for l in new_leg_vert_ids])
1196
1197
1198
1199
1200
1201 vlist = base_objects.VertexList()
1202 for (myleg, vert_id) in new_leg_vert_ids:
1203
1204 myleglist = base_objects.LegList(list(entry))
1205
1206 myleglist.append(myleg)
1207
1208 vlist.append(base_objects.Vertex(
1209 {'legs':myleglist,
1210 'id':vert_id}))
1211
1212 vertex_list.append(vlist)
1213
1214
1215
1216 else:
1217 cp_entry = copy.copy(entry)
1218
1219
1220
1221 if cp_entry.get('from_group') != None:
1222 cp_entry.set('from_group', False)
1223 reduced_list.append(cp_entry)
1224
1225
1226 flat_red_lists = expand_list(reduced_list)
1227 flat_vx_lists = expand_list(vertex_list)
1228
1229
1230 for i in range(0, len(flat_vx_lists)):
1231 res.append((base_objects.LegList(flat_red_lists[i]), \
1232 base_objects.VertexList(flat_vx_lists[i])))
1233
1234 return res
1235
1237 """Create a set of new legs from the info given. This can be
1238 overloaded by daughter classes."""
1239
1240 mylegs = [(base_objects.Leg({'id':leg_id,
1241 'number':number,
1242 'state':state,
1243 'from_group':True}),
1244 vert_id)\
1245 for leg_id, vert_id in leg_vert_ids]
1246
1247 return mylegs
1248
1250 """Allow for selection of vertex ids. This can be
1251 overloaded by daughter classes."""
1252
1253 return vert_ids
1254
1256 """Reduce the number of legs and vertices used in memory.
1257 When called by a diagram generation initiated by LoopAmplitude,
1258 this function should not trim the diagrams in the attribute 'diagrams'
1259 but rather a given list in the 'diaglist' argument."""
1260
1261 legs = []
1262 vertices = []
1263
1264 if diaglist is None:
1265 diaglist=self.get('diagrams')
1266
1267
1268 process = self.get('process')
1269 for leg in process.get('legs'):
1270 if leg.get('state') and leg.get('id') in decay_ids:
1271 leg.set('onshell', True)
1272
1273 for diagram in diaglist:
1274
1275 leg_external = set()
1276 for ivx, vertex in enumerate(diagram.get('vertices')):
1277 for ileg, leg in enumerate(vertex.get('legs')):
1278
1279 if leg.get('state') and leg.get('id') in decay_ids and \
1280 leg.get('number') not in leg_external:
1281
1282
1283 leg = copy.copy(leg)
1284 leg.set('onshell', True)
1285 try:
1286 index = legs.index(leg)
1287 except ValueError:
1288 vertex.get('legs')[ileg] = leg
1289 legs.append(leg)
1290 else:
1291 vertex.get('legs')[ileg] = legs[index]
1292 leg_external.add(leg.get('number'))
1293 try:
1294 index = vertices.index(vertex)
1295 diagram.get('vertices')[ivx] = vertices[index]
1296 except ValueError:
1297 vertices.append(vertex)
1298
1299
1300
1301
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects
    """

    def has_any_loop_process(self):
        """Check the content of all processes of the amplitudes in this list
        to see if there is any which defines perturbation couplings."""

        for amp in self:
            if amp.has_loop_process():
                return True
        # Explicit False (the original fell through returning None)
        return False

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
1318
1323 """A list of amplitudes + a list of decay chain amplitude lists;
1324 corresponding to a ProcessDefinition with a list of decay chains
1325 """
1326
1332
1333 - def __init__(self, argument = None, collect_mirror_procs = False,
1334 ignore_six_quark_processes = False, loop_filter=None, diagram_filter=False):
1335 """Allow initialization with Process and with ProcessDefinition"""
1336
1337 if isinstance(argument, base_objects.Process):
1338 super(DecayChainAmplitude, self).__init__()
1339 from madgraph.loop.loop_diagram_generation import LoopMultiProcess
1340 if argument['perturbation_couplings']:
1341 MultiProcessClass=LoopMultiProcess
1342 else:
1343 MultiProcessClass=MultiProcess
1344 if isinstance(argument, base_objects.ProcessDefinition):
1345 self['amplitudes'].extend(\
1346 MultiProcessClass.generate_multi_amplitudes(argument,
1347 collect_mirror_procs,
1348 ignore_six_quark_processes,
1349 loop_filter=loop_filter,
1350 diagram_filter=diagram_filter))
1351 else:
1352 self['amplitudes'].append(\
1353 MultiProcessClass.get_amplitude_from_proc(argument,
1354 loop_filter=loop_filter,
1355 diagram_filter=diagram_filter))
1356
1357
1358 process = copy.copy(self.get('amplitudes')[0].get('process'))
1359 process.set('decay_chains', base_objects.ProcessList())
1360 self['amplitudes'][0].set('process', process)
1361
1362 for process in argument.get('decay_chains'):
1363 if process.get('perturbation_couplings'):
1364 raise MadGraph5Error,\
1365 "Decay processes can not be perturbed"
1366 process.set('overall_orders', argument.get('overall_orders'))
1367 if not process.get('is_decay_chain'):
1368 process.set('is_decay_chain',True)
1369 if not process.get_ninitial() == 1:
1370 raise InvalidCmd,\
1371 "Decay chain process must have exactly one" + \
1372 " incoming particle"
1373 self['decay_chains'].append(\
1374 DecayChainAmplitude(process, collect_mirror_procs,
1375 ignore_six_quark_processes,
1376 diagram_filter=diagram_filter))
1377
1378
1379 decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
1380 for a in dec.get('amplitudes')] for dec in \
1381 self['decay_chains']], [])
1382 decay_ids = set(decay_ids)
1383 for amp in self['amplitudes']:
1384 amp.trim_diagrams(decay_ids)
1385
1386
1387 for amp in self['amplitudes']:
1388 for l in amp.get('process').get('legs'):
1389 if l.get('id') in decay_ids:
1390 decay_ids.remove(l.get('id'))
1391
1392 if decay_ids:
1393 model = amp.get('process').get('model')
1394 names = [model.get_particle(id).get('name') for id in decay_ids]
1395
1396 logger.warning(
1397 "$RED Decay without corresponding particle in core process found.\n" + \
1398 "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
1399 "Please check your process definition carefully. \n" + \
1400 "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
1401 "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")
1402
1403
1404 for dc in reversed(self['decay_chains']):
1405 for a in reversed(dc.get('amplitudes')):
1406
1407 if a.get('process').get('legs')[0].get('id') in decay_ids:
1408 dc.get('amplitudes').remove(a)
1409 if not dc.get('amplitudes'):
1410
1411 self['decay_chains'].remove(dc)
1412
1413
1414
1415 bad_procs = []
1416 for dc in self['decay_chains']:
1417 for amp in dc.get('amplitudes'):
1418 legs = amp.get('process').get('legs')
1419 fs_parts = [abs(l.get('id')) for l in legs if
1420 l.get('state')]
1421 is_part = [l.get('id') for l in legs if not
1422 l.get('state')][0]
1423 if abs(is_part) in fs_parts:
1424 bad_procs.append(amp.get('process'))
1425
1426 if bad_procs:
1427 logger.warning(
1428 "$RED Decay(s) with particle decaying to itself:\n" + \
1429 '\n'.join([p.nice_string() for p in bad_procs]) + \
1430 "\nPlease check your process definition carefully. \n")
1431
1432
1433 elif argument != None:
1434
1435 super(DecayChainAmplitude, self).__init__(argument)
1436 else:
1437
1438 super(DecayChainAmplitude, self).__init__()
1439
1440 - def filter(self, name, value):
1441 """Filter for valid amplitude property values."""
1442
1443 if name == 'amplitudes':
1444 if not isinstance(value, AmplitudeList):
1445 raise self.PhysicsObjectError, \
1446 "%s is not a valid AmplitudeList" % str(value)
1447 if name == 'decay_chains':
1448 if not isinstance(value, DecayChainAmplitudeList):
1449 raise self.PhysicsObjectError, \
1450 "%s is not a valid DecayChainAmplitudeList object" % \
1451 str(value)
1452 return True
1453
1455 """Return diagram property names as a nicely sorted list."""
1456
1457 return ['amplitudes', 'decay_chains']
1458
1459
1460
1462 """Returns number of diagrams for this amplitude"""
1463 return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
1464 + sum(d.get_number_of_diagrams() for d in \
1465 self.get('decay_chains'))
1466
1468 """Returns a nicely formatted string of the amplitude content."""
1469 mystr = ""
1470 for amplitude in self.get('amplitudes'):
1471 mystr = mystr + amplitude.nice_string(indent) + "\n"
1472
1473 if self.get('decay_chains'):
1474 mystr = mystr + " " * indent + "Decays:\n"
1475 for dec in self.get('decay_chains'):
1476 mystr = mystr + dec.nice_string(indent + 2) + "\n"
1477
1478 return mystr[:-1]
1479
1481 """Returns a nicely formatted string of the amplitude processes."""
1482 mystr = ""
1483 for amplitude in self.get('amplitudes'):
1484 mystr = mystr + amplitude.nice_string_processes(indent) + "\n"
1485
1486 if self.get('decay_chains'):
1487 mystr = mystr + " " * indent + "Decays:\n"
1488 for dec in self.get('decay_chains'):
1489 mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"
1490
1491 return mystr[:-1]
1492
1494 """Returns the number of initial state particles in the process."""
1495 return self.get('amplitudes')[0].get('process').get_ninitial()
1496
1498 """Returns a set of all particle ids for which a decay is defined"""
1499
1500 decay_ids = []
1501
1502
1503 for amp in sum([dc.get('amplitudes') for dc \
1504 in self['decay_chains']], []):
1505
1506 decay_ids.append(amp.get('process').get_initial_ids()[0])
1507
1508
1509 return list(set(decay_ids))
1510
1512 """ Returns wether this amplitude has a loop process."""
1513 return self['amplitudes'].has_any_loop_process()
1514
1516 """Recursive function to extract all amplitudes for this process"""
1517
1518 amplitudes = AmplitudeList()
1519
1520 amplitudes.extend(self.get('amplitudes'))
1521 for decay in self.get('decay_chains'):
1522 amplitudes.extend(decay.get_amplitudes())
1523
1524 return amplitudes
1525
class DecayChainAmplitudeList(base_objects.PhysicsObjectList):
    """List of DecayChainAmplitude objects
    """

    def is_valid_element(self, obj):
        """Test if object obj is a valid DecayChainAmplitude for the list."""
        return isinstance(obj, DecayChainAmplitude)
1538
1539
1540
1541
1542
1543 -class MultiProcess(base_objects.PhysicsObject):
1544 """MultiProcess: list of process definitions
1545 list of processes (after cleaning)
1546 list of amplitudes (after generation)
1547 """
1548
1550 """Default values for all properties"""
1551
1552 self['process_definitions'] = base_objects.ProcessDefinitionList()
1553
1554
1555
1556 self['amplitudes'] = AmplitudeList()
1557
1558 self['collect_mirror_procs'] = False
1559
1560
1561 self['ignore_six_quark_processes'] = []
1562
1563
1564 self['use_numerical'] = False
1565
1566 - def __init__(self, argument=None, collect_mirror_procs = False,
1567 ignore_six_quark_processes = [], optimize=False,
1568 loop_filter=None, diagram_filter=None):
1596
1597
1598 - def filter(self, name, value):
1599 """Filter for valid process property values."""
1600
1601 if name == 'process_definitions':
1602 if not isinstance(value, base_objects.ProcessDefinitionList):
1603 raise self.PhysicsObjectError, \
1604 "%s is not a valid ProcessDefinitionList object" % str(value)
1605
1606 if name == 'amplitudes':
1607 if not isinstance(value, AmplitudeList):
1608 raise self.PhysicsObjectError, \
1609 "%s is not a valid AmplitudeList object" % str(value)
1610
1611 if name in ['collect_mirror_procs']:
1612 if not isinstance(value, bool):
1613 raise self.PhysicsObjectError, \
1614 "%s is not a valid boolean" % str(value)
1615
1616 if name == 'ignore_six_quark_processes':
1617 if not isinstance(value, list):
1618 raise self.PhysicsObjectError, \
1619 "%s is not a valid list" % str(value)
1620
1621 return True
1622
1623 - def get(self, name):
1624 """Get the value of the property name."""
1625
1626 if (name == 'amplitudes') and not self[name]:
1627 for process_def in self.get('process_definitions'):
1628 if process_def.get('decay_chains'):
1629
1630
1631 self['amplitudes'].append(\
1632 DecayChainAmplitude(process_def,
1633 self.get('collect_mirror_procs'),
1634 self.get('ignore_six_quark_processes'),
1635 diagram_filter=self['diagram_filter']))
1636 else:
1637 self['amplitudes'].extend(\
1638 self.generate_multi_amplitudes(process_def,
1639 self.get('collect_mirror_procs'),
1640 self.get('ignore_six_quark_processes'),
1641 self['use_numerical'],
1642 loop_filter=self['loop_filter'],
1643 diagram_filter=self['diagram_filter']))
1644
1645 return MultiProcess.__bases__[0].get(self, name)
1646
1648 """Return process property names as a nicely sorted list."""
1649
1650 return ['process_definitions', 'amplitudes']
1651
1652 @classmethod
1653 - def generate_multi_amplitudes(cls,process_definition,
1654 collect_mirror_procs = False,
1655 ignore_six_quark_processes = [],
1656 use_numerical=False,
1657 loop_filter=None,
1658 diagram_filter=False):
1659 """Generate amplitudes in a semi-efficient way.
1660 Make use of crossing symmetry for processes that fail diagram
1661 generation, but not for processes that succeed diagram
1662 generation. Doing so will risk making it impossible to
1663 identify processes with identical amplitudes.
1664 """
1665 assert isinstance(process_definition, base_objects.ProcessDefinition), \
1666 "%s not valid ProcessDefinition object" % \
1667 repr(process_definition)
1668
1669
1670 process_definition.set('orders', MultiProcess.\
1671 find_optimal_process_orders(process_definition,
1672 diagram_filter))
1673
1674 process_definition.check_expansion_orders()
1675
1676 processes = base_objects.ProcessList()
1677 amplitudes = AmplitudeList()
1678
1679
1680
1681 failed_procs = []
1682 success_procs = []
1683
1684 non_permuted_procs = []
1685
1686 permutations = []
1687
1688
1689
1690 model = process_definition['model']
1691
1692 islegs = [leg for leg in process_definition['legs'] \
1693 if leg['state'] == False]
1694 fslegs = [leg for leg in process_definition['legs'] \
1695 if leg['state'] == True]
1696
1697 isids = [leg['ids'] for leg in process_definition['legs'] \
1698 if leg['state'] == False]
1699 fsids = [leg['ids'] for leg in process_definition['legs'] \
1700 if leg['state'] == True]
1701 polids = [tuple(leg['polarization']) for leg in process_definition['legs'] \
1702 if leg['state'] == True]
1703
1704 for prod in itertools.product(*isids):
1705 islegs = [\
1706 base_objects.Leg({'id':id, 'state': False,
1707 'polarization': islegs[i]['polarization']})
1708 for i,id in enumerate(prod)]
1709
1710
1711
1712
1713 red_fsidlist = set()
1714
1715 for prod in itertools.product(*fsids):
1716 tag = zip(prod, polids)
1717 tag = sorted(tag)
1718
1719 if tuple(tag) in red_fsidlist:
1720 continue
1721
1722 red_fsidlist.add(tuple(tag))
1723
1724 leg_list = [copy.copy(leg) for leg in islegs]
1725 leg_list.extend([\
1726 base_objects.Leg({'id':id, 'state': True, 'polarization': fslegs[i]['polarization']}) \
1727 for i,id in enumerate(prod)])
1728
1729 legs = base_objects.LegList(leg_list)
1730
1731
1732 sorted_legs = sorted([(l,i+1) for (i,l) in \
1733 enumerate(legs.get_outgoing_id_list(model))])
1734 permutation = [l[1] for l in sorted_legs]
1735
1736 sorted_legs = array.array('i', [l[0] for l in sorted_legs])
1737
1738
1739 if ignore_six_quark_processes and \
1740 len([i for i in sorted_legs if abs(i) in \
1741 ignore_six_quark_processes]) >= 6:
1742 continue
1743
1744
1745
1746 if sorted_legs in failed_procs:
1747 continue
1748
1749
1750 if use_numerical:
1751
1752 initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
1753 if initial_mass == 0:
1754 continue
1755 for leg in legs[1:]:
1756 m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
1757 initial_mass -= abs(m)
1758 if initial_mass.real <= 0:
1759 continue
1760
1761
1762 process = process_definition.get_process_with_legs(legs)
1763
1764 fast_proc = \
1765 array.array('i',[leg.get('id') for leg in legs])
1766 if collect_mirror_procs and \
1767 process_definition.get_ninitial() == 2:
1768
1769 mirror_proc = \
1770 array.array('i', [fast_proc[1], fast_proc[0]] + \
1771 list(fast_proc[2:]))
1772 try:
1773 mirror_amp = \
1774 amplitudes[non_permuted_procs.index(mirror_proc)]
1775 except Exception:
1776
1777 pass
1778 else:
1779
1780 mirror_amp.set('has_mirror_process', True)
1781 logger.info("Process %s added to mirror process %s" % \
1782 (process.base_string(),
1783 mirror_amp.get('process').base_string()))
1784 continue
1785
1786
1787
1788 if not process.get('required_s_channels') and \
1789 not process.get('forbidden_onsh_s_channels') and \
1790 not process.get('forbidden_s_channels') and \
1791 not process.get('is_decay_chain') and not diagram_filter:
1792 try:
1793 crossed_index = success_procs.index(sorted_legs)
1794
1795
1796
1797
1798 if 'loop_diagrams' in amplitudes[crossed_index]:
1799 raise ValueError
1800 except ValueError:
1801
1802 pass
1803 else:
1804
1805 amplitude = MultiProcess.cross_amplitude(\
1806 amplitudes[crossed_index],
1807 process,
1808 permutations[crossed_index],
1809 permutation)
1810 amplitudes.append(amplitude)
1811 success_procs.append(sorted_legs)
1812 permutations.append(permutation)
1813 non_permuted_procs.append(fast_proc)
1814 logger.info("Crossed process found for %s, reuse diagrams." % \
1815 process.base_string())
1816 continue
1817
1818
1819 amplitude = cls.get_amplitude_from_proc(process,
1820 loop_filter=loop_filter)
1821
1822 try:
1823 result = amplitude.generate_diagrams(diagram_filter=diagram_filter)
1824 except InvalidCmd as error:
1825 failed_procs.append(sorted_legs)
1826 else:
1827
1828 if amplitude.get('diagrams'):
1829 amplitudes.append(amplitude)
1830 success_procs.append(sorted_legs)
1831 permutations.append(permutation)
1832 non_permuted_procs.append(fast_proc)
1833 elif not result:
1834
1835 failed_procs.append(sorted_legs)
1836
1837
1838 if not amplitudes:
1839 if len(failed_procs) == 1 and 'error' in locals():
1840 raise error
1841 else:
1842 raise NoDiagramException, \
1843 "No amplitudes generated from process %s. Please enter a valid process" % \
1844 process_definition.nice_string()
1845
1846
1847
1848 return amplitudes
1849
1850 @classmethod
1852 """ Return the correct amplitude type according to the characteristics of
1853 the process proc. The only option that could be specified here is
1854 loop_filter and it is of course not relevant for a tree amplitude."""
1855
1856 return Amplitude({"process": proc})
1857
1858
1859 @staticmethod
1861 """Find the minimal WEIGHTED order for this set of processes.
1862
1863 The algorithm:
1864
1865 1) Check the coupling hierarchy of the model. Assign all
1866 particles to the different coupling hierarchies so that a
1867 particle is considered to be in the highest hierarchy (i.e.,
1868 with lowest value) where it has an interaction.
1869
1870 2) Pick out the legs in the multiprocess according to the
1871 highest hierarchy represented (so don't mix particles from
1872 different hierarchy classes in the same multiparticles!)
1873
1874 3) Find the starting maximum WEIGHTED order as the sum of the
1875 highest n-2 weighted orders
1876
1877 4) Pick out required s-channel particle hierarchies, and use
1878 the highest of the maximum WEIGHTED order from the legs and
1879 the minimum WEIGHTED order extracted from 2*s-channel
1880 hierarchys plus the n-2-2*(number of s-channels) lowest
1881 leg weighted orders.
1882
1883 5) Run process generation with the WEIGHTED order determined
1884 in 3)-4) - # final state gluons, with all gluons removed from
1885 the final state
1886
1887 6) If no process is found, increase WEIGHTED order by 1 and go
1888 back to 5), until we find a process which passes. Return that
1889 order.
1890
1891 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1.
1892 If still no process has passed, return
1893 WEIGHTED = (n-2)*(highest hierarchy)
1894 """
1895
1896 assert isinstance(process_definition, base_objects.ProcessDefinition), \
1897 "%s not valid ProcessDefinition object" % \
1898 repr(process_definition)
1899
1900 processes = base_objects.ProcessList()
1901 amplitudes = AmplitudeList()
1902
1903
1904 if process_definition.get('orders') or \
1905 process_definition.get('overall_orders') or \
1906 process_definition.get('NLO_mode')=='virt':
1907 return process_definition.get('orders')
1908
1909
1910 if process_definition.get_ninitial() == 1 and not \
1911 process_definition.get('is_decay_chain'):
1912 return process_definition.get('orders')
1913
1914 logger.info("Checking for minimal orders which gives processes.")
1915 logger.info("Please specify coupling orders to bypass this step.")
1916
1917
1918 max_order_now, particles, hierarchy = \
1919 process_definition.get_minimum_WEIGHTED()
1920 coupling = 'WEIGHTED'
1921
1922 model = process_definition.get('model')
1923
1924
1925 isids = [leg['ids'] for leg in \
1926 filter(lambda leg: leg['state'] == False, process_definition['legs'])]
1927 fsids = [leg['ids'] for leg in \
1928 filter(lambda leg: leg['state'] == True, process_definition['legs'])]
1929
1930 max_WEIGHTED_order = \
1931 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED())
1932
1933 hierarchydef = process_definition['model'].get('order_hierarchy')
1934 tmp = []
1935 hierarchy = hierarchydef.items()
1936 hierarchy.sort()
1937 for key, value in hierarchydef.items():
1938 if value>1:
1939 tmp.append('%s*%s' % (value,key))
1940 else:
1941 tmp.append('%s' % key)
1942 wgtdef = '+'.join(tmp)
1943
1944
1945 while max_order_now < max_WEIGHTED_order:
1946 logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef))
1947
1948 oldloglevel = logger.level
1949 logger.setLevel(logging.WARNING)
1950
1951
1952
1953 failed_procs = []
1954
1955 for prod in apply(itertools.product, isids):
1956 islegs = [ base_objects.Leg({'id':id, 'state': False}) \
1957 for id in prod]
1958
1959
1960
1961
1962 red_fsidlist = []
1963
1964 for prod in apply(itertools.product, fsids):
1965
1966
1967 if tuple(sorted(prod)) in red_fsidlist:
1968 continue
1969
1970 red_fsidlist.append(tuple(sorted(prod)));
1971
1972
1973
1974 nglue = 0
1975 if 21 in particles[0]:
1976 nglue = len([id for id in prod if id == 21])
1977 prod = [id for id in prod if id != 21]
1978
1979
1980 leg_list = [copy.copy(leg) for leg in islegs]
1981
1982 leg_list.extend([\
1983 base_objects.Leg({'id':id, 'state': True}) \
1984 for id in prod])
1985
1986 legs = base_objects.LegList(leg_list)
1987
1988
1989
1990 coupling_orders_now = {coupling: max_order_now - \
1991 nglue * model['order_hierarchy']['QCD']}
1992
1993
1994 process = base_objects.Process({\
1995 'legs':legs,
1996 'model':model,
1997 'id': process_definition.get('id'),
1998 'orders': coupling_orders_now,
1999 'required_s_channels': \
2000 process_definition.get('required_s_channels'),
2001 'forbidden_onsh_s_channels': \
2002 process_definition.get('forbidden_onsh_s_channels'),
2003 'sqorders_types': \
2004 process_definition.get('sqorders_types'),
2005 'squared_orders': \
2006 process_definition.get('squared_orders'),
2007 'split_orders': \
2008 process_definition.get('split_orders'),
2009 'forbidden_s_channels': \
2010 process_definition.get('forbidden_s_channels'),
2011 'forbidden_particles': \
2012 process_definition.get('forbidden_particles'),
2013 'is_decay_chain': \
2014 process_definition.get('is_decay_chain'),
2015 'overall_orders': \
2016 process_definition.get('overall_orders'),
2017 'split_orders': \
2018 process_definition.get('split_orders')})
2019
2020
2021 process.check_expansion_orders()
2022
2023
2024 sorted_legs = sorted(legs.get_outgoing_id_list(model))
2025
2026
2027 if tuple(sorted_legs) in failed_procs and not process_definition.get('forbidden_s_channels'):
2028 continue
2029
2030 amplitude = Amplitude({'process': process})
2031 try:
2032 amplitude.generate_diagrams(diagram_filter=diagram_filter)
2033 except InvalidCmd, error:
2034 failed_procs.append(tuple(sorted_legs))
2035 else:
2036 if amplitude.get('diagrams'):
2037
2038 logger.setLevel(oldloglevel)
2039 return {coupling: max_order_now}
2040 else:
2041 failed_procs.append(tuple(sorted_legs))
2042
2043 max_order_now += 1
2044 logger.setLevel(oldloglevel)
2045
2046
2047 return {coupling: max_order_now}
2048
2049 @staticmethod
2051 """Return the amplitude crossed with the permutation new_perm"""
2052
2053 perm_map = dict(zip(org_perm, new_perm))
2054
2055 new_amp = copy.copy(amplitude)
2056
2057 for i, leg in enumerate(process.get('legs')):
2058 leg.set('number', i+1)
2059
2060 new_amp.set('process', process)
2061
2062 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map,
2063 process.get('legs'),) for \
2064 d in new_amp.get('diagrams')])
2065 new_amp.set('diagrams', diagrams)
2066 new_amp.trim_diagrams()
2067
2068
2069 new_amp.set('has_mirror_process', False)
2070
2071 return new_amp
2072
def expand_list(mylist):
    """Takes a list of lists and elements and returns a list of flat lists.
    Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]

    Fix: the py2-only builtin apply(itertools.product, tmplist) is
    replaced by the equivalent argument unpacking
    itertools.product(*tmplist), valid on both py2 and py3.
    """

    # Check that argument is a list
    assert isinstance(mylist, list), "Expand_list argument must be a list"

    # Wrap scalar entries so every slot is a list of alternatives
    tmplist = []
    for item in mylist:
        if isinstance(item, list):
            tmplist.append(item)
        else:
            tmplist.append([item])

    # Cartesian product over all slots gives the flat combinations
    res = []
    for item in itertools.product(*tmplist):
        res.append(list(item))

    return res
2098
def expand_list_list(mylist):
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
    """

    # Empty input (or a single empty entry) expands to one empty list
    if not mylist or (len(mylist) == 1 and not mylist[0]):
        return [[]]

    assert isinstance(mylist[0], list), \
           "Expand_list_list needs a list of lists and lists of lists"

    # Base case: a single entry needs no combination with a tail
    if len(mylist) == 1:
        if isinstance(mylist[0][0], list):
            return mylist[0]
        return mylist

    # Recursive case: every alternative for the head combined with
    # every expansion of the tail (tail expanded once, it is pure)
    tails = expand_list_list(mylist[1:])
    if isinstance(mylist[0][0], list):
        heads = mylist[0]
    else:
        heads = [mylist[0]]

    return [head + tail for head in heads for tail in tails]
2138